feat: initial fetcher implementation
1055  Cargo.lock (generated): file diff suppressed because it is too large
15  default.nix (new file)
@@ -0,0 +1,15 @@
let
  lockFile = builtins.fromJSON (builtins.readFile ./flake.lock);
  flake-compat-node = lockFile.nodes.${lockFile.nodes.root.inputs.flake-compat};
  flake-compat = builtins.fetchTarball {
    inherit (flake-compat-node.locked) url;
    sha256 = flake-compat-node.locked.narHash;
  };

  flake = (
    import flake-compat {
      src = ./.;
    }
  );
in
flake.defaultNix
15  flake.lock (generated)
@@ -21,6 +21,20 @@
         "type": "github"
       }
     },
+    "flake-compat": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1751685974,
+        "narHash": "sha256-NKw96t+BgHIYzHUjkTK95FqYRVKB8DHpVhefWSz/kTw=",
+        "rev": "549f2762aebeff29a2e5ece7a7dc0f955281a1d1",
+        "type": "tarball",
+        "url": "https://git.lix.systems/api/v1/repos/lix-project/flake-compat/archive/549f2762aebeff29a2e5ece7a7dc0f955281a1d1.tar.gz"
+      },
+      "original": {
+        "type": "tarball",
+        "url": "https://git.lix.systems/lix-project/flake-compat/archive/main.tar.gz"
+      }
+    },
     "nixpkgs": {
       "locked": {
         "lastModified": 1767116409,
@@ -40,6 +54,7 @@
     "root": {
       "inputs": {
         "fenix": "fenix",
+        "flake-compat": "flake-compat",
         "nixpkgs": "nixpkgs"
       }
     },
flake.nix
@@ -3,6 +3,10 @@
     nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
     fenix.url = "github:nix-community/fenix";
     fenix.inputs.nixpkgs.follows = "nixpkgs";
+    flake-compat = {
+      url = "https://git.lix.systems/lix-project/flake-compat/archive/main.tar.gz";
+      flake = false;
+    };
   };

   outputs = { nixpkgs, fenix, ... }:
     let
@@ -28,12 +28,23 @@ deno_error = "0.7"
|
||||
sha2 = "0.10"
|
||||
hex = "0.4"
|
||||
|
||||
# Fetcher dependencies
|
||||
reqwest = { version = "0.12", features = ["blocking", "rustls-tls"], default-features = false }
|
||||
tar = "0.4"
|
||||
flate2 = "1.0"
|
||||
xz2 = "0.1"
|
||||
bzip2 = "0.5"
|
||||
zip = "2.2"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
dirs = "5.0"
|
||||
tempfile = "3.24"
|
||||
|
||||
rnix = "0.12"
|
||||
|
||||
nix-js-macros = { path = "../nix-js-macros" }
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.24"
|
||||
criterion = { version = "0.5", features = ["html_reports"] }
|
||||
|
||||
[[bench]]
|
||||
|
||||
nix-js/build.rs
@@ -14,6 +14,7 @@ fn main() {
     println!("cargo::rerun-if-changed=runtime-ts/src");
     println!("cargo::rerun-if-changed=runtime-ts/package.json");
     println!("cargo::rerun-if-changed=runtime-ts/tsconfig.json");
+    println!("cargo::rerun-if-changed=runtime-ts/build.mjs");

     if !runtime_ts_dir.join("node_modules").exists() {
         println!("Installing npm dependencies...");
nix-js/runtime-ts/build.mjs
@@ -4,5 +4,5 @@ await esbuild.build({
   entryPoints: ["src/index.ts"],
   outfile: "dist/runtime.js",
   bundle: true,
-  minify: true,
+  // minify: true,
 });
@@ -22,11 +22,15 @@ export const hasAttr =
 export const mapAttrs =
   (f: NixValue) =>
   (attrs: NixValue): NixAttrs => {
-    const new_attrs: NixAttrs = {};
     const forced_attrs = forceAttrs(attrs);
     const forced_f = forceFunction(f);
+    const new_attrs: NixAttrs = {};
     for (const key in forced_attrs) {
-      new_attrs[key] = forceFunction(forced_f(key))(forced_attrs[key]);
+      Object.defineProperty(new_attrs, key, {
+        get: () => forceFunction(forced_f(key))(forced_attrs[key]),
+        enumerable: true,
+        configurable: true,
+      });
     }
     return new_attrs;
   };
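The change above makes mapAttrs lazy: rather than applying f eagerly, each attribute gets a property getter, so the mapping function runs only when the attribute is actually read (and again on each read, since the getter does not cache). For illustration only, a minimal Rust sketch of the same idea, storing unevaluated closures in a map and running them on access; LazyMap and everything in it are hypothetical, not part of this codebase:

    use std::collections::HashMap;

    // Hypothetical sketch: a lazy attribute map whose values are computed on
    // first read, mirroring the Object.defineProperty getter above.
    struct LazyMap {
        thunks: HashMap<String, Box<dyn Fn() -> i64>>,
    }

    impl LazyMap {
        fn get(&self, key: &str) -> Option<i64> {
            // The mapping function runs only when the attribute is accessed,
            // and reruns on every access, just like the uncached TS getter.
            self.thunks.get(key).map(|f| f())
        }
    }

    fn main() {
        let mut thunks: HashMap<String, Box<dyn Fn() -> i64>> = HashMap::new();
        // Rough analogue of mapAttrs (key: value: value * 2) { a = 1; }.
        thunks.insert("a".to_string(), Box::new(|| 1 * 2));
        let m = LazyMap { thunks };
        assert_eq!(m.get("a"), Some(2));
    }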
@@ -12,8 +12,51 @@ import {
 } from "../string-context";
 import { forceFunction } from "../type-assert";

-export const fromJSON = (e: NixValue): never => {
-  throw new Error("Not implemented: fromJSON");
+const convertJsonToNix = (json: unknown): NixValue => {
+  if (json === null) {
+    return null;
+  }
+  if (typeof json === "boolean") {
+    return json;
+  }
+  if (typeof json === "number") {
+    if (Number.isInteger(json)) {
+      return BigInt(json);
+    }
+    return json;
+  }
+  if (typeof json === "string") {
+    return json;
+  }
+  if (Array.isArray(json)) {
+    return json.map(convertJsonToNix);
+  }
+  if (typeof json === "object") {
+    const result: Record<string, NixValue> = {};
+    for (const [key, value] of Object.entries(json)) {
+      result[key] = convertJsonToNix(value);
+    }
+    return result;
+  }
+  throw new TypeError(`unsupported JSON value type: ${typeof json}`);
+};
+
+export const fromJSON = (e: NixValue): NixValue => {
+  const str = force(e);
+  if (typeof str !== "string" && !isStringWithContext(str)) {
+    throw new TypeError(
+      `builtins.fromJSON: expected a string, got ${typeName(str)}`,
+    );
+  }
+  const jsonStr = isStringWithContext(str) ? str.value : str;
+  try {
+    const parsed = JSON.parse(jsonStr);
+    return convertJsonToNix(parsed);
+  } catch (err) {
+    throw new SyntaxError(
+      `builtins.fromJSON: ${err instanceof Error ? err.message : String(err)}`,
+    );
+  }
 };

 export const fromTOML = (e: NixValue): never => {
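The number rule above (integral JSON numbers become Nix integers via BigInt, everything else stays a float) could be exercised the same way on the Rust side with serde_json. A sketch, illustrative only; the runtime does this conversion in TypeScript. One subtlety worth noting: JSON.parse in JS cannot tell 2 from 2.0, so Number.isInteger maps both to an integer, whereas serde_json preserves the distinction:

    use serde_json::Value;

    // Sketch of the same integer/float discrimination using serde_json.
    fn describe(v: &Value) -> String {
        match v {
            Value::Number(n) if n.is_i64() || n.is_u64() => {
                // Integral JSON numbers map to Nix integers (BigInt in JS).
                format!("int {}", n)
            }
            Value::Number(n) => format!("float {}", n.as_f64().unwrap()),
            _ => format!("{}", v),
        }
    }

    fn main() {
        let parsed: Value = serde_json::from_str(r#"[1, 2.5]"#).unwrap();
        for v in parsed.as_array().unwrap() {
            println!("{}", describe(v)); // prints "int 1" then "float 2.5"
        }
    }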
@@ -3,8 +3,9 @@
  * Implemented via Rust ops exposed through deno_core
  */

-import { forceString } from "../type-assert";
-import type { NixValue } from "../types";
+import { forceAttrs, forceBool, forceString } from "../type-assert";
+import type { NixValue, NixAttrs } from "../types";
+import { force } from "../thunk";

 // Declare Deno.core.ops global (provided by deno_core runtime)

@@ -33,24 +34,209 @@ export const fetchClosure = (args: NixValue): never => {
   throw new Error("Not implemented: fetchClosure");
 };

-export const fetchMercurial = (args: NixValue): never => {
-  throw new Error("Not implemented: fetchMercurial");
+interface FetchUrlResult {
+  store_path: string;
+  hash: string;
+}
+
+interface FetchTarballResult {
+  store_path: string;
+  hash: string;
+}
+
+interface FetchGitResult {
+  out_path: string;
+  rev: string;
+  short_rev: string;
+  rev_count: number;
+  last_modified: number;
+  last_modified_date: string;
+  submodules: boolean;
+  nar_hash: string | null;
+}
+
+interface FetchHgResult {
+  out_path: string;
+  branch: string;
+  rev: string;
+  short_rev: string;
+  rev_count: number;
+}
+
+const normalizeUrlInput = (
+  args: NixValue,
+): { url: string; hash?: string; name?: string; executable?: boolean } => {
+  const forced = force(args);
+  if (typeof forced === "string") {
+    return { url: forced };
+  }
+  const attrs = forceAttrs(args);
+  const url = forceString(attrs.url);
+  const hash =
+    "sha256" in attrs
+      ? forceString(attrs.sha256)
+      : "hash" in attrs
+        ? forceString(attrs.hash)
+        : undefined;
+  const name = "name" in attrs ? forceString(attrs.name) : undefined;
+  const executable = "executable" in attrs ? forceBool(attrs.executable) : false;
+  return { url, hash, name, executable };
+};

-export const fetchGit = (args: NixValue): never => {
-  throw new Error("Not implemented: fetchGit");
+export const fetchurl = (args: NixValue): string => {
+  const { url, hash, name, executable } = normalizeUrlInput(args);
+  const result: FetchUrlResult = Deno.core.ops.op_fetch_url(
+    url,
+    hash ?? null,
+    name ?? null,
+    executable ?? false,
+  );
+  return result.store_path;
 };

-export const fetchTarball = (args: NixValue): never => {
-  throw new Error("Not implemented: fetchTarball");
+export const fetchTarball = (args: NixValue): string => {
+  const { url, hash, name } = normalizeUrlInput(args);
+  const result: FetchTarballResult = Deno.core.ops.op_fetch_tarball(
+    url,
+    hash ?? null,
+    name ?? null,
+  );
+  return result.store_path;
 };

-export const fetchTree = (args: NixValue): never => {
-  throw new Error("Not implemented: fetchTree");
+export const fetchGit = (args: NixValue): NixAttrs => {
+  const forced = force(args);
+  if (typeof forced === "string") {
+    const result: FetchGitResult = Deno.core.ops.op_fetch_git(
+      forced,
+      null,
+      null,
+      false,
+      false,
+      false,
+      null,
+    );
+    return {
+      outPath: result.out_path,
+      rev: result.rev,
+      shortRev: result.short_rev,
+      revCount: BigInt(result.rev_count),
+      lastModified: BigInt(result.last_modified),
+      lastModifiedDate: result.last_modified_date,
+      submodules: result.submodules,
+      narHash: result.nar_hash,
+    };
+  }
+  const attrs = forceAttrs(args);
+  const url = forceString(attrs.url);
+  const gitRef = "ref" in attrs ? forceString(attrs.ref) : null;
+  const rev = "rev" in attrs ? forceString(attrs.rev) : null;
+  const shallow = "shallow" in attrs ? forceBool(attrs.shallow) : false;
+  const submodules = "submodules" in attrs ? forceBool(attrs.submodules) : false;
+  const allRefs = "allRefs" in attrs ? forceBool(attrs.allRefs) : false;
+  const name = "name" in attrs ? forceString(attrs.name) : null;
+
+  const result: FetchGitResult = Deno.core.ops.op_fetch_git(
+    url,
+    gitRef,
+    rev,
+    shallow,
+    submodules,
+    allRefs,
+    name,
+  );
+
+  return {
+    outPath: result.out_path,
+    rev: result.rev,
+    shortRev: result.short_rev,
+    revCount: BigInt(result.rev_count),
+    lastModified: BigInt(result.last_modified),
+    lastModifiedDate: result.last_modified_date,
+    submodules: result.submodules,
+    narHash: result.nar_hash,
+  };
 };

-export const fetchurl = (args: NixValue): never => {
-  throw new Error("Not implemented: fetchurl");
+export const fetchMercurial = (args: NixValue): NixAttrs => {
+  const attrs = forceAttrs(args);
+  const url = forceString(attrs.url);
+  const rev = "rev" in attrs ? forceString(attrs.rev) : null;
+  const name = "name" in attrs ? forceString(attrs.name) : null;
+
+  const result: FetchHgResult = Deno.core.ops.op_fetch_hg(url, rev, name);
+
+  return {
+    outPath: result.out_path,
+    branch: result.branch,
+    rev: result.rev,
+    shortRev: result.short_rev,
+    revCount: BigInt(result.rev_count),
+  };
+};
+
+export const fetchTree = (args: NixValue): NixAttrs => {
+  const attrs = forceAttrs(args);
+  const type = "type" in attrs ? forceString(attrs.type) : "auto";
+
+  switch (type) {
+    case "git":
+      return fetchGit(args);
+    case "hg":
+    case "mercurial":
+      return fetchMercurial(args);
+    case "tarball":
+      return { outPath: fetchTarball(args) };
+    case "file":
+      return { outPath: fetchurl(args) };
+    case "path": {
+      const path = forceString(attrs.path);
+      return { outPath: path };
+    }
+    case "github":
+    case "gitlab":
+    case "sourcehut":
+      return fetchGitForge(type, attrs);
+    case "auto":
+    default:
+      return autoDetectAndFetch(attrs);
+  }
+};
+
+const fetchGitForge = (forge: string, attrs: NixAttrs): NixAttrs => {
+  const owner = forceString(attrs.owner);
+  const repo = forceString(attrs.repo);
+  const rev =
+    "rev" in attrs
+      ? forceString(attrs.rev)
+      : "ref" in attrs
+        ? forceString(attrs.ref)
+        : "HEAD";
+
+  const baseUrls: Record<string, string> = {
+    github: "https://github.com",
+    gitlab: "https://gitlab.com",
+    sourcehut: "https://git.sr.ht",
+  };
+
+  const url = `${baseUrls[forge]}/${owner}/${repo}`;
+  return fetchGit({ ...attrs, url, rev });
+};
+
+const autoDetectAndFetch = (attrs: NixAttrs): NixAttrs => {
+  const url = forceString(attrs.url);
+  if (url.endsWith(".git") || url.includes("github.com") || url.includes("gitlab.com")) {
+    return fetchGit(attrs);
+  }
+  if (
+    url.endsWith(".tar.gz") ||
+    url.endsWith(".tar.xz") ||
+    url.endsWith(".tar.bz2") ||
+    url.endsWith(".tgz")
+  ) {
+    return { outPath: fetchTarball(attrs) };
+  }
+  return { outPath: fetchurl(attrs) };
 };

 export const readDir = (path: NixValue): never => {
@@ -13,11 +13,21 @@ import {
   mergeContexts,
   mkStringWithContext,
 } from "./string-context";
+import { coerceToString, StringCoercionMode } from "./builtins/conversion";

+const isNixString = (v: unknown): v is NixString => {
+  return typeof v === "string" || isStringWithContext(v);
+};
+
+const canCoerceToString = (v: NixValue): boolean => {
+  const forced = force(v);
+  if (isNixString(forced)) return true;
+  if (typeof forced === "object" && forced !== null && !Array.isArray(forced)) {
+    if ("outPath" in forced || "__toString" in forced) return true;
+  }
+  return false;
+};

 /**
  * Operator object exported as Nix.op
  * All operators referenced by codegen (e.g., Nix.op.add, Nix.op.eq)
@@ -40,6 +50,12 @@ export const op = {
       return mkStringWithContext(strA + strB, mergeContexts(ctxA, ctxB));
     }

+    if (canCoerceToString(a) && canCoerceToString(b)) {
+      const strA = coerceToString(a, StringCoercionMode.Interpolation, false);
+      const strB = coerceToString(b, StringCoercionMode.Interpolation, false);
+      return strA + strB;
+    }
+
     const [numA, numB] = coerceNumeric(forceNumeric(a), forceNumeric(b));
     return (numA as any) + (numB as any);
   },
@@ -79,6 +95,34 @@
       return av === Number(bv);
     }

+    if (Array.isArray(av) && Array.isArray(bv)) {
+      if (av.length !== bv.length) return false;
+      for (let i = 0; i < av.length; i++) {
+        if (!op.eq(av[i], bv[i])) return false;
+      }
+      return true;
+    }
+
+    if (
+      typeof av === "object" &&
+      av !== null &&
+      !Array.isArray(av) &&
+      typeof bv === "object" &&
+      bv !== null &&
+      !Array.isArray(bv) &&
+      !isNixString(av) &&
+      !isNixString(bv)
+    ) {
+      const keysA = Object.keys(av);
+      const keysB = Object.keys(bv);
+      if (keysA.length !== keysB.length) return false;
+      for (const key of keysA) {
+        if (!(key in bv)) return false;
+        if (!op.eq((av as NixAttrs)[key], (bv as NixAttrs)[key])) return false;
+      }
+      return true;
+    }
+
     return av === bv;
   },
   neq: (a: NixValue, b: NixValue): boolean => {
@@ -16,6 +16,11 @@ export const IS_THUNK = Symbol("is_thunk");
|
||||
*
|
||||
* A thunk wraps a function that produces a value when called.
|
||||
* Once evaluated, the result is cached to avoid recomputation.
|
||||
*
|
||||
* Thunk states:
|
||||
* - Unevaluated: func is defined, result is undefined
|
||||
* - Evaluating (blackhole): func is undefined, result is undefined
|
||||
* - Evaluated: func is undefined, result is defined
|
||||
*/
|
||||
export class NixThunk implements NixThunkInterface {
|
||||
[key: symbol]: any;
|
||||
@@ -43,23 +48,37 @@ export const isThunk = (value: unknown): value is NixThunkInterface => {
|
||||
* If the value is a thunk, evaluate it and cache the result
|
||||
* If already evaluated or not a thunk, return as-is
|
||||
*
|
||||
* Uses "blackhole" detection to catch infinite recursion:
|
||||
* - Before evaluating, set func to undefined (entering blackhole state)
|
||||
* - If we encounter a thunk with func=undefined and result=undefined, it's a blackhole
|
||||
*
|
||||
* @param value - Value to force (may be a thunk)
|
||||
* @returns The forced/evaluated value
|
||||
* @throws Error if infinite recursion is detected
|
||||
*/
|
||||
export const force = (value: NixValue): NixStrictValue => {
|
||||
if (!isThunk(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
// Already evaluated - return cached result
|
||||
// Check if already evaluated or in blackhole state
|
||||
if (value.func === undefined) {
|
||||
return value.result!;
|
||||
// Blackhole: func is undefined but result is also undefined
|
||||
if (value.result === undefined) {
|
||||
throw new Error("infinite recursion encountered (blackhole)");
|
||||
}
|
||||
// Already evaluated - return cached result
|
||||
return value.result;
|
||||
}
|
||||
|
||||
// Evaluate and cache
|
||||
const result = force(value.func());
|
||||
value.result = result;
|
||||
// Save func and enter blackhole state BEFORE calling func()
|
||||
const func = value.func;
|
||||
value.func = undefined;
|
||||
// result stays undefined - this is the blackhole state
|
||||
|
||||
// Evaluate and cache
|
||||
const result = force(func());
|
||||
value.result = result;
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
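The blackhole protocol above has a direct Rust analogue: take the unevaluated closure out of the thunk before running it, leaving a sentinel behind, so that any re-entrant force hits the sentinel and reports a cycle instead of recursing forever. A sketch of that technique (illustrative only, not the project's API):

    use std::cell::RefCell;

    // Thunk states, mirroring the TS doc comment: Unevaluated, Blackhole
    // (currently being forced), and Done (cached result).
    enum State {
        Unevaluated(Box<dyn FnOnce() -> i64>),
        Blackhole,
        Done(i64),
    }

    struct Thunk(RefCell<State>);

    impl Thunk {
        fn force(&self) -> i64 {
            // Swap in the blackhole BEFORE running the closure, so a
            // re-entrant force sees it and detects the cycle.
            match self.0.replace(State::Blackhole) {
                State::Done(v) => {
                    self.0.replace(State::Done(v));
                    v
                }
                State::Blackhole => panic!("infinite recursion encountered (blackhole)"),
                State::Unevaluated(f) => {
                    let v = f();
                    self.0.replace(State::Done(v));
                    v
                }
            }
        }
    }

    fn main() {
        let t = Thunk(RefCell::new(State::Unevaluated(Box::new(|| 40 + 2))));
        assert_eq!(t.force(), 42);
        assert_eq!(t.force(), 42); // cached on the second call
    }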
54  nix-js/runtime-ts/src/types/global.d.ts (vendored)
@@ -1,5 +1,34 @@
 import type { NixRuntime } from "..";

+interface FetchUrlResult {
+  store_path: string;
+  hash: string;
+}
+
+interface FetchTarballResult {
+  store_path: string;
+  hash: string;
+}
+
+interface FetchGitResult {
+  out_path: string;
+  rev: string;
+  short_rev: string;
+  rev_count: number;
+  last_modified: number;
+  last_modified_date: string;
+  submodules: boolean;
+  nar_hash: string | null;
+}
+
+interface FetchHgResult {
+  out_path: string;
+  branch: string;
+  rev: string;
+  short_rev: string;
+  rev_count: number;
+}
+
 declare global {
   var Nix: NixRuntime;
   namespace Deno {
@@ -18,6 +47,31 @@ declare global {
         hash_mode: string,
         name: string,
       ): string;
+      function op_fetch_url(
+        url: string,
+        expected_hash: string | null,
+        name: string | null,
+        executable: boolean,
+      ): FetchUrlResult;
+      function op_fetch_tarball(
+        url: string,
+        expected_hash: string | null,
+        name: string | null,
+      ): FetchTarballResult;
+      function op_fetch_git(
+        url: string,
+        ref: string | null,
+        rev: string | null,
+        shallow: boolean,
+        submodules: boolean,
+        all_refs: boolean,
+        name: string | null,
+      ): FetchGitResult;
+      function op_fetch_hg(
+        url: string,
+        rev: string | null,
+        name: string | null,
+      ): FetchHgResult;
     }
   }
 }
@@ -9,8 +9,14 @@ pub enum ErrorKind {
     ParseError(String),
     #[error("error occurred during downgrade stage: {0}")]
     DowngradeError(String),
-    #[error("error occurred during evaluation stage: {0}")]
-    EvalError(String),
+    #[error(
+        "error occurred during evaluation stage: {msg}{}",
+        backtrace.as_ref().map_or("".into(), |backtrace| format!("\nBacktrace: {backtrace}"))
+    )]
+    EvalError {
+        msg: String,
+        backtrace: Option<String>,
+    },
     #[error("internal error occurred: {0}")]
     InternalError(String),
     #[error("{0}")]
@@ -114,8 +120,11 @@ impl Error {
     pub fn downgrade_error(msg: String) -> Self {
         Self::new(ErrorKind::DowngradeError(msg))
     }
-    pub fn eval_error(msg: String) -> Self {
-        Self::new(ErrorKind::EvalError(msg))
+    pub fn eval_error(msg: String, backtrace: Option<String>) -> Self {
+        Self::new(ErrorKind::EvalError {
+            msg,
+            backtrace
+        })
     }
     pub fn internal(msg: String) -> Self {
         Self::new(ErrorKind::InternalError(msg))
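The new EvalError variant appends the backtrace as a suffix line via map_or inside the thiserror format attribute. A quick hand check of what that format string produces; render here is a stand-in for the derived Display impl, not code from the commit:

    // Illustrative reproduction of the EvalError display format above.
    fn render(msg: &str, backtrace: Option<&str>) -> String {
        format!(
            "error occurred during evaluation stage: {}{}",
            msg,
            backtrace.map_or("".into(), |b| format!("\nBacktrace: {b}"))
        )
    }

    fn main() {
        assert_eq!(
            render("oops", None),
            "error occurred during evaluation stage: oops"
        );
        assert!(render("oops", Some("at foo.nix:3")).contains("Backtrace: at foo.nix:3"));
    }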
214  nix-js/src/fetcher/archive.rs (new file)
@@ -0,0 +1,214 @@
use std::fs::{self, File};
use std::io::Cursor;
use std::path::PathBuf;

use flate2::read::GzDecoder;

#[derive(Debug, Clone, Copy)]
pub enum ArchiveFormat {
    TarGz,
    TarXz,
    TarBz2,
    Tar,
    Zip,
}

impl ArchiveFormat {
    pub fn detect(url: &str, data: &[u8]) -> Self {
        if url.ends_with(".tar.gz") || url.ends_with(".tgz") {
            return ArchiveFormat::TarGz;
        }
        if url.ends_with(".tar.xz") || url.ends_with(".txz") {
            return ArchiveFormat::TarXz;
        }
        if url.ends_with(".tar.bz2") || url.ends_with(".tbz2") {
            return ArchiveFormat::TarBz2;
        }
        if url.ends_with(".tar") {
            return ArchiveFormat::Tar;
        }
        if url.ends_with(".zip") {
            return ArchiveFormat::Zip;
        }

        if data.len() >= 2 && data[0] == 0x1f && data[1] == 0x8b {
            return ArchiveFormat::TarGz;
        }
        if data.len() >= 6 && &data[0..6] == b"\xfd7zXZ\x00" {
            return ArchiveFormat::TarXz;
        }
        if data.len() >= 3 && &data[0..3] == b"BZh" {
            return ArchiveFormat::TarBz2;
        }
        if data.len() >= 4 && &data[0..4] == b"PK\x03\x04" {
            return ArchiveFormat::Zip;
        }

        ArchiveFormat::TarGz
    }
}

pub fn extract_archive(data: &[u8], dest: &PathBuf) -> Result<PathBuf, ArchiveError> {
    let format = ArchiveFormat::detect("", data);

    let temp_dir = dest.join("_extract_temp");
    fs::create_dir_all(&temp_dir)?;

    match format {
        ArchiveFormat::TarGz => extract_tar_gz(data, &temp_dir)?,
        ArchiveFormat::TarXz => extract_tar_xz(data, &temp_dir)?,
        ArchiveFormat::TarBz2 => extract_tar_bz2(data, &temp_dir)?,
        ArchiveFormat::Tar => extract_tar(data, &temp_dir)?,
        ArchiveFormat::Zip => extract_zip(data, &temp_dir)?,
    }

    strip_single_toplevel(&temp_dir, dest)
}

fn extract_tar_gz(data: &[u8], dest: &PathBuf) -> Result<(), ArchiveError> {
    let decoder = GzDecoder::new(Cursor::new(data));
    let mut archive = tar::Archive::new(decoder);
    archive.unpack(dest)?;
    Ok(())
}

fn extract_tar_xz(data: &[u8], dest: &PathBuf) -> Result<(), ArchiveError> {
    let decoder = xz2::read::XzDecoder::new(Cursor::new(data));
    let mut archive = tar::Archive::new(decoder);
    archive.unpack(dest)?;
    Ok(())
}

fn extract_tar_bz2(data: &[u8], dest: &PathBuf) -> Result<(), ArchiveError> {
    let decoder = bzip2::read::BzDecoder::new(Cursor::new(data));
    let mut archive = tar::Archive::new(decoder);
    archive.unpack(dest)?;
    Ok(())
}

fn extract_tar(data: &[u8], dest: &PathBuf) -> Result<(), ArchiveError> {
    let mut archive = tar::Archive::new(Cursor::new(data));
    archive.unpack(dest)?;
    Ok(())
}

fn extract_zip(data: &[u8], dest: &PathBuf) -> Result<(), ArchiveError> {
    let cursor = Cursor::new(data);
    let mut archive = zip::ZipArchive::new(cursor)?;

    for i in 0..archive.len() {
        let mut file = archive.by_index(i)?;
        let outpath = dest.join(file.mangled_name());

        if file.is_dir() {
            fs::create_dir_all(&outpath)?;
        } else {
            if let Some(parent) = outpath.parent() {
                fs::create_dir_all(parent)?;
            }
            let mut outfile = File::create(&outpath)?;
            std::io::copy(&mut file, &mut outfile)?;
        }

        #[cfg(unix)]
        {
            use std::os::unix::fs::PermissionsExt;
            if let Some(mode) = file.unix_mode() {
                fs::set_permissions(&outpath, fs::Permissions::from_mode(mode))?;
            }
        }
    }

    Ok(())
}

fn strip_single_toplevel(temp_dir: &PathBuf, dest: &PathBuf) -> Result<PathBuf, ArchiveError> {
    let entries: Vec<_> = fs::read_dir(temp_dir)?
        .filter_map(|e| e.ok())
        .filter(|e| !e.file_name().to_string_lossy().starts_with('.'))
        .collect();

    let source_dir = if entries.len() == 1 && entries[0].file_type()?.is_dir() {
        entries[0].path()
    } else {
        temp_dir.clone()
    };

    let final_dest = dest.join("content");
    if final_dest.exists() {
        fs::remove_dir_all(&final_dest)?;
    }

    if source_dir == *temp_dir {
        fs::rename(temp_dir, &final_dest)?;
    } else {
        copy_dir_recursive(&source_dir, &final_dest)?;
        fs::remove_dir_all(temp_dir)?;
    }

    Ok(final_dest)
}

fn copy_dir_recursive(src: &PathBuf, dst: &PathBuf) -> Result<(), std::io::Error> {
    fs::create_dir_all(dst)?;

    for entry in fs::read_dir(src)? {
        let entry = entry?;
        let path = entry.path();
        let dest_path = dst.join(entry.file_name());
        let metadata = fs::symlink_metadata(&path)?;

        if metadata.is_symlink() {
            let target = fs::read_link(&path)?;
            #[cfg(unix)]
            {
                std::os::unix::fs::symlink(&target, &dest_path)?;
            }
            #[cfg(windows)]
            {
                if target.is_dir() {
                    std::os::windows::fs::symlink_dir(&target, &dest_path)?;
                } else {
                    std::os::windows::fs::symlink_file(&target, &dest_path)?;
                }
            }
        } else if metadata.is_dir() {
            copy_dir_recursive(&path, &dest_path)?;
        } else {
            fs::copy(&path, &dest_path)?;
        }
    }

    Ok(())
}

#[derive(Debug)]
pub enum ArchiveError {
    IoError(std::io::Error),
    ZipError(zip::result::ZipError),
    UnsupportedFormat(String),
}

impl std::fmt::Display for ArchiveError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ArchiveError::IoError(e) => write!(f, "I/O error: {}", e),
            ArchiveError::ZipError(e) => write!(f, "ZIP error: {}", e),
            ArchiveError::UnsupportedFormat(fmt) => write!(f, "Unsupported archive format: {}", fmt),
        }
    }
}

impl std::error::Error for ArchiveError {}

impl From<std::io::Error> for ArchiveError {
    fn from(e: std::io::Error) -> Self {
        ArchiveError::IoError(e)
    }
}

impl From<zip::result::ZipError> for ArchiveError {
    fn from(e: zip::result::ZipError) -> Self {
        ArchiveError::ZipError(e)
    }
}
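Note that extract_archive calls ArchiveFormat::detect with an empty URL, so only the magic-byte checks (and the gzip fallback) apply on that path; the URL-suffix checks matter only for callers that invoke detect directly. A hypothetical crate-internal caller might look like this, where the scratch-directory name is made up for the demo:

    use std::path::PathBuf;

    // Hypothetical crate-internal usage: unpack raw archive bytes into a
    // scratch directory and get back <dest>/content.
    fn unpack_demo(data: &[u8]) -> Result<PathBuf, Box<dyn std::error::Error>> {
        let dest = std::env::temp_dir().join("nix-js-extract-demo");
        std::fs::create_dir_all(&dest)?;
        // Format is sniffed from magic bytes since no URL hint reaches detect().
        let content = super::archive::extract_archive(data, &dest)?;
        Ok(content)
    }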
277  nix-js/src/fetcher/cache.rs (new file)
@@ -0,0 +1,277 @@
use std::fs::{self, File};
use std::io::Write;
use std::path::PathBuf;

use serde::{Deserialize, Serialize};

use super::archive::ArchiveError;

#[derive(Debug)]
pub enum CacheError {
    Io(std::io::Error),
    Archive(ArchiveError),
    Json(serde_json::Error),
}

impl std::fmt::Display for CacheError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            CacheError::Io(e) => write!(f, "I/O error: {}", e),
            CacheError::Archive(e) => write!(f, "Archive error: {}", e),
            CacheError::Json(e) => write!(f, "JSON error: {}", e),
        }
    }
}

impl std::error::Error for CacheError {}

impl From<std::io::Error> for CacheError {
    fn from(e: std::io::Error) -> Self {
        CacheError::Io(e)
    }
}

impl From<ArchiveError> for CacheError {
    fn from(e: ArchiveError) -> Self {
        CacheError::Archive(e)
    }
}

impl From<serde_json::Error> for CacheError {
    fn from(e: serde_json::Error) -> Self {
        CacheError::Json(e)
    }
}

#[derive(Debug)]
pub struct FetcherCache {
    base_dir: PathBuf,
}

#[derive(Serialize, Deserialize)]
struct CacheMetadata {
    url: String,
    hash: String,
    name: String,
}

impl FetcherCache {
    pub fn new() -> Result<Self, std::io::Error> {
        let base_dir = dirs::cache_dir()
            .unwrap_or_else(|| PathBuf::from("/tmp"))
            .join("nix-js")
            .join("fetchers");

        fs::create_dir_all(&base_dir)?;

        Ok(Self { base_dir })
    }

    fn url_cache_dir(&self) -> PathBuf {
        self.base_dir.join("url")
    }

    fn tarball_cache_dir(&self) -> PathBuf {
        self.base_dir.join("tarball")
    }

    fn git_cache_dir(&self) -> PathBuf {
        self.base_dir.join("git")
    }

    fn hg_cache_dir(&self) -> PathBuf {
        self.base_dir.join("hg")
    }

    fn hash_key(url: &str) -> String {
        crate::nix_hash::sha256_hex(url)
    }

    pub fn get_url(&self, url: &str, expected_hash: &str) -> Option<PathBuf> {
        let cache_dir = self.url_cache_dir();
        let key = Self::hash_key(url);
        let meta_path = cache_dir.join(format!("{}.meta", key));
        let data_path = cache_dir.join(format!("{}.data", key));

        if !meta_path.exists() || !data_path.exists() {
            return None;
        }

        let meta: CacheMetadata = serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;

        if meta.hash == expected_hash {
            Some(data_path)
        } else {
            None
        }
    }

    pub fn put_url(
        &self,
        url: &str,
        hash: &str,
        data: &[u8],
        name: &str,
        executable: bool,
    ) -> Result<PathBuf, std::io::Error> {
        let cache_dir = self.url_cache_dir();
        fs::create_dir_all(&cache_dir)?;

        let key = Self::hash_key(url);
        let meta_path = cache_dir.join(format!("{}.meta", key));
        let data_path = cache_dir.join(format!("{}.data", key));

        let mut file = File::create(&data_path)?;
        file.write_all(data)?;

        #[cfg(unix)]
        if executable {
            use std::os::unix::fs::PermissionsExt;
            let mut perms = fs::metadata(&data_path)?.permissions();
            perms.set_mode(0o755);
            fs::set_permissions(&data_path, perms)?;
        }

        let meta = CacheMetadata {
            url: url.to_string(),
            hash: hash.to_string(),
            name: name.to_string(),
        };
        fs::write(&meta_path, serde_json::to_string(&meta)?)?;

        let store_path = self.make_store_path(hash, name);
        if !store_path.exists() {
            fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?;
            fs::copy(&data_path, &store_path)?;

            #[cfg(unix)]
            if executable {
                use std::os::unix::fs::PermissionsExt;
                let mut perms = fs::metadata(&store_path)?.permissions();
                perms.set_mode(0o755);
                fs::set_permissions(&store_path, perms)?;
            }
        }

        Ok(store_path)
    }

    pub fn get_tarball(&self, url: &str, expected_hash: &str) -> Option<PathBuf> {
        let cache_dir = self.tarball_cache_dir();
        let key = Self::hash_key(url);
        let meta_path = cache_dir.join(&key).join(".meta");
        let data_dir = cache_dir.join(&key);

        if !meta_path.exists() || !data_dir.exists() {
            return None;
        }

        let meta: CacheMetadata = serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;

        if meta.hash == expected_hash {
            Some(self.make_store_path(&meta.hash, &meta.name))
        } else {
            None
        }
    }

    pub fn put_tarball(
        &self,
        url: &str,
        hash: &str,
        data: &[u8],
        name: &str,
    ) -> Result<PathBuf, CacheError> {
        let cache_dir = self.tarball_cache_dir();
        let key = Self::hash_key(url);
        let extract_dir = cache_dir.join(&key);

        fs::create_dir_all(&extract_dir)?;

        let extracted_path = super::archive::extract_archive(data, &extract_dir)?;

        let meta = CacheMetadata {
            url: url.to_string(),
            hash: hash.to_string(),
            name: name.to_string(),
        };
        fs::write(extract_dir.join(".meta"), serde_json::to_string(&meta)?)?;

        let store_path = self.make_store_path(hash, name);
        if !store_path.exists() {
            fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?;
            copy_dir_recursive(&extracted_path, &store_path)?;
        }

        Ok(store_path)
    }

    pub fn put_tarball_from_extracted(
        &self,
        url: &str,
        hash: &str,
        extracted_path: &PathBuf,
        name: &str,
    ) -> Result<PathBuf, CacheError> {
        let cache_dir = self.tarball_cache_dir();
        let key = Self::hash_key(url);
        let cache_entry_dir = cache_dir.join(&key);

        fs::create_dir_all(&cache_entry_dir)?;

        let cached_content = cache_entry_dir.join("content");
        if !cached_content.exists() {
            copy_dir_recursive(extracted_path, &cached_content)?;
        }

        let meta = CacheMetadata {
            url: url.to_string(),
            hash: hash.to_string(),
            name: name.to_string(),
        };
        fs::write(cache_entry_dir.join(".meta"), serde_json::to_string(&meta)?)?;

        let store_path = self.make_store_path(hash, name);
        if !store_path.exists() {
            fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?;
            copy_dir_recursive(extracted_path, &store_path)?;
        }

        Ok(store_path)
    }

    pub fn get_git_bare(&self, url: &str) -> PathBuf {
        let key = Self::hash_key(url);
        self.git_cache_dir().join(key)
    }

    pub fn get_hg_bare(&self, url: &str) -> PathBuf {
        let key = Self::hash_key(url);
        self.hg_cache_dir().join(key)
    }

    pub fn make_store_path(&self, hash: &str, name: &str) -> PathBuf {
        let short_hash = &hash[..32.min(hash.len())];
        self.base_dir
            .join("store")
            .join(format!("{}-{}", short_hash, name))
    }
}

fn copy_dir_recursive(src: &PathBuf, dst: &PathBuf) -> Result<(), std::io::Error> {
    fs::create_dir_all(dst)?;

    for entry in fs::read_dir(src)? {
        let entry = entry?;
        let path = entry.path();
        let dest_path = dst.join(entry.file_name());

        if path.is_dir() {
            copy_dir_recursive(&path, &dest_path)?;
        } else {
            fs::copy(&path, &dest_path)?;
        }
    }

    Ok(())
}
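The cache keys entries by sha256(url) and only returns a hit when the recorded hash matches the expected one. A hypothetical crate-internal roundtrip through the flat-file URL cache (URL and payload here are made up):

    // Sketch: put_url stores data plus .meta JSON keyed by sha256(url);
    // get_url returns the cached file only when the recorded hash matches.
    fn cache_roundtrip() -> Result<(), Box<dyn std::error::Error>> {
        let cache = crate::fetcher::FetcherCache::new()?;
        let data = b"hello";
        let hash = crate::nix_hash::sha256_hex(std::str::from_utf8(data)?);
        let stored = cache.put_url("https://example.org/hello.txt", &hash, data, "hello.txt", false)?;
        assert!(stored.exists());
        assert!(cache.get_url("https://example.org/hello.txt", &hash).is_some());
        Ok(())
    }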
63  nix-js/src/fetcher/download.rs (new file)
@@ -0,0 +1,63 @@
use reqwest::blocking::Client;
use std::time::Duration;

pub struct Downloader {
    client: Client,
}

impl Downloader {
    pub fn new() -> Self {
        let client = Client::builder()
            .timeout(Duration::from_secs(300))
            .user_agent("nix-js/0.1")
            .build()
            .expect("Failed to create HTTP client");

        Self { client }
    }

    pub fn download(&self, url: &str) -> Result<Vec<u8>, DownloadError> {
        let response = self
            .client
            .get(url)
            .send()
            .map_err(|e| DownloadError::NetworkError(e.to_string()))?;

        if !response.status().is_success() {
            return Err(DownloadError::HttpError {
                url: url.to_string(),
                status: response.status().as_u16(),
            });
        }

        response
            .bytes()
            .map(|b| b.to_vec())
            .map_err(|e| DownloadError::NetworkError(e.to_string()))
    }
}

impl Default for Downloader {
    fn default() -> Self {
        Self::new()
    }
}

#[derive(Debug)]
pub enum DownloadError {
    NetworkError(String),
    HttpError { url: String, status: u16 },
}

impl std::fmt::Display for DownloadError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            DownloadError::NetworkError(msg) => write!(f, "Network error: {}", msg),
            DownloadError::HttpError { url, status } => {
                write!(f, "HTTP error {} for URL: {}", status, url)
            }
        }
    }
}

impl std::error::Error for DownloadError {}
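The blocking Downloader bakes in a 5-minute timeout and a nix-js/0.1 user agent, and turns any non-2xx status into an HttpError. A hypothetical crate-internal helper that flattens both error cases into a string:

    // Sketch of calling the blocking downloader from elsewhere in the crate.
    fn fetch_bytes(url: &str) -> Result<Vec<u8>, String> {
        let dl = crate::fetcher::Downloader::new();
        dl.download(url).map_err(|e| e.to_string())
    }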
303  nix-js/src/fetcher/git.rs (new file)
@@ -0,0 +1,303 @@
use std::fs;
use std::path::PathBuf;
use std::process::Command;

use super::cache::FetcherCache;
use super::FetchGitResult;

pub fn fetch_git(
    cache: &FetcherCache,
    url: &str,
    git_ref: Option<&str>,
    rev: Option<&str>,
    _shallow: bool,
    submodules: bool,
    all_refs: bool,
    name: &str,
) -> Result<FetchGitResult, GitError> {
    let bare_repo = cache.get_git_bare(url);

    if !bare_repo.exists() {
        clone_bare(url, &bare_repo)?;
    } else {
        fetch_repo(&bare_repo, all_refs)?;
    }

    let target_rev = resolve_rev(&bare_repo, git_ref, rev)?;
    let checkout_dir = checkout_rev(&bare_repo, &target_rev, submodules, name, cache)?;

    let rev_count = get_rev_count(&bare_repo, &target_rev)?;
    let last_modified = get_last_modified(&bare_repo, &target_rev)?;
    let last_modified_date = format_timestamp(last_modified);

    let short_rev = if target_rev.len() >= 7 {
        target_rev[..7].to_string()
    } else {
        target_rev.clone()
    };

    Ok(FetchGitResult {
        out_path: checkout_dir.to_string_lossy().to_string(),
        rev: target_rev,
        short_rev,
        rev_count,
        last_modified,
        last_modified_date,
        submodules,
        nar_hash: None,
    })
}

fn clone_bare(url: &str, dest: &PathBuf) -> Result<(), GitError> {
    fs::create_dir_all(dest.parent().unwrap_or(dest))?;

    let output = Command::new("git")
        .args(["clone", "--bare", url])
        .arg(dest)
        .output()?;

    if !output.status.success() {
        return Err(GitError::CommandFailed {
            operation: "clone".to_string(),
            message: String::from_utf8_lossy(&output.stderr).to_string(),
        });
    }

    Ok(())
}

fn fetch_repo(repo: &PathBuf, all_refs: bool) -> Result<(), GitError> {
    let mut args = vec!["fetch", "--prune"];
    if all_refs {
        args.push("--all");
    }

    let output = Command::new("git")
        .args(args)
        .current_dir(repo)
        .output()?;

    if !output.status.success() {
        return Err(GitError::CommandFailed {
            operation: "fetch".to_string(),
            message: String::from_utf8_lossy(&output.stderr).to_string(),
        });
    }

    Ok(())
}

fn resolve_rev(repo: &PathBuf, git_ref: Option<&str>, rev: Option<&str>) -> Result<String, GitError> {
    if let Some(rev) = rev {
        return Ok(rev.to_string());
    }

    let ref_to_resolve = git_ref.unwrap_or("HEAD");

    let output = Command::new("git")
        .args(["rev-parse", ref_to_resolve])
        .current_dir(repo)
        .output()?;

    if !output.status.success() {
        let output = Command::new("git")
            .args(["rev-parse", &format!("refs/heads/{}", ref_to_resolve)])
            .current_dir(repo)
            .output()?;

        if !output.status.success() {
            let output = Command::new("git")
                .args(["rev-parse", &format!("refs/tags/{}", ref_to_resolve)])
                .current_dir(repo)
                .output()?;

            if !output.status.success() {
                return Err(GitError::CommandFailed {
                    operation: "rev-parse".to_string(),
                    message: format!("Could not resolve ref: {}", ref_to_resolve),
                });
            }
            return Ok(String::from_utf8_lossy(&output.stdout).trim().to_string());
        }
        return Ok(String::from_utf8_lossy(&output.stdout).trim().to_string());
    }

    Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
}

fn checkout_rev(
    bare_repo: &PathBuf,
    rev: &str,
    submodules: bool,
    name: &str,
    cache: &FetcherCache,
) -> Result<PathBuf, GitError> {
    let hash = crate::nix_hash::sha256_hex(&format!("{}:{}", bare_repo.display(), rev));
    let checkout_dir = cache.make_store_path(&hash, name);

    if checkout_dir.exists() {
        return Ok(checkout_dir);
    }

    fs::create_dir_all(&checkout_dir)?;

    let output = Command::new("git")
        .args(["--work-tree", checkout_dir.to_str().unwrap_or(".")])
        .arg("checkout")
        .arg(rev)
        .arg("--")
        .arg(".")
        .current_dir(bare_repo)
        .output()?;

    if !output.status.success() {
        fs::remove_dir_all(&checkout_dir)?;
        return Err(GitError::CommandFailed {
            operation: "checkout".to_string(),
            message: String::from_utf8_lossy(&output.stderr).to_string(),
        });
    }

    if submodules {
        let output = Command::new("git")
            .args(["submodule", "update", "--init", "--recursive"])
            .current_dir(&checkout_dir)
            .output()?;

        if !output.status.success() {
            eprintln!(
                "Warning: failed to initialize submodules: {}",
                String::from_utf8_lossy(&output.stderr)
            );
        }
    }

    let git_dir = checkout_dir.join(".git");
    if git_dir.exists() {
        fs::remove_dir_all(&git_dir)?;
    }

    Ok(checkout_dir)
}

fn get_rev_count(repo: &PathBuf, rev: &str) -> Result<u64, GitError> {
    let output = Command::new("git")
        .args(["rev-list", "--count", rev])
        .current_dir(repo)
        .output()?;

    if !output.status.success() {
        return Ok(0);
    }

    let count_str = String::from_utf8_lossy(&output.stdout);
    count_str.trim().parse().unwrap_or(0).pipe(Ok)
}

fn get_last_modified(repo: &PathBuf, rev: &str) -> Result<u64, GitError> {
    let output = Command::new("git")
        .args(["log", "-1", "--format=%ct", rev])
        .current_dir(repo)
        .output()?;

    if !output.status.success() {
        return Ok(0);
    }

    let ts_str = String::from_utf8_lossy(&output.stdout);
    ts_str.trim().parse().unwrap_or(0).pipe(Ok)
}

fn format_timestamp(ts: u64) -> String {
    use std::time::{Duration, UNIX_EPOCH};

    let datetime = UNIX_EPOCH + Duration::from_secs(ts);
    let secs = datetime
        .duration_since(UNIX_EPOCH)
        .map(|d| d.as_secs())
        .unwrap_or(0);

    let days_since_epoch = secs / 86400;
    let remaining_secs = secs % 86400;
    let hours = remaining_secs / 3600;
    let minutes = (remaining_secs % 3600) / 60;
    let seconds = remaining_secs % 60;

    let (year, month, day) = days_to_ymd(days_since_epoch);

    format!(
        "{:04}{:02}{:02}{:02}{:02}{:02}",
        year, month, day, hours, minutes, seconds
    )
}

fn days_to_ymd(days: u64) -> (u64, u64, u64) {
    let mut y = 1970;
    let mut remaining = days as i64;

    loop {
        let days_in_year = if is_leap_year(y) { 366 } else { 365 };
        if remaining < days_in_year {
            break;
        }
        remaining -= days_in_year;
        y += 1;
    }

    let days_in_months: [i64; 12] = if is_leap_year(y) {
        [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    } else {
        [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    };

    let mut m = 1;
    for days_in_month in days_in_months.iter() {
        if remaining < *days_in_month {
            break;
        }
        remaining -= *days_in_month;
        m += 1;
    }

    (y, m, (remaining + 1) as u64)
}

fn is_leap_year(y: u64) -> bool {
    (y % 4 == 0 && y % 100 != 0) || (y % 400 == 0)
}

trait Pipe: Sized {
    fn pipe<F, R>(self, f: F) -> R
    where
        F: FnOnce(Self) -> R,
    {
        f(self)
    }
}

impl<T> Pipe for T {}

#[derive(Debug)]
pub enum GitError {
    IoError(std::io::Error),
    CommandFailed { operation: String, message: String },
}

impl std::fmt::Display for GitError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            GitError::IoError(e) => write!(f, "I/O error: {}", e),
            GitError::CommandFailed { operation, message } => {
                write!(f, "Git {} failed: {}", operation, message)
            }
        }
    }
}

impl std::error::Error for GitError {}

impl From<std::io::Error> for GitError {
    fn from(e: std::io::Error) -> Self {
        GitError::IoError(e)
    }
}
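The hand-rolled calendar math above (days_to_ymd plus format_timestamp) produces the YYYYMMDDHHMMSS string that builtins.fetchGit exposes as lastModifiedDate. A test sketch one could drop at the bottom of git.rs to pin that down; the test module itself is not part of the commit:

    #[cfg(test)]
    mod date_tests {
        use super::*;

        #[test]
        fn format_timestamp_matches_fetchgit_convention() {
            // Epoch itself.
            assert_eq!(format_timestamp(0), "19700101000000");
            // 2021-01-01T00:00:00Z == 1609459200, crossing the 2020 leap year.
            assert_eq!(format_timestamp(1_609_459_200), "20210101000000");
        }
    }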
196  nix-js/src/fetcher/hg.rs (new file)
@@ -0,0 +1,196 @@
use std::fs;
use std::path::PathBuf;
use std::process::Command;

use super::cache::FetcherCache;
use super::FetchHgResult;

pub fn fetch_hg(
    cache: &FetcherCache,
    url: &str,
    rev: Option<&str>,
    name: &str,
) -> Result<FetchHgResult, HgError> {
    let bare_repo = cache.get_hg_bare(url);

    if !bare_repo.exists() {
        clone_repo(url, &bare_repo)?;
    } else {
        pull_repo(&bare_repo)?;
    }

    let target_rev = rev.unwrap_or("tip").to_string();
    let resolved_rev = resolve_rev(&bare_repo, &target_rev)?;
    let branch = get_branch(&bare_repo, &resolved_rev)?;

    let checkout_dir = checkout_rev(&bare_repo, &resolved_rev, name, cache)?;

    let rev_count = get_rev_count(&bare_repo, &resolved_rev)?;

    let short_rev = if resolved_rev.len() >= 12 {
        resolved_rev[..12].to_string()
    } else {
        resolved_rev.clone()
    };

    Ok(FetchHgResult {
        out_path: checkout_dir.to_string_lossy().to_string(),
        branch,
        rev: resolved_rev,
        short_rev,
        rev_count,
    })
}

fn clone_repo(url: &str, dest: &PathBuf) -> Result<(), HgError> {
    fs::create_dir_all(dest.parent().unwrap_or(dest))?;

    let output = Command::new("hg")
        .args(["clone", "-U", url])
        .arg(dest)
        .env("HGPLAIN", "")
        .output()?;

    if !output.status.success() {
        return Err(HgError::CommandFailed {
            operation: "clone".to_string(),
            message: String::from_utf8_lossy(&output.stderr).to_string(),
        });
    }

    Ok(())
}

fn pull_repo(repo: &PathBuf) -> Result<(), HgError> {
    let output = Command::new("hg")
        .args(["pull"])
        .current_dir(repo)
        .env("HGPLAIN", "")
        .output()?;

    if !output.status.success() {
        return Err(HgError::CommandFailed {
            operation: "pull".to_string(),
            message: String::from_utf8_lossy(&output.stderr).to_string(),
        });
    }

    Ok(())
}

fn resolve_rev(repo: &PathBuf, rev: &str) -> Result<String, HgError> {
    let output = Command::new("hg")
        .args(["log", "-r", rev, "--template", "{node}"])
        .current_dir(repo)
        .env("HGPLAIN", "")
        .output()?;

    if !output.status.success() {
        return Err(HgError::CommandFailed {
            operation: "log".to_string(),
            message: format!(
                "Could not resolve rev '{}': {}",
                rev,
                String::from_utf8_lossy(&output.stderr)
            ),
        });
    }

    Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
}

fn get_branch(repo: &PathBuf, rev: &str) -> Result<String, HgError> {
    let output = Command::new("hg")
        .args(["log", "-r", rev, "--template", "{branch}"])
        .current_dir(repo)
        .env("HGPLAIN", "")
        .output()?;

    if !output.status.success() {
        return Ok("default".to_string());
    }

    let branch = String::from_utf8_lossy(&output.stdout).trim().to_string();
    if branch.is_empty() {
        Ok("default".to_string())
    } else {
        Ok(branch)
    }
}

fn checkout_rev(
    bare_repo: &PathBuf,
    rev: &str,
    name: &str,
    cache: &FetcherCache,
) -> Result<PathBuf, HgError> {
    let hash = crate::nix_hash::sha256_hex(&format!("{}:{}", bare_repo.display(), rev));
    let checkout_dir = cache.make_store_path(&hash, name);

    if checkout_dir.exists() {
        return Ok(checkout_dir);
    }

    fs::create_dir_all(&checkout_dir)?;

    let output = Command::new("hg")
        .args(["archive", "-r", rev])
        .arg(&checkout_dir)
        .current_dir(bare_repo)
        .env("HGPLAIN", "")
        .output()?;

    if !output.status.success() {
        fs::remove_dir_all(&checkout_dir)?;
        return Err(HgError::CommandFailed {
            operation: "archive".to_string(),
            message: String::from_utf8_lossy(&output.stderr).to_string(),
        });
    }

    let hg_archival = checkout_dir.join(".hg_archival.txt");
    if hg_archival.exists() {
        fs::remove_file(&hg_archival)?;
    }

    Ok(checkout_dir)
}

fn get_rev_count(repo: &PathBuf, rev: &str) -> Result<u64, HgError> {
    let output = Command::new("hg")
        .args(["log", "-r", &format!("0::{}", rev), "--template", "x"])
        .current_dir(repo)
        .env("HGPLAIN", "")
        .output()?;

    if !output.status.success() {
        return Ok(0);
    }

    Ok(output.stdout.len() as u64)
}

#[derive(Debug)]
pub enum HgError {
    IoError(std::io::Error),
    CommandFailed { operation: String, message: String },
}

impl std::fmt::Display for HgError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            HgError::IoError(e) => write!(f, "I/O error: {}", e),
            HgError::CommandFailed { operation, message } => {
                write!(f, "Mercurial {} failed: {}", operation, message)
            }
        }
    }
}

impl std::error::Error for HgError {}

impl From<std::io::Error> for HgError {
    fn from(e: std::io::Error) -> Self {
        HgError::IoError(e)
    }
}
240
nix-js/src/fetcher/mod.rs
Normal file
240
nix-js/src/fetcher/mod.rs
Normal file
@@ -0,0 +1,240 @@
|
||||
mod archive;
|
||||
mod cache;
|
||||
mod download;
|
||||
mod git;
|
||||
mod hg;
|
||||
mod nar;
|
||||
|
||||
pub use cache::FetcherCache;
|
||||
pub use download::Downloader;
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use deno_core::op2;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::runtime::NixError;
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct FetchUrlResult {
|
||||
pub store_path: String,
|
||||
pub hash: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct FetchTarballResult {
|
||||
pub store_path: String,
|
||||
pub hash: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct FetchGitResult {
|
||||
pub out_path: String,
|
||||
pub rev: String,
|
||||
pub short_rev: String,
|
||||
pub rev_count: u64,
|
||||
pub last_modified: u64,
|
||||
pub last_modified_date: String,
|
||||
pub submodules: bool,
|
||||
pub nar_hash: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct FetchHgResult {
|
||||
pub out_path: String,
|
||||
pub branch: String,
|
||||
pub rev: String,
|
||||
pub short_rev: String,
|
||||
pub rev_count: u64,
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[serde]
|
||||
pub fn op_fetch_url(
|
||||
#[string] url: String,
|
||||
#[string] expected_hash: Option<String>,
|
||||
#[string] name: Option<String>,
|
||||
executable: bool,
|
||||
) -> Result<FetchUrlResult, NixError> {
|
||||
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
|
||||
let downloader = Downloader::new();
|
||||
|
||||
let file_name = name.unwrap_or_else(|| {
|
||||
url.rsplit('/')
|
||||
.next()
|
||||
.unwrap_or("download")
|
||||
.to_string()
|
||||
});
|
||||
|
||||
if let Some(ref hash) = expected_hash {
|
||||
if let Some(cached) = cache.get_url(&url, hash) {
|
||||
return Ok(FetchUrlResult {
|
||||
store_path: cached.to_string_lossy().to_string(),
|
||||
hash: hash.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let data = downloader
|
||||
.download(&url)
|
||||
.map_err(|e| NixError::from(e.to_string()))?;
|
||||
|
||||
let hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(&data));
|
||||
|
||||
if let Some(ref expected) = expected_hash {
|
||||
let normalized_expected = normalize_hash(expected);
|
||||
if hash != normalized_expected {
|
||||
return Err(NixError::from(format!(
|
||||
"hash mismatch for '{}': expected {}, got {}",
|
||||
url, normalized_expected, hash
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
let store_path = cache
|
||||
.put_url(&url, &hash, &data, &file_name, executable)
|
||||
.map_err(|e| NixError::from(e.to_string()))?;
|
||||
|
||||
Ok(FetchUrlResult {
|
||||
store_path: store_path.to_string_lossy().to_string(),
|
||||
hash,
|
||||
})
|
||||
}
|
||||
|
||||
#[op2]
|
||||
#[serde]
|
||||
pub fn op_fetch_tarball(
|
||||
#[string] url: String,
|
||||
#[string] expected_hash: Option<String>,
|
||||
#[string] name: Option<String>,
|
||||
) -> Result<FetchTarballResult, NixError> {
|
||||
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
|
||||
let downloader = Downloader::new();
|
||||
|
||||
let dir_name = name.unwrap_or_else(|| "source".to_string());
|
||||
|
||||
let is_nar_hash = expected_hash
|
||||
.as_ref()
|
||||
.map(|h| h.starts_with("sha256-"))
|
||||
.unwrap_or(false);
|
||||
|
||||
if let Some(ref hash) = expected_hash {
|
||||
let normalized = normalize_hash(hash);
|
||||
if let Some(cached) = cache.get_tarball(&url, &normalized) {
|
||||
return Ok(FetchTarballResult {
|
||||
store_path: cached.to_string_lossy().to_string(),
|
||||
hash: normalized,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let data = downloader
|
||||
.download(&url)
|
||||
.map_err(|e| NixError::from(e.to_string()))?;
|
||||
|
||||
let temp_dir = tempfile::tempdir().map_err(|e| NixError::from(e.to_string()))?;
|
||||
let extracted_path = archive::extract_archive(&data, &temp_dir.path().to_path_buf())
|
||||
.map_err(|e| NixError::from(e.to_string()))?;
|
||||
|
||||
let nar_hash = nar::compute_nar_hash(&extracted_path)
|
||||
.map_err(|e| NixError::from(e.to_string()))?;
|
||||
|
||||
if let Some(ref expected) = expected_hash {
|
||||
let normalized_expected = normalize_hash(expected);
|
||||
let hash_to_compare = if is_nar_hash { &nar_hash } else { &nar_hash };
|
||||
|
||||
if *hash_to_compare != normalized_expected {
|
||||
return Err(NixError::from(format!(
|
||||
"hash mismatch for '{}': expected {}, got {}",
|
||||
url, normalized_expected, hash_to_compare
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
let store_path = cache
|
||||
.put_tarball_from_extracted(&url, &nar_hash, &extracted_path, &dir_name)
|
||||
.map_err(|e| NixError::from(e.to_string()))?;
|
||||
|
||||
Ok(FetchTarballResult {
|
||||
store_path: store_path.to_string_lossy().to_string(),
|
||||
hash: nar_hash,
|
||||
})
|
||||
}
|
||||
|
||||
#[op2]
#[serde]
pub fn op_fetch_git(
    #[string] url: String,
    #[string] git_ref: Option<String>,
    #[string] rev: Option<String>,
    shallow: bool,
    submodules: bool,
    all_refs: bool,
    #[string] name: Option<String>,
) -> Result<FetchGitResult, NixError> {
    let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
    let dir_name = name.unwrap_or_else(|| "source".to_string());

    git::fetch_git(
        &cache,
        &url,
        git_ref.as_deref(),
        rev.as_deref(),
        shallow,
        submodules,
        all_refs,
        &dir_name,
    )
    .map_err(|e| NixError::from(e.to_string()))
}
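// `git::fetch_git` is part of this commit but outside this hunk. The usual
// shape is a (possibly shallow) clone followed by a checkout of the requested
// rev; the function below is an illustrative sketch of that sequence under
// those assumptions, not the git module's actual code.
fn clone_and_checkout(
    url: &str,
    rev: Option<&str>,
    shallow: bool,
    dest: &std::path::Path,
) -> std::io::Result<()> {
    use std::process::Command;

    let mut clone = Command::new("git");
    clone.arg("clone");
    if shallow && rev.is_none() {
        // A rev may not be reachable from a depth-1 clone, so only apply
        // --depth when fetching the default branch tip.
        clone.args(["--depth", "1"]);
    }
    clone.arg(url).arg(dest);
    if !clone.status()?.success() {
        return Err(std::io::Error::other("git clone failed"));
    }

    if let Some(rev) = rev {
        let status = Command::new("git")
            .current_dir(dest)
            .args(["checkout", rev])
            .status()?;
        if !status.success() {
            return Err(std::io::Error::other("git checkout failed"));
        }
    }
    Ok(())
}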
#[op2]
#[serde]
pub fn op_fetch_hg(
    #[string] url: String,
    #[string] rev: Option<String>,
    #[string] name: Option<String>,
) -> Result<FetchHgResult, NixError> {
    let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
    let dir_name = name.unwrap_or_else(|| "source".to_string());

    hg::fetch_hg(&cache, &url, rev.as_deref(), &dir_name)
        .map_err(|e| NixError::from(e.to_string()))
}
fn normalize_hash(hash: &str) -> String {
    // SRI-style "sha256-<base64>" hashes are normalized to bare hex so they
    // compare directly against the hex digests computed by the fetchers.
    // (`strip_prefix` already covers the `starts_with` check.)
    if let Some(b64) = hash.strip_prefix("sha256-") {
        if let Ok(bytes) = base64_decode(b64) {
            return hex::encode(bytes);
        }
    }
    hash.to_string()
}
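// A small sanity check for the normalization above, added for exposition,
// using the well-known SHA-256 digest of the empty string in both its SRI
// and hex spellings.
#[cfg(test)]
mod normalize_hash_tests {
    use super::normalize_hash;

    #[test]
    fn sri_hash_normalizes_to_hex() {
        assert_eq!(
            normalize_hash("sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="),
            "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
        );
        // Bare hex (and anything unrecognized) passes through unchanged.
        assert_eq!(normalize_hash("deadbeef"), "deadbeef");
    }
}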
/// Dependency-free decoder for standard (RFC 4648) base64, enough to unpack
/// the payload of an SRI hash.
fn base64_decode(input: &str) -> Result<Vec<u8>, String> {
    const ALPHABET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";

    let input = input.trim_end_matches('=');
    let mut output = Vec::with_capacity(input.len() * 3 / 4);

    let mut buffer = 0u32;
    let mut bits = 0;

    for c in input.bytes() {
        let value = ALPHABET
            .iter()
            .position(|&x| x == c)
            .ok_or_else(|| format!("Invalid base64 character: {}", c as char))?;

        buffer = (buffer << 6) | (value as u32);
        bits += 6;

        if bits >= 8 {
            bits -= 8;
            output.push((buffer >> bits) as u8);
            buffer &= (1 << bits) - 1;
        }
    }

    Ok(output)
}
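// Round-trip check for the decoder above, added for exposition
// ("aGVsbG8=" is the base64 encoding of "hello").
#[cfg(test)]
mod base64_tests {
    use super::base64_decode;

    #[test]
    fn decodes_padded_input() {
        assert_eq!(base64_decode("aGVsbG8=").unwrap(), b"hello");
    }

    #[test]
    fn rejects_invalid_characters() {
        assert!(base64_decode("a!b").is_err());
    }
}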
pub fn register_ops() -> Vec<deno_core::OpDecl> {
    vec![
        op_fetch_url(),
        op_fetch_tarball(),
        op_fetch_git(),
        op_fetch_hg(),
    ]
}
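register_ops() hands the fetcher's OpDecls to the runtime, which splices them into its own op list before constructing the deno_core Extension (see the runtime_extension hunk further down). A minimal sketch of that wiring, with placeholder op names:

    let mut ops = vec![/* core runtime ops, e.g. op_read_file() */];
    ops.extend(crate::fetcher::register_ops());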
126
nix-js/src/fetcher/nar.rs
Normal file
@@ -0,0 +1,126 @@
use sha2::{Digest, Sha256};
use std::fs;
use std::io::{self, Write};
use std::path::Path;

/// Serialize `path` in NAR (Nix ARchive) form and return the hex-encoded
/// SHA-256 of the serialization. NAR is deterministic: only file type,
/// contents, the executable bit, and sorted directory entries are encoded.
pub fn compute_nar_hash(path: &Path) -> Result<String, io::Error> {
    let mut hasher = Sha256::new();
    dump_path(&mut hasher, path)?;
    Ok(hex::encode(hasher.finalize()))
}

fn dump_path<W: Write>(sink: &mut W, path: &Path) -> io::Result<()> {
    write_string(sink, "nix-archive-1")?;
    write_string(sink, "(")?;
    dump_entry(sink, path)?;
    write_string(sink, ")")?;
    Ok(())
}

fn dump_entry<W: Write>(sink: &mut W, path: &Path) -> io::Result<()> {
    let metadata = fs::symlink_metadata(path)?;

    if metadata.is_symlink() {
        let target = fs::read_link(path)?;
        write_string(sink, "type")?;
        write_string(sink, "symlink")?;
        write_string(sink, "target")?;
        write_string(sink, &target.to_string_lossy())?;
    } else if metadata.is_file() {
        write_string(sink, "type")?;
        write_string(sink, "regular")?;

        #[cfg(unix)]
        {
            use std::os::unix::fs::PermissionsExt;
            if metadata.permissions().mode() & 0o111 != 0 {
                write_string(sink, "executable")?;
                write_string(sink, "")?;
            }
        }

        let contents = fs::read(path)?;
        write_string(sink, "contents")?;
        write_contents(sink, &contents)?;
    } else if metadata.is_dir() {
        write_string(sink, "type")?;
        write_string(sink, "directory")?;

        // Entries must be emitted in sorted order for the hash to be
        // deterministic. Propagate read_dir errors instead of silently
        // skipping unreadable entries.
        let mut entries = fs::read_dir(path)?
            .map(|entry| entry.map(|e| e.file_name().to_string_lossy().to_string()))
            .collect::<io::Result<Vec<_>>>()?;
        entries.sort();

        for name in entries {
            write_string(sink, "entry")?;
            write_string(sink, "(")?;
            write_string(sink, "name")?;
            write_string(sink, &name)?;
            write_string(sink, "node")?;
            write_string(sink, "(")?;
            dump_entry(sink, &path.join(&name))?;
            write_string(sink, ")")?;
            write_string(sink, ")")?;
        }
    }

    Ok(())
}

/// NAR framing for strings: identical to `write_contents`, applied to the
/// string's UTF-8 bytes.
fn write_string<W: Write>(sink: &mut W, s: &str) -> io::Result<()> {
    write_contents(sink, s.as_bytes())
}

/// NAR framing: a 64-bit little-endian length, the bytes themselves, then
/// zero padding up to the next 8-byte boundary.
fn write_contents<W: Write>(sink: &mut W, contents: &[u8]) -> io::Result<()> {
    let len = contents.len() as u64;

    sink.write_all(&len.to_le_bytes())?;
    sink.write_all(contents)?;

    let padding = (8 - (len % 8)) % 8;
    for _ in 0..padding {
        sink.write_all(&[0])?;
    }

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    #[test]
    fn test_simple_file() {
        let temp = TempDir::new().unwrap();
        let file_path = temp.path().join("test.txt");
        fs::write(&file_path, "hello").unwrap();

        let hash = compute_nar_hash(&file_path).unwrap();
        assert!(!hash.is_empty());
        assert_eq!(hash.len(), 64);
    }

    #[test]
    fn test_directory() {
        let temp = TempDir::new().unwrap();
        fs::write(temp.path().join("a.txt"), "aaa").unwrap();
        fs::write(temp.path().join("b.txt"), "bbb").unwrap();

        let hash = compute_nar_hash(temp.path()).unwrap();
        assert!(!hash.is_empty());
        assert_eq!(hash.len(), 64);
    }
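    // Worked example of the framing produced by write_string, added for
    // exposition: the NAR magic "nix-archive-1" is 13 bytes, so it is
    // encoded as an 8-byte LE length, the bytes, and 3 bytes of zero
    // padding (24 bytes total).
    #[test]
    fn framed_string_layout() {
        let mut buf = Vec::new();
        write_string(&mut buf, "nix-archive-1").unwrap();

        assert_eq!(&buf[..8], &13u64.to_le_bytes());
        assert_eq!(&buf[8..21], b"nix-archive-1");
        assert_eq!(&buf[21..24], &[0u8, 0, 0]);
        assert_eq!(buf.len(), 24);
    }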
}
@@ -134,7 +134,7 @@ pub fn downgrade_inherit(
         };
         match stcs.entry(ident) {
             Entry::Occupied(occupied) => {
-                return Err(Error::eval_error(format!(
+                return Err(Error::downgrade_error(format!(
                     "attribute '{}' already defined",
                     format_symbol(ctx.get_sym(*occupied.key()))
                 )));
@@ -3,6 +3,7 @@
 mod codegen;
 pub mod context;
 pub mod error;
+mod fetcher;
 mod ir;
 mod nix_hash;
 mod runtime;
@@ -23,7 +23,7 @@ pub(crate) trait RuntimeCtx: 'static {
 fn runtime_extension<Ctx: RuntimeCtx>() -> Extension {
     const ESM: &[ExtensionFileSource] =
         &deno_core::include_js_files!(nix_runtime dir "runtime-ts/dist", "runtime.js");
-    let ops = vec![
+    let mut ops = vec![
         op_import::<Ctx>(),
         op_read_file(),
         op_path_exists(),
@@ -33,6 +33,7 @@ fn runtime_extension<Ctx: RuntimeCtx>() -> Extension {
         op_output_path_name(),
         op_make_fixed_output_path(),
     ];
+    ops.extend(crate::fetcher::register_ops());
 
     Extension {
         name: "nix_runtime",
@@ -69,7 +70,7 @@ mod private {
         }
     }
 }
-use private::NixError;
+pub(crate) use private::NixError;
 
 #[deno_core::op2]
 #[string]
@@ -220,7 +221,7 @@ impl<Ctx: RuntimeCtx> Runtime<Ctx> {
         let global_value = self
             .js_runtime
             .execute_script("<eval>", script)
-            .map_err(|e| Error::eval_error(format!("{}", e.get_message())))?;
+            .map_err(|e| Error::eval_error(format!("{}", e.get_message()), e.stack))?;
 
         // Retrieve scope from JsRuntime
         deno_core::scope!(scope, self.js_runtime);