Compare commits

...

8 Commits

26 changed files with 1039 additions and 187 deletions

View File

@@ -1,6 +1,6 @@
import type { NixValue, NixAttrs, NixString } from "../types"; import type { NixValue, NixAttrs, NixString } from "../types";
import { isStringWithContext } from "../types"; import { isStringWithContext } from "../types";
import { forceNixString, forceAttrs, forceBool, forceList, forceString } from "../type-assert"; import { forceNixString, forceAttrs, forceList, forceString } from "../type-assert";
import { force } from "../thunk"; import { force } from "../thunk";
import { import {
type NixStringContext, type NixStringContext,

View File

@@ -130,7 +130,7 @@ export interface CoerceResult {
*/ */
export const coerceToString = ( export const coerceToString = (
value: NixValue, value: NixValue,
mode: StringCoercionMode = StringCoercionMode.ToString, mode: StringCoercionMode,
copyToStore: boolean = false, copyToStore: boolean = false,
outContext?: NixStringContext, outContext?: NixStringContext,
): string => { ): string => {

View File

@@ -1,14 +1,9 @@
import type { NixValue, NixAttrs } from "../types"; import type { NixValue, NixAttrs } from "../types";
import { forceString, forceList, forceNixString } from "../type-assert"; import { forceString, forceList } from "../type-assert";
import { force } from "../thunk"; import { force } from "../thunk";
import { type DerivationData, type OutputInfo, generateAterm } from "../derivation-helpers"; import { type DerivationData, type OutputInfo, generateAterm } from "../derivation-helpers";
import { coerceToString, StringCoercionMode } from "./conversion"; import { coerceToString, StringCoercionMode } from "./conversion";
import { import { type NixStringContext, extractInputDrvsAndSrcs, isStringWithContext } from "../string-context";
type NixStringContext,
extractInputDrvsAndSrcs,
isStringWithContext,
HAS_CONTEXT,
} from "../string-context";
import { nixValueToJson } from "../conversion"; import { nixValueToJson } from "../conversion";
const forceAttrs = (value: NixValue): NixAttrs => { const forceAttrs = (value: NixValue): NixAttrs => {

View File

@@ -4,7 +4,7 @@
import { CatchableError, HAS_CONTEXT, type NixValue } from "../types"; import { CatchableError, HAS_CONTEXT, type NixValue } from "../types";
import { force } from "../thunk"; import { force } from "../thunk";
import { forceString } from "../type-assert"; import { coerceToString, StringCoercionMode } from "./conversion";
export const seq = export const seq =
(e1: NixValue) => (e1: NixValue) =>
@@ -34,7 +34,7 @@ export const abort = (s: NixValue): never => {
}; };
export const throwFunc = (s: NixValue): never => { export const throwFunc = (s: NixValue): never => {
throw new CatchableError(forceString(s)); throw new CatchableError(coerceToString(s, StringCoercionMode.Base));
}; };
export const trace = (e1: NixValue, e2: NixValue): NixValue => { export const trace = (e1: NixValue, e2: NixValue): NixValue => {

View File

@@ -175,6 +175,8 @@ export const builtins: any = {
substring: mkPrimop(string.substring, "substring", 3), substring: mkPrimop(string.substring, "substring", 3),
concatStringsSep: mkPrimop(string.concatStringsSep, "concatStringsSep", 2), concatStringsSep: mkPrimop(string.concatStringsSep, "concatStringsSep", 2),
baseNameOf: mkPrimop(string.baseNameOf, "baseNameOf", 1), baseNameOf: mkPrimop(string.baseNameOf, "baseNameOf", 1),
match: mkPrimop(string.match, "match", 2),
split: mkPrimop(string.split, "split", 2),
seq: mkPrimop(functional.seq, "seq", 2), seq: mkPrimop(functional.seq, "seq", 2),
deepSeq: mkPrimop(functional.deepSeq, "deepSeq", 2), deepSeq: mkPrimop(functional.deepSeq, "deepSeq", 2),
@@ -234,14 +236,12 @@ export const builtins: any = {
functionArgs: mkPrimop(misc.functionArgs, "functionArgs", 1), functionArgs: mkPrimop(misc.functionArgs, "functionArgs", 1),
genericClosure: mkPrimop(misc.genericClosure, "genericClosure", 1), genericClosure: mkPrimop(misc.genericClosure, "genericClosure", 1),
getFlake: mkPrimop(misc.getFlake, "getFlake", 1), getFlake: mkPrimop(misc.getFlake, "getFlake", 1),
match: mkPrimop(misc.match, "match", 2),
outputOf: mkPrimop(misc.outputOf, "outputOf", 2), outputOf: mkPrimop(misc.outputOf, "outputOf", 2),
parseDrvName: mkPrimop(misc.parseDrvName, "parseDrvName", 1), parseDrvName: mkPrimop(misc.parseDrvName, "parseDrvName", 1),
parseFlakeName: mkPrimop(misc.parseFlakeName, "parseFlakeName", 1), parseFlakeName: mkPrimop(misc.parseFlakeName, "parseFlakeName", 1),
parseFlakeRef: mkPrimop(misc.parseFlakeRef, "parseFlakeRef", 1), parseFlakeRef: mkPrimop(misc.parseFlakeRef, "parseFlakeRef", 1),
placeholder: mkPrimop(misc.placeholder, "placeholder", 1), placeholder: mkPrimop(misc.placeholder, "placeholder", 1),
replaceStrings: mkPrimop(misc.replaceStrings, "replaceStrings", 3), replaceStrings: mkPrimop(misc.replaceStrings, "replaceStrings", 3),
split: mkPrimop(misc.split, "split", 2),
splitVersion: mkPrimop(misc.splitVersion, "splitVersion", 1), splitVersion: mkPrimop(misc.splitVersion, "splitVersion", 1),
traceVerbose: mkPrimop(misc.traceVerbose, "traceVerbose", 2), traceVerbose: mkPrimop(misc.traceVerbose, "traceVerbose", 2),
tryEval: mkPrimop(misc.tryEval, "tryEval", 1), tryEval: mkPrimop(misc.tryEval, "tryEval", 1),
@@ -259,6 +259,6 @@ export const builtins: any = {
langVersion: 6, langVersion: 6,
nixPath: [], nixPath: [],
nixVersion: "NIX_JS_VERSION", nixVersion: "2.31.2",
storeDir: "/nix/store", storeDir: "/nix/store",
}; };

View File

@@ -6,13 +6,13 @@
import { forceAttrs, forceBool, forceString } from "../type-assert"; import { forceAttrs, forceBool, forceString } from "../type-assert";
import type { NixValue, NixAttrs } from "../types"; import type { NixValue, NixAttrs } from "../types";
import { force } from "../thunk"; import { force } from "../thunk";
import { coerceToString, StringCoercionMode } from "./conversion";
// Declare Deno.core.ops global (provided by deno_core runtime) // Declare Deno.core.ops global (provided by deno_core runtime)
export const importFunc = (path: NixValue): NixValue => { export const importFunc = (path: NixValue): NixValue => {
// For MVP: only support string paths // TODO: context?
// TODO: After implementing path type, also accept path values const pathStr = coerceToString(path, StringCoercionMode.Base);
const pathStr = forceString(path);
// Call Rust op - returns JS code string // Call Rust op - returns JS code string
const code = Deno.core.ops.op_import(pathStr); const code = Deno.core.ops.op_import(pathStr);
@@ -42,6 +42,7 @@ interface FetchUrlResult {
interface FetchTarballResult { interface FetchTarballResult {
store_path: string; store_path: string;
hash: string; hash: string;
nar_hash: string;
} }
interface FetchGitResult { interface FetchGitResult {
@@ -79,6 +80,26 @@ const normalizeUrlInput = (
return { url, hash, name, executable }; return { url, hash, name, executable };
}; };
const normalizeTarballInput = (
args: NixValue,
): { url: string; hash?: string; narHash?: string; name?: string } => {
const forced = force(args);
if (typeof forced === "string") {
return { url: forced };
}
const attrs = forceAttrs(args);
const url = forceString(attrs.url);
const hash = "hash" in attrs ? forceString(attrs.hash) : undefined;
const narHash =
"narHash" in attrs
? forceString(attrs.narHash)
: "sha256" in attrs
? forceString(attrs.sha256)
: undefined;
const name = "name" in attrs ? forceString(attrs.name) : undefined;
return { url, hash, narHash, name };
};
export const fetchurl = (args: NixValue): string => { export const fetchurl = (args: NixValue): string => {
const { url, hash, name, executable } = normalizeUrlInput(args); const { url, hash, name, executable } = normalizeUrlInput(args);
const result: FetchUrlResult = Deno.core.ops.op_fetch_url( const result: FetchUrlResult = Deno.core.ops.op_fetch_url(
@@ -91,8 +112,13 @@ export const fetchurl = (args: NixValue): string => {
}; };
export const fetchTarball = (args: NixValue): string => { export const fetchTarball = (args: NixValue): string => {
const { url, hash, name } = normalizeUrlInput(args); const { url, hash, narHash, name } = normalizeTarballInput(args);
const result: FetchTarballResult = Deno.core.ops.op_fetch_tarball(url, hash ?? null, name ?? null); const result: FetchTarballResult = Deno.core.ops.op_fetch_tarball(
url,
hash ?? null,
narHash ?? null,
name ?? null,
);
return result.store_path; return result.store_path;
}; };
@@ -191,15 +217,36 @@ const fetchGitForge = (forge: string, attrs: NixAttrs): NixAttrs => {
const owner = forceString(attrs.owner); const owner = forceString(attrs.owner);
const repo = forceString(attrs.repo); const repo = forceString(attrs.repo);
const rev = "rev" in attrs ? forceString(attrs.rev) : "ref" in attrs ? forceString(attrs.ref) : "HEAD"; const rev = "rev" in attrs ? forceString(attrs.rev) : "ref" in attrs ? forceString(attrs.ref) : "HEAD";
const host = "host" in attrs ? forceString(attrs.host) : undefined;
const baseUrls: Record<string, string> = { let tarballUrl: string;
github: "https://github.com", switch (forge) {
gitlab: "https://gitlab.com", case "github": {
sourcehut: "https://git.sr.ht", const apiHost = host || "github.com";
tarballUrl = `https://api.${apiHost}/repos/${owner}/${repo}/tarball/${rev}`;
break;
}
case "gitlab": {
const glHost = host || "gitlab.com";
tarballUrl = `https://${glHost}/api/v4/projects/${owner}%2F${repo}/repository/archive.tar.gz?sha=${rev}`;
break;
}
case "sourcehut": {
const shHost = host || "git.sr.ht";
tarballUrl = `https://${shHost}/${owner}/${repo}/archive/${rev}.tar.gz`;
break;
}
default:
throw new Error(`Unknown forge type: ${forge}`);
}
const outPath = fetchTarball({ url: tarballUrl, ...attrs });
return {
outPath,
rev,
shortRev: rev.substring(0, 7),
}; };
const url = `${baseUrls[forge]}/${owner}/${repo}`;
return fetchGit({ ...attrs, url, rev });
}; };
const autoDetectAndFetch = (attrs: NixAttrs): NixAttrs => { const autoDetectAndFetch = (attrs: NixAttrs): NixAttrs => {

View File

@@ -5,7 +5,7 @@
import { force } from "../thunk"; import { force } from "../thunk";
import { CatchableError } from "../types"; import { CatchableError } from "../types";
import type { NixBool, NixStrictValue, NixValue } from "../types"; import type { NixBool, NixStrictValue, NixValue } from "../types";
import { forceList, forceAttrs, forceFunction } from "../type-assert"; import { forceList, forceAttrs, forceFunction, forceString } from "../type-assert";
import * as context from "./context"; import * as context from "./context";
export const addErrorContext = export const addErrorContext =
@@ -48,10 +48,98 @@ export const addDrvOutputDependencies = context.addDrvOutputDependencies;
export const compareVersions = export const compareVersions =
(s1: NixValue) => (s1: NixValue) =>
(s2: NixValue): never => { (s2: NixValue): NixValue => {
throw new Error("Not implemented: compareVersions"); const str1 = forceString(s1);
const str2 = forceString(s2);
let i1 = 0;
let i2 = 0;
while (i1 < str1.length || i2 < str2.length) {
const c1 = nextComponent(str1, i1);
const c2 = nextComponent(str2, i2);
i1 = c1.nextIndex;
i2 = c2.nextIndex;
if (componentsLT(c1.component, c2.component)) {
return -1n;
} else if (componentsLT(c2.component, c1.component)) {
return 1n;
}
}
return 0n;
}; };
interface ComponentResult {
component: string;
nextIndex: number;
}
function nextComponent(s: string, startIdx: number): ComponentResult {
let p = startIdx;
// Skip any dots and dashes (component separators)
while (p < s.length && (s[p] === "." || s[p] === "-")) {
p++;
}
if (p >= s.length) {
return { component: "", nextIndex: p };
}
const start = p;
// If the first character is a digit, consume the longest sequence of digits
if (s[p] >= "0" && s[p] <= "9") {
while (p < s.length && s[p] >= "0" && s[p] <= "9") {
p++;
}
} else {
// Otherwise, consume the longest sequence of non-digit, non-separator characters
while (p < s.length && !(s[p] >= "0" && s[p] <= "9") && s[p] !== "." && s[p] !== "-") {
p++;
}
}
return { component: s.substring(start, p), nextIndex: p };
}
function componentsLT(c1: string, c2: string): boolean {
const n1 = c1.match(/^[0-9]+$/) ? BigInt(c1) : null;
const n2 = c2.match(/^[0-9]+$/) ? BigInt(c2) : null;
// Both are numbers: compare numerically
if (n1 !== null && n2 !== null) {
return n1 < n2;
}
// Empty string < number
if (c1 === "" && n2 !== null) {
return true;
}
// Special case: "pre" comes before everything except another "pre"
if (c1 === "pre" && c2 !== "pre") {
return true;
}
if (c2 === "pre") {
return false;
}
// Assume that `2.3a' < `2.3.1'
if (n2 !== null) {
return true;
}
if (n1 !== null) {
return false;
}
// Both are strings: compare lexicographically
return c1 < c2;
}
export const dirOf = (s: NixValue): never => { export const dirOf = (s: NixValue): never => {
throw new Error("Not implemented: dirOf"); throw new Error("Not implemented: dirOf");
}; };
@@ -72,12 +160,6 @@ export const getFlake = (attrs: NixValue): never => {
throw new Error("Not implemented: getFlake"); throw new Error("Not implemented: getFlake");
}; };
export const match =
(regex: NixValue) =>
(str: NixValue): never => {
throw new Error("Not implemented: match");
};
export const outputOf = export const outputOf =
(drv: NixValue) => (drv: NixValue) =>
(out: NixValue): never => { (out: NixValue): never => {
@@ -103,16 +185,77 @@ export const placeholder = (output: NixValue): never => {
export const replaceStrings = export const replaceStrings =
(from: NixValue) => (from: NixValue) =>
(to: NixValue) => (to: NixValue) =>
(s: NixValue): never => { (s: NixValue): NixValue => {
throw new Error("Not implemented: replaceStrings"); const fromList = forceList(from);
const toList = forceList(to);
const inputStr = forceString(s);
if (fromList.length !== toList.length) {
throw new Error(
"'from' and 'to' arguments passed to builtins.replaceStrings have different lengths"
);
}
const toCache = new Map<number, string>();
let result = "";
let pos = 0;
while (pos <= inputStr.length) {
let found = false;
for (let i = 0; i < fromList.length; i++) {
const pattern = forceString(fromList[i]);
if (inputStr.substring(pos).startsWith(pattern)) {
found = true;
if (!toCache.has(i)) {
toCache.set(i, forceString(toList[i]));
}
const replacement = toCache.get(i)!;
result += replacement;
if (pattern.length === 0) {
if (pos < inputStr.length) {
result += inputStr[pos];
}
pos++;
} else {
pos += pattern.length;
}
break;
}
}
if (!found) {
if (pos < inputStr.length) {
result += inputStr[pos];
}
pos++;
}
}
return result;
}; };
export const split = (regex: NixValue, str: NixValue): never => {
throw new Error("Not implemented: split");
};
export const splitVersion = (s: NixValue): never => { export const splitVersion = (s: NixValue): NixValue => {
throw new Error("Not implemented: splitVersion"); const version = forceString(s);
const components: string[] = [];
let idx = 0;
while (idx < version.length) {
const result = nextComponent(version, idx);
if (result.component === "") {
break;
}
components.push(result.component);
idx = result.nextIndex;
}
return components;
}; };
export const traceVerbose = (e1: NixValue, e2: NixValue): never => { export const traceVerbose = (e1: NixValue, e2: NixValue): never => {

View File

@@ -4,6 +4,7 @@
import type { NixInt, NixValue } from "../types"; import type { NixInt, NixValue } from "../types";
import { forceString, forceList, forceInt } from "../type-assert"; import { forceString, forceList, forceInt } from "../type-assert";
import { coerceToString, StringCoercionMode } from "./conversion";
export const stringLength = (e: NixValue): NixInt => BigInt(forceString(e).length); export const stringLength = (e: NixValue): NixInt => BigInt(forceString(e).length);
@@ -20,7 +21,10 @@ export const substring =
export const concatStringsSep = export const concatStringsSep =
(sep: NixValue) => (sep: NixValue) =>
(list: NixValue): string => (list: NixValue): string =>
forceList(list).join(forceString(sep)); // FIXME: context?
forceList(list)
.map((elem) => coerceToString(elem, StringCoercionMode.Interpolation))
.join(forceString(sep));
export const baseNameOf = (x: NixValue): string => { export const baseNameOf = (x: NixValue): string => {
const str = forceString(x); const str = forceString(x);
@@ -38,3 +42,122 @@ export const baseNameOf = (x: NixValue): string => {
return str.substring(pos, last + 1); return str.substring(pos, last + 1);
}; };
const POSIX_CLASSES: Record<string, string> = {
alnum: "a-zA-Z0-9",
alpha: "a-zA-Z",
blank: " \\t",
digit: "0-9",
lower: "a-z",
upper: "A-Z",
space: "\\s",
xdigit: "0-9A-Fa-f",
punct: "\\-!\"#$%&'()*+,./:;<=>?@[\\\\\\]^_`{|}~",
};
function posixToJsRegex(pattern: string, fullMatch: boolean = false): RegExp {
let jsPattern = pattern;
jsPattern = jsPattern.replace(/\[(\^?)(?:\[:(\w+):\])+\]/g, (match) => {
const isNegated = match[1] === "^";
const classNames = [...match.matchAll(/\[:(\w+):\]/g)].map((m) => m[1]);
const combined = classNames
.map((className) => {
const replacement = POSIX_CLASSES[className];
if (!replacement) {
throw new Error(`Unknown POSIX character class: ${className}`);
}
return replacement;
})
.join("");
return isNegated ? `[^${combined}]` : `[${combined}]`;
});
jsPattern = jsPattern.replace(/\[:(\w+):\]/g, (_match, className) => {
const replacement = POSIX_CLASSES[className];
if (!replacement) {
throw new Error(`Unknown POSIX character class: ${className}`);
}
return replacement;
});
if (fullMatch) {
if (!jsPattern.startsWith("^")) {
jsPattern = "^" + jsPattern;
}
if (!jsPattern.endsWith("$")) {
jsPattern = jsPattern + "$";
}
}
return new RegExp(jsPattern, "u");
}
export const match =
(regex: NixValue) =>
(str: NixValue): NixValue => {
const regexStr = forceString(regex);
const inputStr = forceString(str);
try {
const re = posixToJsRegex(regexStr, true);
const result = inputStr.match(re);
if (!result) {
return null;
}
const groups: NixValue[] = [];
for (let i = 1; i < result.length; i++) {
groups.push(result[i] !== undefined ? result[i] : null);
}
return groups;
} catch (e) {
throw new Error(`Invalid regular expression '${regexStr}': ${e}`);
}
};
export const split =
(regex: NixValue) =>
(str: NixValue): NixValue => {
const regexStr = forceString(regex);
const inputStr = forceString(str);
try {
const re = posixToJsRegex(regexStr);
const reGlobal = new RegExp(re.source, re.flags + "g");
const result: NixValue[] = [];
let lastIndex = 0;
let match: RegExpExecArray | null;
while ((match = reGlobal.exec(inputStr)) !== null) {
result.push(inputStr.substring(lastIndex, match.index));
const groups: NixValue[] = [];
for (let i = 1; i < match.length; i++) {
groups.push(match[i] !== undefined ? match[i] : null);
}
result.push(groups);
lastIndex = match.index + match[0].length;
if (match[0].length === 0) {
reGlobal.lastIndex++;
}
}
if (lastIndex === 0) {
return [inputStr];
}
result.push(inputStr.substring(lastIndex));
return result;
} catch (e) {
throw new Error(`Invalid regular expression '${regexStr}': ${e}`);
}
};

View File

@@ -35,6 +35,7 @@ export const nixValueToJson = (
if (seen.has(v)) { if (seen.has(v)) {
throw new Error("derivation: circular reference detected in __structuredAttrs"); throw new Error("derivation: circular reference detected in __structuredAttrs");
} }
// FIXME: tryAttrsToString
seen.add(v); seen.add(v);
} }

View File

@@ -50,7 +50,7 @@ export const select = (obj: NixValue, attrpath: NixValue[]): NixValue => {
let attrs = forceAttrs(obj); let attrs = forceAttrs(obj);
for (const attr of attrpath.slice(0, -1)) { for (const attr of attrpath.slice(0, -1)) {
const key = forceString(attr) const key = forceString(attr);
if (!(key in attrs)) { if (!(key in attrs)) {
throw new Error(`Attribute '${key}' not found`); throw new Error(`Attribute '${key}' not found`);
} }
@@ -63,7 +63,7 @@ export const select = (obj: NixValue, attrpath: NixValue[]): NixValue => {
attrs = cur; attrs = cur;
} }
const last = forceString(attrpath[attrpath.length - 1]) const last = forceString(attrpath[attrpath.length - 1]);
if (!(last in attrs)) { if (!(last in attrs)) {
throw new Error(`Attribute '${last}' not found`); throw new Error(`Attribute '${last}' not found`);
} }
@@ -74,9 +74,9 @@ export const selectWithDefault = (obj: NixValue, attrpath: NixValue[], default_v
let attrs = forceAttrs(obj); let attrs = forceAttrs(obj);
for (const attr of attrpath.slice(0, -1)) { for (const attr of attrpath.slice(0, -1)) {
const key = forceString(attr) const key = forceString(attr);
if (!(key in attrs)) { if (!(key in attrs)) {
return default_val return default_val;
} }
const cur = force(attrs[key]); const cur = force(attrs[key]);
if (!isAttrs(cur)) { if (!isAttrs(cur)) {
@@ -87,7 +87,7 @@ export const selectWithDefault = (obj: NixValue, attrpath: NixValue[], default_v
const last = forceString(attrpath[attrpath.length - 1]); const last = forceString(attrpath[attrpath.length - 1]);
if (last in attrs) { if (last in attrs) {
return attrs[last] return attrs[last];
} }
return default_val; return default_val;
}; };

View File

@@ -175,6 +175,26 @@ export const op = {
}, },
update: (a: NixValue, b: NixValue): NixAttrs => { update: (a: NixValue, b: NixValue): NixAttrs => {
return { ...forceAttrs(a), ...forceAttrs(b) }; const forcedA = forceAttrs(a);
const forcedB = forceAttrs(b);
const newAttrs: NixAttrs = {};
for (const key in forcedA) {
Object.defineProperty(newAttrs, key, {
get: () => force(forcedA[key]),
enumerable: true,
configurable: true,
});
}
for (const key in forcedB) {
Object.defineProperty(newAttrs, key, {
get: () => force(forcedB[key]),
enumerable: true,
configurable: true,
});
}
return newAttrs;
}, },
}; };

View File

@@ -62,28 +62,6 @@ export const mergeContexts = (...contexts: NixStringContext[]): NixStringContext
return result; return result;
}; };
export const concatStringsWithContext = (
strings: (string | StringWithContext)[],
): string | StringWithContext => {
const parts: string[] = [];
const contexts: NixStringContext[] = [];
for (const s of strings) {
parts.push(getStringValue(s));
const ctx = getStringContext(s);
if (ctx.size > 0) {
contexts.push(ctx);
}
}
const value = parts.join("");
if (contexts.length === 0) {
return value;
}
return mkStringWithContext(value, mergeContexts(...contexts));
};
export const encodeContextElem = (elem: StringContextElem): string => { export const encodeContextElem = (elem: StringContextElem): string => {
switch (elem.type) { switch (elem.type) {
case "opaque": case "opaque":

View File

@@ -81,7 +81,7 @@ export const force = (value: NixValue): NixStrictValue => {
if (value.func === undefined) { if (value.func === undefined) {
if (value.result === undefined) { if (value.result === undefined) {
const thunk = value as NixThunk; const thunk = value as NixThunk;
let msg = `infinite recursion encountered (blackhole) at ${thunk}\n`; let msg = `infinite recursion encountered at ${thunk}\n`;
msg += "Force chain (most recent first):\n"; msg += "Force chain (most recent first):\n";
for (let i = forceStack.length - 1; i >= 0; i--) { for (let i = forceStack.length - 1; i >= 0; i--) {
const t = forceStack[i]; const t = forceStack[i];

View File

@@ -8,6 +8,7 @@ interface FetchUrlResult {
interface FetchTarballResult { interface FetchTarballResult {
store_path: string; store_path: string;
hash: string; hash: string;
nar_hash: string;
} }
interface FetchGitResult { interface FetchGitResult {
@@ -56,6 +57,7 @@ declare global {
function op_fetch_tarball( function op_fetch_tarball(
url: string, url: string,
expected_hash: string | null, expected_hash: string | null,
expected_nar_hash: string | null,
name: string | null, name: string | null,
): FetchTarballResult; ): FetchTarballResult;
function op_fetch_git( function op_fetch_git(

View File

@@ -39,9 +39,11 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
match self { match self {
Ir::Int(int) => format!("{int}n"), // Generate BigInt literal Ir::Int(int) => format!("{int}n"), // Generate BigInt literal
Ir::Float(float) => float.to_string(), Ir::Float(float) => float.to_string(),
Ir::Bool(bool) => bool.to_string(),
Ir::Null(_) => "null".to_string(),
Ir::Str(s) => s.val.escape_quote(), Ir::Str(s) => s.val.escape_quote(),
Ir::Path(p) => { Ir::Path(p) => {
// Path needs runtime resolution for interpolated paths // Path needs runtime resolution
let path_expr = ctx.get_ir(p.expr).compile(ctx); let path_expr = ctx.get_ir(p.expr).compile(ctx);
format!("Nix.resolvePath({})", path_expr) format!("Nix.resolvePath({})", path_expr)
} }
@@ -76,7 +78,7 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
&Ir::Assert(Assert { assertion, expr }) => { &Ir::Assert(Assert { assertion, expr }) => {
let assertion = ctx.get_ir(assertion).compile(ctx); let assertion = ctx.get_ir(assertion).compile(ctx);
let expr = ctx.get_ir(expr).compile(ctx); let expr = ctx.get_ir(expr).compile(ctx);
format!("({assertion})?({expr}):(()=>{{throw \"assertion failed\"}})()") format!("({assertion})?({expr}):(()=>{{throw new Error(\"assertion failed\")}})()")
} }
} }
} }
@@ -104,8 +106,8 @@ impl<Ctx: CodegenContext> Compile<Ctx> for BinOp {
Impl => format!("(!Nix.force({}) || Nix.force({}))", lhs, rhs), Impl => format!("(!Nix.force({}) || Nix.force({}))", lhs, rhs),
Con => format!("Nix.op.concat({},{})", lhs, rhs), Con => format!("Nix.op.concat({},{})", lhs, rhs),
Upd => format!("Nix.op.update({},{})", lhs, rhs), Upd => format!("Nix.op.update({},{})", lhs, rhs),
PipeL => format!("Nix.force({})({})", rhs, lhs), PipeL => format!("Nix.call({}, {})", rhs, lhs),
PipeR => format!("Nix.force({})({})", lhs, rhs), PipeR => format!("Nix.call({}, {})", lhs, rhs),
} }
} }
} }
@@ -184,37 +186,46 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Call {
} }
} }
/// Determines if a Thunk should be kept (not unwrapped) for non-recursive let bindings.
/// Returns true for complex expressions that should remain lazy to preserve Nix semantics.
fn should_keep_thunk(ir: &Ir) -> bool {
match ir {
// Simple literals can be evaluated eagerly
Ir::Int(_) | Ir::Float(_) | Ir::Bool(_) | Ir::Null(_) | Ir::Str(_) => false,
// Builtin references are safe to evaluate eagerly
Ir::Builtin(_) | Ir::Builtins(_) => false,
Ir::ExprRef(_) => false,
// Everything else should remain lazy:
_ => true,
}
}
fn unwrap_thunk(ir: &Ir, ctx: &impl CodegenContext) -> String {
if let Ir::Thunk(inner) = ir {
let inner_ir = ctx.get_ir(*inner);
if should_keep_thunk(inner_ir) {
ir.compile(ctx)
} else {
inner_ir.compile(ctx)
}
} else {
ir.compile(ctx)
}
}
impl<Ctx: CodegenContext> Compile<Ctx> for Let { impl<Ctx: CodegenContext> Compile<Ctx> for Let {
fn compile(&self, ctx: &Ctx) -> String { fn compile(&self, ctx: &Ctx) -> String {
let info = &self.binding_sccs; let info = &self.binding_sccs;
let mut js_statements = Vec::new(); let mut js_statements = Vec::new();
for (scc_exprs, is_recursive) in info.sccs.iter() { for (scc_exprs, is_recursive) in info.sccs.iter() {
if *is_recursive { for &expr in scc_exprs {
for &expr in scc_exprs { let value = if *is_recursive {
js_statements.push(format!("let expr{}", expr.0)); ctx.get_ir(expr).compile(ctx)
} } else {
for &expr in scc_exprs { unwrap_thunk(ctx.get_ir(expr), ctx)
let value = ctx.get_ir(expr).compile(ctx); };
js_statements.push(format!("expr{}={}", expr.0, value)); js_statements.push(format!("const expr{}={}", expr.0, value));
}
} else {
for &expr in scc_exprs {
let ir = ctx.get_ir(expr);
let value = if let Ir::Thunk(inner) = ir {
let inner_ir = ctx.get_ir(*inner);
// Don't unwrap Thunk if inner is a Let expression
// to avoid generating IIFE that executes immediately
if matches!(inner_ir, Ir::Let(_)) {
ir.compile(ctx)
} else {
inner_ir.compile(ctx)
}
} else {
ir.compile(ctx)
};
js_statements.push(format!("const expr{}={}", expr.0, value));
}
} }
} }

View File

@@ -81,7 +81,7 @@ impl Context {
let ctx = guard.as_ctx(); let ctx = guard.as_ctx();
let code = ctx.compile_code(expr)?; let code = ctx.compile_code(expr)?;
self.runtime.eval(code, CtxPtr::new(&mut self.ctx)) self.runtime.eval(format!("Nix.force({code})"), CtxPtr::new(&mut self.ctx))
} }
pub fn compile_code(&mut self, expr: &str) -> Result<String> { pub fn compile_code(&mut self, expr: &str) -> Result<String> {
@@ -116,9 +116,6 @@ impl Default for Ctx {
global.insert(builtins_sym, builtins_expr); global.insert(builtins_sym, builtins_expr);
let free_globals = [ let free_globals = [
"true",
"false",
"null",
"abort", "abort",
"baseNameOf", "baseNameOf",
"break", "break",
@@ -139,6 +136,11 @@ impl Default for Ctx {
"throw", "throw",
"toString", "toString",
]; ];
let consts = [
("true", Ir::Bool(true)),
("false", Ir::Bool(false)),
("null", Ir::Null(())),
];
for name in free_globals { for name in free_globals {
let name_sym = symbols.get_or_intern(name); let name_sym = symbols.get_or_intern(name);
@@ -146,6 +148,12 @@ impl Default for Ctx {
irs.push(Builtin(name_sym).to_ir()); irs.push(Builtin(name_sym).to_ir());
global.insert(name_sym, id); global.insert(name_sym, id);
} }
for (name, value) in consts {
let name_sym = symbols.get_or_intern(name);
let id = ExprId(irs.len());
irs.push(value);
global.insert(name_sym, id);
}
Self { Self {
symbols, symbols,
@@ -195,7 +203,7 @@ impl Ctx {
} else { } else {
"" ""
}; };
let code = format!("({}Nix.force({}))", debug_prefix, code); let code = format!("({}{})", debug_prefix, code);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
eprintln!("[DEBUG] generated code: {}", &code); eprintln!("[DEBUG] generated code: {}", &code);
Ok(code) Ok(code)

View File

@@ -231,10 +231,6 @@ impl DowngradeContext for DowngradeCtx<'_> {
f(guard.as_ctx()) f(guard.as_ctx())
} }
fn get_current_dir(&self) -> std::path::PathBuf {
self.ctx.get_current_dir()
}
fn push_dep_tracker(&mut self, slots: &[ExprId]) { fn push_dep_tracker(&mut self, slots: &[ExprId]) {
let mut graph = Graph::new(); let mut graph = Graph::new();
let mut expr_to_node = HashMap::new(); let mut expr_to_node = HashMap::new();

View File

@@ -23,6 +23,7 @@ pub struct FetchUrlResult {
pub struct FetchTarballResult { pub struct FetchTarballResult {
pub store_path: String, pub store_path: String,
pub hash: String, pub hash: String,
pub nar_hash: String,
} }
#[derive(Serialize)] #[derive(Serialize)]
@@ -54,6 +55,8 @@ pub fn op_fetch_url(
#[string] name: Option<String>, #[string] name: Option<String>,
executable: bool, executable: bool,
) -> Result<FetchUrlResult, NixError> { ) -> Result<FetchUrlResult, NixError> {
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchurl: {}", url);
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?; let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let downloader = Downloader::new(); let downloader = Downloader::new();
@@ -100,40 +103,87 @@ pub fn op_fetch_url(
pub fn op_fetch_tarball( pub fn op_fetch_tarball(
#[string] url: String, #[string] url: String,
#[string] expected_hash: Option<String>, #[string] expected_hash: Option<String>,
#[string] expected_nar_hash: Option<String>,
#[string] name: Option<String>, #[string] name: Option<String>,
) -> Result<FetchTarballResult, NixError> { ) -> Result<FetchTarballResult, NixError> {
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: url={}, expected_hash={:?}, expected_nar_hash={:?}", url, expected_hash, expected_nar_hash);
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?; let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let downloader = Downloader::new(); let downloader = Downloader::new();
let dir_name = name.unwrap_or_else(|| "source".to_string()); let dir_name = name.unwrap_or_else(|| "source".to_string());
if let Some(ref hash) = expected_hash { // Try cache lookup with narHash if provided
let normalized = normalize_hash(hash); if let Some(ref nar_hash) = expected_nar_hash {
if let Some(cached) = cache.get_tarball(&url, &normalized) { let normalized = normalize_hash(nar_hash);
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: normalized nar_hash={}", normalized);
if let Some(cached) = cache.get_extracted_tarball(&url, &normalized) {
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: cache HIT (with expected nar_hash)");
// Need to compute tarball hash if not cached
let tarball_hash = expected_hash.as_ref()
.map(|h| normalize_hash(h))
.unwrap_or_else(|| "".to_string());
return Ok(FetchTarballResult { return Ok(FetchTarballResult {
store_path: cached.to_string_lossy().to_string(), store_path: cached.to_string_lossy().to_string(),
hash: normalized, hash: tarball_hash,
nar_hash: normalized,
}); });
} }
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: cache MISS, downloading...");
} else if let Some((cached, cached_nar_hash)) = cache.get_extracted_tarball_by_url(&url) {
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: cache HIT (by URL, nar_hash={})", cached_nar_hash);
let tarball_hash = expected_hash.as_ref()
.map(|h| normalize_hash(h))
.unwrap_or_else(|| "".to_string());
return Ok(FetchTarballResult {
store_path: cached.to_string_lossy().to_string(),
hash: tarball_hash,
nar_hash: cached_nar_hash,
});
} }
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: cache MISS, downloading...");
let data = downloader let data = downloader
.download(&url) .download(&url)
.map_err(|e| NixError::from(e.to_string()))?; .map_err(|e| NixError::from(e.to_string()))?;
// Compute tarball hash (hash of the archive file itself)
let tarball_hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(&data));
// Verify tarball hash if provided
if let Some(ref expected) = expected_hash {
let normalized_expected = normalize_hash(expected);
if tarball_hash != normalized_expected {
return Err(NixError::from(format!(
"Tarball hash mismatch for '{}': expected {}, got {}",
url, normalized_expected, tarball_hash
)));
}
}
let temp_dir = tempfile::tempdir().map_err(|e| NixError::from(e.to_string()))?; let temp_dir = tempfile::tempdir().map_err(|e| NixError::from(e.to_string()))?;
let extracted_path = archive::extract_archive(&data, &temp_dir.path().to_path_buf()) let extracted_path = archive::extract_archive(&data, temp_dir.path())
.map_err(|e| NixError::from(e.to_string()))?; .map_err(|e| NixError::from(e.to_string()))?;
// Compute NAR hash (hash of the extracted content)
let nar_hash = let nar_hash =
nar::compute_nar_hash(&extracted_path).map_err(|e| NixError::from(e.to_string()))?; nar::compute_nar_hash(&extracted_path).map_err(|e| NixError::from(e.to_string()))?;
if let Some(ref expected) = expected_hash { #[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchTarball: computed tarball_hash={}, nar_hash={}", tarball_hash, nar_hash);
// Verify NAR hash if provided
if let Some(ref expected) = expected_nar_hash {
let normalized_expected = normalize_hash(expected); let normalized_expected = normalize_hash(expected);
if nar_hash != normalized_expected { if nar_hash != normalized_expected {
return Err(NixError::from(format!( return Err(NixError::from(format!(
"hash mismatch for '{}': expected {}, got {}", "NAR hash mismatch for '{}': expected {}, got {}",
url, normalized_expected, nar_hash url, normalized_expected, nar_hash
))); )));
} }
@@ -145,7 +195,8 @@ pub fn op_fetch_tarball(
Ok(FetchTarballResult { Ok(FetchTarballResult {
store_path: store_path.to_string_lossy().to_string(), store_path: store_path.to_string_lossy().to_string(),
hash: nar_hash, hash: tarball_hash,
nar_hash,
}) })
} }
@@ -160,6 +211,8 @@ pub fn op_fetch_git(
all_refs: bool, all_refs: bool,
#[string] name: Option<String>, #[string] name: Option<String>,
) -> Result<FetchGitResult, NixError> { ) -> Result<FetchGitResult, NixError> {
#[cfg(debug_assertions)]
eprintln!("[DEBUG] fetchGit: {} (ref: {:?}, rev: {:?})", url, git_ref, rev);
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?; let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let dir_name = name.unwrap_or_else(|| "source".to_string()); let dir_name = name.unwrap_or_else(|| "source".to_string());

View File

@@ -101,7 +101,12 @@ impl FetcherCache {
serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?; serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
if meta.hash == expected_hash { if meta.hash == expected_hash {
Some(data_path) let store_path = self.make_store_path(&meta.hash, &meta.name);
if store_path.exists() {
Some(store_path)
} else {
None
}
} else { } else {
None None
} }
@@ -163,16 +168,37 @@ impl FetcherCache {
let meta_path = cache_dir.join(&key).join(".meta"); let meta_path = cache_dir.join(&key).join(".meta");
let data_dir = cache_dir.join(&key); let data_dir = cache_dir.join(&key);
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: url={}, expected_hash={}", url, expected_hash);
if !meta_path.exists() || !data_dir.exists() { if !meta_path.exists() || !data_dir.exists() {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: cache miss - meta or data dir not found");
return None; return None;
} }
let meta: CacheMetadata = let meta: CacheMetadata =
serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?; serde_json::from_str(&fs::read_to_string(&meta_path).ok()?).ok()?;
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: cached hash={}, name={}", meta.hash, meta.name);
if meta.hash == expected_hash { if meta.hash == expected_hash {
Some(self.make_store_path(&meta.hash, &meta.name)) let store_path = self.make_store_path(&meta.hash, &meta.name);
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: hash match, checking store_path={}", store_path.display());
if store_path.exists() {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: HIT - returning store path");
Some(store_path)
} else {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: store path doesn't exist");
None
}
} else { } else {
#[cfg(debug_assertions)]
eprintln!("[CACHE] get_tarball: hash mismatch (cached={}, expected={})", meta.hash, expected_hash);
None None
} }
} }
@@ -208,6 +234,82 @@ impl FetcherCache {
Ok(store_path) Ok(store_path)
} }
/// Look up a previously extracted tarball for `url` whose cached hash matches
/// `expected_nar_hash`.
///
/// Returns the materialized store path only when the cache metadata, the
/// extracted `content` directory, and the store path itself all exist and the
/// cached hash matches; otherwise `None` (treated as a cache miss by callers).
pub fn get_extracted_tarball(&self, url: &str, expected_nar_hash: &str) -> Option<PathBuf> {
    // One cache entry per URL, keyed by a hash of the URL.
    let entry_dir = self.tarball_cache_dir().join(Self::hash_key(url));
    let meta_file = entry_dir.join(".meta");
    let content_dir = entry_dir.join("content");

    #[cfg(debug_assertions)]
    eprintln!("[CACHE] get_extracted_tarball: url={}, expected_nar_hash={}", url, expected_nar_hash);

    // Guard: both the metadata file and the extracted content must be present.
    if !meta_file.exists() || !content_dir.exists() {
        #[cfg(debug_assertions)]
        eprintln!("[CACHE] get_extracted_tarball: cache miss - meta or content dir not found");
        return None;
    }

    // Unreadable or unparsable metadata is treated as a miss via `ok()?`.
    let meta: CacheMetadata =
        serde_json::from_str(&fs::read_to_string(&meta_file).ok()?).ok()?;

    #[cfg(debug_assertions)]
    eprintln!("[CACHE] get_extracted_tarball: cached hash={}, name={}", meta.hash, meta.name);

    // Guard: the cached hash must match what the caller expects.
    if meta.hash != expected_nar_hash {
        #[cfg(debug_assertions)]
        eprintln!("[CACHE] get_extracted_tarball: hash mismatch (cached={}, expected={})", meta.hash, expected_nar_hash);
        return None;
    }

    let store_path = self.make_store_path(&meta.hash, &meta.name);
    #[cfg(debug_assertions)]
    eprintln!("[CACHE] get_extracted_tarball: hash match, checking store_path={}", store_path.display());

    // The store copy may have been garbage-collected independently of the
    // cache entry, so its existence is verified before returning a hit.
    if store_path.exists() {
        #[cfg(debug_assertions)]
        eprintln!("[CACHE] get_extracted_tarball: HIT - returning store path");
        Some(store_path)
    } else {
        #[cfg(debug_assertions)]
        eprintln!("[CACHE] get_extracted_tarball: store path doesn't exist");
        None
    }
}
/// Look up any previously extracted tarball for `url`, without requiring an
/// expected hash from the caller.
///
/// On a hit, returns the store path together with the hash recorded in the
/// cache metadata so the caller can report it. Returns `None` when the cache
/// entry, the extracted content, or the store path is missing.
pub fn get_extracted_tarball_by_url(&self, url: &str) -> Option<(PathBuf, String)> {
    // One cache entry per URL, keyed by a hash of the URL.
    let entry_dir = self.tarball_cache_dir().join(Self::hash_key(url));
    let meta_file = entry_dir.join(".meta");
    let content_dir = entry_dir.join("content");

    #[cfg(debug_assertions)]
    eprintln!("[CACHE] get_extracted_tarball_by_url: url={}", url);

    // Guard: both the metadata file and the extracted content must be present.
    if !meta_file.exists() || !content_dir.exists() {
        #[cfg(debug_assertions)]
        eprintln!("[CACHE] get_extracted_tarball_by_url: cache miss - meta or content dir not found");
        return None;
    }

    // Unreadable or unparsable metadata is treated as a miss via `ok()?`.
    let meta: CacheMetadata =
        serde_json::from_str(&fs::read_to_string(&meta_file).ok()?).ok()?;

    #[cfg(debug_assertions)]
    eprintln!("[CACHE] get_extracted_tarball_by_url: cached hash={}, name={}", meta.hash, meta.name);

    // Guard: the store copy may have been removed independently of the cache
    // entry, so its existence is verified before returning a hit.
    let store_path = self.make_store_path(&meta.hash, &meta.name);
    if !store_path.exists() {
        #[cfg(debug_assertions)]
        eprintln!("[CACHE] get_extracted_tarball_by_url: store path doesn't exist");
        return None;
    }

    #[cfg(debug_assertions)]
    eprintln!("[CACHE] get_extracted_tarball_by_url: HIT - returning store path and hash");
    Some((store_path, meta.hash))
}
pub fn put_tarball_from_extracted( pub fn put_tarball_from_extracted(
&self, &self,
url: &str, url: &str,
@@ -219,6 +321,9 @@ impl FetcherCache {
let key = Self::hash_key(url); let key = Self::hash_key(url);
let cache_entry_dir = cache_dir.join(&key); let cache_entry_dir = cache_dir.join(&key);
#[cfg(debug_assertions)]
eprintln!("[CACHE] put_tarball_from_extracted: url={}, hash={}, name={}", url, hash, name);
fs::create_dir_all(&cache_entry_dir)?; fs::create_dir_all(&cache_entry_dir)?;
let cached_content = cache_entry_dir.join("content"); let cached_content = cache_entry_dir.join("content");
@@ -234,9 +339,16 @@ impl FetcherCache {
fs::write(cache_entry_dir.join(".meta"), serde_json::to_string(&meta)?)?; fs::write(cache_entry_dir.join(".meta"), serde_json::to_string(&meta)?)?;
let store_path = self.make_store_path(hash, name); let store_path = self.make_store_path(hash, name);
#[cfg(debug_assertions)]
eprintln!("[CACHE] put_tarball_from_extracted: store_path={}", store_path.display());
if !store_path.exists() { if !store_path.exists() {
fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?; fs::create_dir_all(store_path.parent().unwrap_or(&store_path))?;
copy_dir_recursive(extracted_path, &store_path)?; copy_dir_recursive(extracted_path, &store_path)?;
#[cfg(debug_assertions)]
eprintln!("[CACHE] put_tarball_from_extracted: copied to store");
} else {
#[cfg(debug_assertions)]
eprintln!("[CACHE] put_tarball_from_extracted: store path already exists");
} }
Ok(store_path) Ok(store_path)

View File

@@ -98,6 +98,7 @@ fn write_contents<W: Write>(sink: &mut W, contents: &[u8]) -> io::Result<()> {
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests { mod tests {
use super::*; use super::*;
use tempfile::TempDir; use tempfile::TempDir;

View File

@@ -38,8 +38,6 @@ pub trait DowngradeContext {
where where
F: FnOnce(&mut Self) -> R; F: FnOnce(&mut Self) -> R;
fn get_current_dir(&self) -> std::path::PathBuf;
fn push_dep_tracker(&mut self, slots: &[ExprId]); fn push_dep_tracker(&mut self, slots: &[ExprId]);
fn push_dep_tracker_with_owner(&mut self, slots: &[ExprId], owner: ExprId); fn push_dep_tracker_with_owner(&mut self, slots: &[ExprId], owner: ExprId);
fn get_current_binding(&self) -> Option<ExprId>; fn get_current_binding(&self) -> Option<ExprId>;
@@ -52,6 +50,8 @@ ir! {
Int(i64), Int(i64),
Float(f64), Float(f64),
Bool(bool),
Null(()),
Str, Str,
AttrSet, AttrSet,
List, List,

View File

@@ -2,28 +2,10 @@
#![allow(clippy::unwrap_used)] #![allow(clippy::unwrap_used)]
use rnix::ast::{self, Expr, HasEntry}; use rnix::ast::{self, Expr, HasEntry};
use std::path::{Component, Path as StdPath, PathBuf};
use crate::error::{Error, Result}; use crate::error::{Error, Result};
use super::*; use super::*;
fn normalize_path(path: &StdPath) -> String {
let mut normalized = PathBuf::new();
for component in path.components() {
match component {
Component::Prefix(p) => normalized.push(p.as_os_str()),
Component::RootDir => normalized.push("/"),
Component::CurDir => {}
Component::ParentDir => {
normalized.pop();
}
Component::Normal(c) => normalized.push(c),
}
}
normalized.to_string_lossy().to_string()
}
pub trait Downgrade<Ctx: DowngradeContext> { pub trait Downgrade<Ctx: DowngradeContext> {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId>; fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId>;
} }
@@ -75,46 +57,20 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::IfElse {
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Path { impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Path {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> { fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let parts_ast: Vec<_> = self.parts().collect(); let parts = self
let has_interpolation = parts_ast .parts()
.iter() .map(|part| match part {
.any(|part| matches!(part, ast::InterpolPart::Interpolation(_))); ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(
Str {
let parts = if !has_interpolation { val: lit.to_string(),
// Resolve at compile time
let path_str: String = parts_ast
.into_iter()
.filter_map(|part| match part {
ast::InterpolPart::Literal(lit) => Some(lit.to_string()),
_ => None,
})
.collect();
let resolved_path = if path_str.starts_with('/') {
normalize_path(&std::path::PathBuf::from(&path_str))
} else {
let current_dir = ctx.get_current_dir();
normalize_path(&current_dir.join(&path_str))
};
vec![ctx.new_expr(Str { val: resolved_path }.to_ir())]
} else {
// Resolve at runtime
parts_ast
.into_iter()
.map(|part| match part {
ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(
Str {
val: lit.to_string(),
}
.to_ir(),
)),
ast::InterpolPart::Interpolation(interpol) => {
interpol.expr().unwrap().downgrade(ctx)
} }
}) .to_ir(),
.collect::<Result<Vec<_>>>()? )),
}; ast::InterpolPart::Interpolation(interpol) => {
interpol.expr().unwrap().downgrade(ctx)
}
})
.collect::<Result<Vec<_>>>()?;
let expr = if parts.len() == 1 { let expr = if parts.len() == 1 {
parts.into_iter().next().unwrap() parts.into_iter().next().unwrap()

View File

@@ -1,7 +1,7 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::marker::PhantomData; use std::marker::PhantomData;
use std::ops::DerefMut; use std::ops::DerefMut;
use std::path::PathBuf; use std::path::{Component, PathBuf};
use std::sync::Once; use std::sync::Once;
use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8}; use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8};
@@ -125,13 +125,20 @@ fn op_resolve_path<Ctx: RuntimeCtx>(
} }
// Resolve relative path against current file directory (or CWD) // Resolve relative path against current file directory (or CWD)
let current_dir = ctx.get_current_dir(); let current_dir = ctx.get_current_dir().join(&path);
let mut normalized = PathBuf::new();
Ok(current_dir for component in current_dir.components() {
.join(&path) match component {
.canonicalize() Component::Prefix(p) => normalized.push(p.as_os_str()),
.map(|p| p.to_string_lossy().to_string()) Component::RootDir => normalized.push("/"),
.map_err(|e| format!("Failed to resolve path {}: {}", path, e))?) Component::CurDir => {}
Component::ParentDir => {
normalized.pop();
}
Component::Normal(c) => normalized.push(c),
}
}
Ok(normalized.to_string_lossy().to_string())
} }
#[deno_core::op2] #[deno_core::op2]

View File

@@ -147,3 +147,71 @@ fn builtins_concat_lists() {
])) ]))
); );
} }
// Simple two-component versions compared against "2.3" cover the three
// possible results: less, equal, greater.
#[test]
fn builtins_compare_versions_basic() {
    let cases = [
        ("builtins.compareVersions \"1.0\" \"2.3\"", -1),
        ("builtins.compareVersions \"2.1\" \"2.3\"", -1),
        ("builtins.compareVersions \"2.3\" \"2.3\"", 0),
        ("builtins.compareVersions \"2.5\" \"2.3\"", 1),
        ("builtins.compareVersions \"3.1\" \"2.3\"", 1),
    ];
    for (expr, expected) in cases {
        assert_eq!(eval(expr), Value::Int(expected));
    }
}
// A version with an extra trailing component compares greater than its prefix.
#[test]
fn builtins_compare_versions_components() {
    for (expr, expected) in [
        ("builtins.compareVersions \"2.3.1\" \"2.3\"", 1),
        ("builtins.compareVersions \"2.3\" \"2.3.1\"", -1),
    ] {
        assert_eq!(eval(expr), Value::Int(expected));
    }
}
// When a numeric component meets an alphabetic one, the alphabetic component
// sorts lower: "2.3.1" > "2.3a" (and symmetrically the other way around).
#[test]
fn builtins_compare_versions_numeric_vs_alpha() {
    for (expr, expected) in [
        ("builtins.compareVersions \"2.3.1\" \"2.3a\"", 1),
        ("builtins.compareVersions \"2.3a\" \"2.3.1\"", -1),
    ] {
        assert_eq!(eval(expr), Value::Int(expected));
    }
}
// "pre" is special-cased: it sorts before everything except another "pre"
// component (and "pre" suffixes compare numerically against each other).
#[test]
fn builtins_compare_versions_pre() {
    for (expr, expected) in [
        ("builtins.compareVersions \"2.3pre1\" \"2.3\"", -1),
        ("builtins.compareVersions \"2.3pre3\" \"2.3pre12\"", -1),
        ("builtins.compareVersions \"2.3pre1\" \"2.3c\"", -1),
        ("builtins.compareVersions \"2.3pre1\" \"2.3q\"", -1),
    ] {
        assert_eq!(eval(expr), Value::Int(expected));
    }
}
// Alphabetic components compare lexicographically in both directions.
#[test]
fn builtins_compare_versions_alpha() {
    for (expr, expected) in [
        ("builtins.compareVersions \"2.3a\" \"2.3c\"", -1),
        ("builtins.compareVersions \"2.3c\" \"2.3a\"", 1),
    ] {
        assert_eq!(eval(expr), Value::Int(expected));
    }
}
// compareVersions is antisymmetric: swapping the arguments flips the sign.
#[test]
fn builtins_compare_versions_symmetry() {
    let forward = eval("builtins.compareVersions \"1.0\" \"2.3\"");
    let backward = eval("builtins.compareVersions \"2.3\" \"1.0\"");
    assert_eq!(forward, Value::Int(-1));
    assert_eq!(backward, Value::Int(1));
}
// Longer version strings: four numeric components, multi-digit components
// compared numerically (10 > 9), and digits embedded after letters.
#[test]
fn builtins_compare_versions_complex() {
    for (expr, expected) in [
        ("builtins.compareVersions \"1.2.3.4\" \"1.2.3.5\"", -1),
        ("builtins.compareVersions \"1.2.10\" \"1.2.9\"", 1),
        ("builtins.compareVersions \"1.2a3\" \"1.2a10\"", -1),
    ] {
        assert_eq!(eval(expr), Value::Int(expected));
    }
}

316
nix-js/tests/regex.rs Normal file
View File

@@ -0,0 +1,316 @@
//! Integration tests for the regex-backed builtins (`builtins.match`,
//! `builtins.split`) and the string builtins `builtins.replaceStrings` and
//! `builtins.splitVersion`, each evaluated through the shared `eval` helper.

mod utils;

use nix_js::value::{List, Value};
use utils::eval;

// --- builtins.match -------------------------------------------------------

// A full-string match with no capture groups yields an empty list.
#[test]
fn test_match_exact_full_string() {
    assert_eq!(
        eval(r#"builtins.match "foobar" "foobar""#),
        Value::List(List::new(vec![]))
    );
}

// `match` anchors to the whole string; a partial match yields null.
#[test]
fn test_match_partial_returns_null() {
    assert_eq!(eval(r#"builtins.match "foo" "foobar""#), Value::Null);
}

// Each capture group contributes one element to the result list.
#[test]
fn test_match_with_capture_groups() {
    assert_eq!(
        eval(r#"builtins.match "(.*)\\.nix" "foobar.nix""#),
        Value::List(List::new(vec![Value::String("foobar".into())]))
    );
}

// Optional groups that did not participate in the match appear as null.
#[test]
fn test_match_multiple_capture_groups() {
    assert_eq!(
        eval(r#"builtins.match "((.*)/)?([^/]*)\\.nix" "foobar.nix""#),
        Value::List(List::new(vec![
            Value::Null,
            Value::Null,
            Value::String("foobar".into())
        ]))
    );
}

// Same pattern as above, but with the optional directory groups matching.
#[test]
fn test_match_with_path() {
    assert_eq!(
        eval(r#"builtins.match "((.*)/)?([^/]*)\\.nix" "/path/to/foobar.nix""#),
        Value::List(List::new(vec![
            Value::String("/path/to/".into()),
            Value::String("/path/to".into()),
            Value::String("foobar".into())
        ]))
    );
}

// POSIX character classes: [[:space:]] and its negation.
#[test]
fn test_match_posix_space_class() {
    assert_eq!(
        eval(r#"builtins.match "[[:space:]]+([^[:space:]]+)[[:space:]]+" " foo ""#),
        Value::List(List::new(vec![Value::String("foo".into())]))
    );
}

// POSIX [[:upper:]]: lowercase input fails, uppercase input matches.
#[test]
fn test_match_posix_upper_class() {
    assert_eq!(eval(r#"builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " foo ""#), Value::Null);
    assert_eq!(
        eval(r#"builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO ""#),
        Value::List(List::new(vec![Value::String("FOO".into())]))
    );
}

// Quantifiers: *, +, and bounded {m,n} repetition.
#[test]
fn test_match_quantifiers() {
    assert_eq!(
        eval(r#"builtins.match "fo*" "f""#),
        Value::List(List::new(vec![]))
    );
    assert_eq!(eval(r#"builtins.match "fo+" "f""#), Value::Null);
    assert_eq!(
        eval(r#"builtins.match "fo{1,2}" "foo""#),
        Value::List(List::new(vec![]))
    );
    assert_eq!(eval(r#"builtins.match "fo{1,2}" "fooo""#), Value::Null);
}

// --- builtins.split -------------------------------------------------------

// `split` interleaves unmatched text (strings) with match results (lists);
// a match spanning the whole input leaves empty strings on both sides.
#[test]
fn test_split_non_capturing() {
    assert_eq!(
        eval(r#"builtins.split "foobar" "foobar""#),
        Value::List(List::new(vec![
            Value::String("".into()),
            Value::List(List::new(vec![])),
            Value::String("".into())
        ]))
    );
}

// No match at all: the whole input comes back as a single string element.
#[test]
fn test_split_no_match() {
    assert_eq!(
        eval(r#"builtins.split "fo+" "f""#),
        Value::List(List::new(vec![Value::String("f".into())]))
    );
}

// Capture groups populate the per-match list elements.
#[test]
fn test_split_with_capture_group() {
    assert_eq!(
        eval(r#"builtins.split "(fo*)" "foobar""#),
        Value::List(List::new(vec![
            Value::String("".into()),
            Value::List(List::new(vec![Value::String("foo".into())])),
            Value::String("bar".into())
        ]))
    );
}

// Several matches in one input alternate string / match-list / string ...
#[test]
fn test_split_multiple_matches() {
    assert_eq!(
        eval(r#"builtins.split "(b)" "foobarbaz""#),
        Value::List(List::new(vec![
            Value::String("foo".into()),
            Value::List(List::new(vec![Value::String("b".into())])),
            Value::String("ar".into()),
            Value::List(List::new(vec![Value::String("b".into())])),
            Value::String("az".into())
        ]))
    );
}

// Multiple groups in one pattern: each group yields one element per match.
#[test]
fn test_split_with_multiple_groups() {
    assert_eq!(
        eval(r#"builtins.split "(f)(o*)" "foo""#),
        Value::List(List::new(vec![
            Value::String("".into()),
            Value::List(List::new(vec![
                Value::String("f".into()),
                Value::String("oo".into())
            ])),
            Value::String("".into())
        ]))
    );
}

// Alternation: the branch that did not match reports null for its group.
#[test]
fn test_split_with_optional_groups() {
    assert_eq!(
        eval(r#"builtins.split "(a)|(c)" "abc""#),
        Value::List(List::new(vec![
            Value::String("".into()),
            Value::List(List::new(vec![Value::String("a".into()), Value::Null])),
            Value::String("b".into()),
            Value::List(List::new(vec![Value::Null, Value::String("c".into())])),
            Value::String("".into())
        ]))
    );
}

// Quantifiers are greedy: each run of "o" is consumed as one match.
#[test]
fn test_split_greedy_matching() {
    assert_eq!(
        eval(r#"builtins.split "(o+)" "oooofoooo""#),
        Value::List(List::new(vec![
            Value::String("".into()),
            Value::List(List::new(vec![Value::String("oooo".into())])),
            Value::String("f".into()),
            Value::List(List::new(vec![Value::String("oooo".into())])),
            Value::String("".into())
        ]))
    );
}

// POSIX character classes work in split patterns as well.
#[test]
fn test_split_posix_classes() {
    assert_eq!(
        eval(r#"builtins.split "([[:upper:]]+)" " FOO ""#),
        Value::List(List::new(vec![
            Value::String(" ".into()),
            Value::List(List::new(vec![Value::String("FOO".into())])),
            Value::String(" ".into())
        ]))
    );
}

// --- builtins.replaceStrings ----------------------------------------------

// Every occurrence of the pattern is replaced.
#[test]
fn test_replace_basic() {
    assert_eq!(
        eval(r#"builtins.replaceStrings ["o"] ["a"] "foobar""#),
        Value::String("faabar".into())
    );
}

// Replacement with the empty string deletes the pattern.
#[test]
fn test_replace_with_empty() {
    assert_eq!(
        eval(r#"builtins.replaceStrings ["o"] [""] "foobar""#),
        Value::String("fbar".into())
    );
}

// Patterns are applied left-to-right over the input; replaced text is not
// re-scanned ("oo" -> "a" does not cascade with "a" -> "oo").
#[test]
fn test_replace_multiple_patterns() {
    assert_eq!(
        eval(r#"builtins.replaceStrings ["oo" "a"] ["a" "oo"] "foobar""#),
        Value::String("faboor".into())
    );
}

// When two patterns are identical, the first one's replacement wins.
#[test]
fn test_replace_first_match_wins() {
    assert_eq!(
        eval(r#"builtins.replaceStrings ["oo" "oo"] ["u" "i"] "foobar""#),
        Value::String("fubar".into())
    );
}

// The empty pattern matches at every position, including the end.
#[test]
fn test_replace_empty_pattern() {
    assert_eq!(
        eval(r#"builtins.replaceStrings [""] ["X"] "abc""#),
        Value::String("XaXbXcX".into())
    );
}

// The empty pattern matches once in the empty string.
#[test]
fn test_replace_empty_pattern_empty_string() {
    assert_eq!(
        eval(r#"builtins.replaceStrings [""] ["X"] """#),
        Value::String("X".into())
    );
}

#[test]
fn test_replace_simple_char() {
    assert_eq!(
        eval(r#"builtins.replaceStrings ["-"] ["_"] "a-b""#),
        Value::String("a_b".into())
    );
}

// A multi-character pattern may be replaced by a shorter string.
#[test]
fn test_replace_longer_pattern() {
    assert_eq!(
        eval(r#"builtins.replaceStrings ["oo"] ["u"] "foobar""#),
        Value::String("fubar".into())
    );
}

// Mismatched `from`/`to` list lengths are an evaluation error; the test
// asserts that `eval` panics (caught via catch_unwind).
#[test]
fn test_replace_different_lengths() {
    let result = std::panic::catch_unwind(|| {
        eval(r#"builtins.replaceStrings ["a" "b"] ["x"] "test""#)
    });
    assert!(result.is_err());
}

// --- builtins.splitVersion ------------------------------------------------

// Dots separate numeric components.
#[test]
fn test_split_version_simple() {
    assert_eq!(
        eval(r#"builtins.splitVersion "1.2.3""#),
        Value::List(List::new(vec![
            Value::String("1".into()),
            Value::String("2".into()),
            Value::String("3".into())
        ]))
    );
}

// Digit/letter boundaries split without an explicit separator: "pre1234"
// becomes "pre", "1234".
#[test]
fn test_split_version_with_pre() {
    assert_eq!(
        eval(r#"builtins.splitVersion "2.3.0pre1234""#),
        Value::List(List::new(vec![
            Value::String("2".into()),
            Value::String("3".into()),
            Value::String("0".into()),
            Value::String("pre".into()),
            Value::String("1234".into())
        ]))
    );
}

// A trailing letter forms its own component.
#[test]
fn test_split_version_with_letters() {
    assert_eq!(
        eval(r#"builtins.splitVersion "2.3a""#),
        Value::List(List::new(vec![
            Value::String("2".into()),
            Value::String("3".into()),
            Value::String("a".into())
        ]))
    );
}

// Dashes act as separators and are dropped from the output.
#[test]
fn test_split_version_with_dashes() {
    assert_eq!(
        eval(r#"builtins.splitVersion "2.3-beta1""#),
        Value::List(List::new(vec![
            Value::String("2".into()),
            Value::String("3".into()),
            Value::String("beta".into()),
            Value::String("1".into())
        ]))
    );
}

// The empty version string yields an empty list.
#[test]
fn test_split_version_empty() {
    assert_eq!(
        eval(r#"builtins.splitVersion """#),
        Value::List(List::new(vec![]))
    );
}

15
shell.nix Normal file
View File

@@ -0,0 +1,15 @@
# Compatibility shell: lets plain `nix-shell` use this repository's flake
# devShell by evaluating the flake through flake-compat, pinned via flake.lock.
let
  # Read the flake lock file to locate the pinned flake-compat input.
  lockFile = builtins.fromJSON (builtins.readFile ./flake.lock);
  # `root.inputs.flake-compat` names the lock node holding the pinned source.
  flake-compat-node = lockFile.nodes.${lockFile.nodes.root.inputs.flake-compat};
  # Fetch flake-compat exactly as locked.
  # NOTE(review): passes the SRI-format `narHash` as `sha256` — accepted by
  # modern Nix's fetchTarball; confirm against the minimum supported Nix version.
  flake-compat = builtins.fetchTarball {
    inherit (flake-compat-node.locked) url;
    sha256 = flake-compat-node.locked.narHash;
  };
  # Evaluate this repository's flake through the compatibility layer.
  flake = (
    import flake-compat {
      src = ./.;
    }
  );
in
# flake-compat exposes the flake's shell as `shellNix`.
flake.shellNix