Compare commits

...

14 Commits

31 changed files with 1466 additions and 537 deletions

36
Cargo.lock generated
View File

@@ -1927,11 +1927,11 @@ dependencies = [
"rnix",
"rowan",
"rusqlite",
"rust-embed",
"rustyline",
"serde",
"serde_json",
"sha2",
"sourcemap",
"string-interner",
"tar",
"tempfile",
@@ -2587,6 +2587,40 @@ dependencies = [
"smallvec",
]
[[package]]
name = "rust-embed"
version = "8.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04113cb9355a377d83f06ef1f0a45b8ab8cd7d8b1288160717d66df5c7988d27"
dependencies = [
"rust-embed-impl",
"rust-embed-utils",
"walkdir",
]
[[package]]
name = "rust-embed-impl"
version = "8.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da0902e4c7c8e997159ab384e6d0fc91c221375f6894346ae107f47dd0f3ccaa"
dependencies = [
"proc-macro2",
"quote",
"rust-embed-utils",
"syn",
"walkdir",
]
[[package]]
name = "rust-embed-utils"
version = "8.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bcdef0be6fe7f6fa333b1073c949729274b05f123a0ad7efcb8efd878e5c3b1"
dependencies = [
"sha2",
"walkdir",
]
[[package]]
name = "rustc-demangle"
version = "0.1.27"

View File

@@ -30,6 +30,8 @@ hashbrown = "0.16"
petgraph = "0.8"
string-interner = "0.19"
rust-embed = "8.11"
itertools = "0.14"
regex = "1.11"
@@ -41,7 +43,6 @@ nix-nar = "0.3"
sha2 = "0.10"
hex = "0.4"
sourcemap = "9.0"
base64 = "0.22"
# Fetcher dependencies

View File

@@ -31,6 +31,7 @@ export const hasAttr =
(set: NixValue): boolean =>
Object.hasOwn(forceAttrs(set), forceStringValue(s));
let counter = 0;
export const mapAttrs =
(f: NixValue) =>
(attrs: NixValue): NixAttrs => {
@@ -38,7 +39,8 @@ export const mapAttrs =
const forcedF = forceFunction(f);
const newAttrs: NixAttrs = {};
for (const key in forcedAttrs) {
newAttrs[key] = createThunk(() => forceFunction(forcedF(key))(forcedAttrs[key]));
newAttrs[key] = createThunk(() => forceFunction(forcedF(key))(forcedAttrs[key]), `created by mapAttrs (${counter})`);
counter += 1;
}
return newAttrs;
};
@@ -49,9 +51,10 @@ export const removeAttrs =
const new_attrs: NixAttrs = {};
const forced_attrs = forceAttrs(attrs);
const forced_list = forceList(list);
const keys_to_remove = new Set(forced_list.map(forceStringValue));
for (const key in forced_attrs) {
if (!(key in forced_list)) {
if (!keys_to_remove.has(key)) {
new_attrs[key] = forced_attrs[key];
}
}
@@ -104,3 +107,55 @@ export const groupBy =
}
return attrs;
};
export const zipAttrsWith =
    (f: NixValue) =>
    (list: NixValue): NixValue => {
        // Group every attribute value by name across all sets in the list,
        // then lazily apply the curried combining function `f name values`
        // to each name/bucket pair.
        const grouped = new Map<string, NixValue[]>();
        for (const element of forceList(list)) {
            for (const [name, value] of Object.entries(forceAttrs(element))) {
                const bucket = grouped.get(name);
                if (bucket === undefined) {
                    grouped.set(name, [value]);
                } else {
                    bucket.push(value);
                }
            }
        }
        const zipped: Record<string, NixValue> = {};
        for (const [name, values] of grouped) {
            zipped[name] = createThunk(() => forceFunction(forceFunction(f)(name))(values));
        }
        return zipped;
    };
export const unsafeGetAttrPos =
    (attrName: NixValue) =>
    (attrSet: NixValue): NixValue => {
        // Look up the source position recorded for an attribute; yields
        // null when the attribute is absent or carries no position data.
        const key = forceStringValue(attrName);
        const set = forceAttrs(attrSet);
        if (!(key in set)) {
            return null;
        }
        const posTable = (set as any)[Nix.ATTR_POSITIONS];
        if (!posTable || !(key in posTable)) {
            return null;
        }
        return Nix.mkPos(posTable[key]);
    };

View File

@@ -8,7 +8,7 @@ import { force } from "../thunk";
import { type NixStringContext, mkStringWithContext, addBuiltContext } from "../string-context";
import { forceFunction } from "../type-assert";
import { nixValueToJson } from "../conversion";
import { typeOf } from "./type-check";
import { isAttrs, isPath, typeOf } from "./type-check";
const convertJsonToNix = (json: unknown): NixValue => {
if (json === null) {
@@ -283,6 +283,16 @@ export const coerceToStringWithContext = (
* - Preserves string context if present
*/
export const coerceToPath = (value: NixValue, outContext?: NixStringContext): string => {
const forced = force(value);
if (isPath(forced)) {
return forced.value;
}
if (isAttrs(forced) && Object.hasOwn(forced, "__toString")) {
const toStringFunc = forceFunction(forced.__toString);
return coerceToPath(toStringFunc(forced), outContext);
}
const pathStr = coerceToString(value, StringCoercionMode.Base, false, outContext);
if (pathStr === "") {

View File

@@ -1,9 +1,16 @@
import type { NixValue, NixAttrs } from "../types";
import { forceStringValue, forceList } from "../type-assert";
import { force } from "../thunk";
import { force, createThunk } from "../thunk";
import { type DerivationData, type OutputInfo, generateAterm } from "../derivation-helpers";
import { coerceToString, StringCoercionMode } from "./conversion";
import { type NixStringContext, extractInputDrvsAndSrcs, isStringWithContext } from "../string-context";
import {
type NixStringContext,
extractInputDrvsAndSrcs,
isStringWithContext,
mkStringWithContext,
addDrvDeepContext,
addBuiltContext,
} from "../string-context";
import { nixValueToJson } from "../conversion";
import { isNixPath } from "../types";
@@ -88,6 +95,7 @@ const extractEnv = (
structuredAttrs: boolean,
ignoreNulls: boolean,
outContext: NixStringContext,
drvName: string,
): Map<string, string> => {
const specialAttrs = new Set([
"name",
@@ -113,6 +121,49 @@ const extractEnv = (
}
jsonAttrs[key] = nixValueToJson(value, new Set(), outContext);
}
if (key === "allowedReferences") {
console.warn(
`In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
`the derivation attribute 'allowedReferences'; use ` +
`'outputChecks.<output>.allowedReferences' instead`
);
}
if (key === "allowedRequisites") {
console.warn(
`In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
`the derivation attribute 'allowedRequisites'; use ` +
`'outputChecks.<output>.allowedRequisites' instead`
);
}
if (key === "disallowedReferences") {
console.warn(
`In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
`the derivation attribute 'disallowedReferences'; use ` +
`'outputChecks.<output>.disallowedReferences' instead`
);
}
if (key === "disallowedRequisites") {
console.warn(
`In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
`the derivation attribute 'disallowedRequisites'; use ` +
`'outputChecks.<output>.disallowedRequisites' instead`
);
}
if (key === "maxSize") {
console.warn(
`In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
`the derivation attribute 'maxSize'; use ` +
`'outputChecks.<output>.maxSize' instead`
);
}
if (key === "maxClosureSize") {
console.warn(
`In a derivation named '${drvName}', 'structuredAttrs' disables the effect of ` +
`the derivation attribute 'maxClosureSize'; use ` +
`'outputChecks.<output>.maxClosureSize' instead`
);
}
}
env.set("__json", JSON.stringify(jsonAttrs));
} else {
@@ -174,8 +225,16 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
const ignoreNulls = "__ignoreNulls" in attrs ? force(attrs.__ignoreNulls) === true : false;
if ("__contentAddressed" in attrs && force(attrs.__contentAddressed) === true) {
throw new Error("ca derivations are not supported");
}
if ("impure" in attrs && force(attrs.impure) === true) {
throw new Error("impure derivations are not supported");
}
const drvArgs = extractArgs(attrs, collectedContext);
const env = extractEnv(attrs, structuredAttrs, ignoreNulls, collectedContext);
const env = extractEnv(attrs, structuredAttrs, ignoreNulls, collectedContext, drvName);
env.set("name", drvName);
env.set("builder", builder);
@@ -277,37 +336,94 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
drvPath = Deno.core.ops.op_make_store_path("text", finalDrvHash, `${drvName}.drv`);
}
const result: NixAttrs = {
type: "derivation",
drvPath,
name: drvName,
builder,
system: platform,
};
const result: NixAttrs = {};
const drvPathContext = new Set<string>();
addDrvDeepContext(drvPathContext, drvPath);
result.drvPath = mkStringWithContext(drvPath, drvPathContext);
for (const [outputName, outputInfo] of outputInfos.entries()) {
result[outputName] = outputInfo.path;
}
if (outputInfos.has("out")) {
result.outPath = outputInfos.get("out")!.path;
}
if (drvArgs.length > 0) {
result.args = drvArgs;
}
if (!structuredAttrs) {
for (const [key, value] of env.entries()) {
if (!["name", "builder", "system", ...outputs].includes(key)) {
result[key] = value;
}
}
const outputContext = new Set<string>();
addBuiltContext(outputContext, drvPath, outputName);
result[outputName] = mkStringWithContext(outputInfo.path, outputContext);
}
return result;
};
export const derivation = (args: NixValue): NixAttrs => {
return derivationStrict(args);
const attrs = forceAttrs(args);
const strict = derivationStrict(args);
const outputs: string[] = extractOutputs(attrs);
const drvName = validateName(attrs);
const collectedContext: NixStringContext = new Set();
const builder = validateBuilder(attrs, collectedContext);
const platform = validateSystem(attrs);
const structuredAttrs = "__structuredAttrs" in attrs ? force(attrs.__structuredAttrs) === true : false;
const ignoreNulls = "__ignoreNulls" in attrs ? force(attrs.__ignoreNulls) === true : false;
const drvArgs = extractArgs(attrs, collectedContext);
const specialAttrs = new Set([
"name",
"builder",
"system",
"args",
"outputs",
"__structuredAttrs",
"__ignoreNulls",
"__contentAddressed",
"impure",
]);
const baseAttrs: NixAttrs = {
type: "derivation",
drvPath: strict.drvPath,
name: drvName,
builder,
system: platform,
};
if (drvArgs.length > 0) {
baseAttrs.args = drvArgs;
}
if (!structuredAttrs) {
for (const [key, value] of Object.entries(attrs)) {
if (!specialAttrs.has(key) && !outputs.includes(key)) {
const forcedValue = force(value);
if (!(ignoreNulls && forcedValue === null)) {
baseAttrs[key] = value;
}
}
}
}
const outputsList: NixAttrs[] = [];
for (const outputName of outputs) {
const outputObj: NixAttrs = {
...baseAttrs,
outPath: strict[outputName],
outputName,
};
outputsList.push(outputObj);
}
baseAttrs.drvAttrs = attrs;
for (const [i, outputName] of outputs.entries()) {
baseAttrs[outputName] = createThunk(() => outputsList[i], `output_${outputName}`);
}
baseAttrs.all = createThunk(() => outputsList, "all_outputs");
for (const outputObj of outputsList) {
outputObj.drvAttrs = attrs;
for (const [i, outputName] of outputs.entries()) {
outputObj[outputName] = createThunk(() => outputsList[i], `output_${outputName}`);
}
outputObj.all = createThunk(() => outputsList, "all_outputs");
}
return outputsList[0];
};

View File

@@ -172,6 +172,8 @@ export const builtins: any = {
intersectAttrs: mkPrimop(attrs.intersectAttrs, "intersectAttrs", 2),
catAttrs: mkPrimop(attrs.catAttrs, "catAttrs", 2),
groupBy: mkPrimop(attrs.groupBy, "groupBy", 2),
zipAttrsWith: mkPrimop(attrs.zipAttrsWith, "zipAttrsWith", 2),
unsafeGetAttrPos: mkPrimop(attrs.unsafeGetAttrPos, "unsafeGetAttrPos", 2),
stringLength: mkPrimop(string.stringLength, "stringLength", 1),
substring: mkPrimop(string.substring, "substring", 3),
@@ -231,7 +233,6 @@ export const builtins: any = {
1,
),
unsafeDiscardStringContext: mkPrimop(misc.unsafeDiscardStringContext, "unsafeDiscardStringContext", 1),
unsafeGetAttrPos: mkPrimop(misc.unsafeGetAttrPos, "unsafeGetAttrPos", 2),
addDrvOutputDependencies: mkPrimop(misc.addDrvOutputDependencies, "addDrvOutputDependencies", 2),
compareVersions: mkPrimop(misc.compareVersions, "compareVersions", 2),
flakeRefToString: mkPrimop(misc.flakeRefToString, "flakeRefToString", 1),
@@ -247,13 +248,12 @@ export const builtins: any = {
splitVersion: mkPrimop(misc.splitVersion, "splitVersion", 1),
traceVerbose: mkPrimop(misc.traceVerbose, "traceVerbose", 2),
tryEval: mkPrimop(misc.tryEval, "tryEval", 1),
zipAttrsWith: mkPrimop(misc.zipAttrsWith, "zipAttrsWith", 2),
builtins: createThunk(() => builtins),
builtins: createThunk(() => builtins, "builtins"),
currentSystem: createThunk(() => {
return "x86_64-linux";
}),
currentTime: createThunk(() => Date.now()),
}, "currentSystem"),
currentTime: createThunk(() => Date.now(), "currentTime"),
false: false,
true: true,

View File

@@ -3,15 +3,16 @@
* Implemented via Rust ops exposed through deno_core
*/
import { forceAttrs, forceBool, forceStringValue } from "../type-assert";
import type { NixValue, NixAttrs } from "../types";
import { isNixPath } from "../types";
import { forceAttrs, forceBool, forceList, forceStringNoCtx, forceStringValue } from "../type-assert";
import type { NixValue, NixAttrs, NixPath } from "../types";
import { isNixPath, IS_PATH, CatchableError } from "../types";
import { force } from "../thunk";
import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion";
import { getPathValue } from "../path";
import type { NixStringContext, StringWithContext } from "../string-context";
import { mkStringWithContext } from "../string-context";
import { isPath } from "./type-check";
import { getCorepkg } from "../corepkgs";
export const importFunc = (path: NixValue): NixValue => {
const context: NixStringContext = new Set();
@@ -291,8 +292,14 @@ const autoDetectAndFetch = (attrs: NixAttrs): NixAttrs => {
return { outPath: fetchurl(attrs) };
};
export const readDir = (path: NixValue): never => {
throw new Error("Not implemented: readDir");
export const readDir = (path: NixValue): NixAttrs => {
    // List a directory via the native op; the op yields a map of
    // entry name -> file-type string, which we copy into a fresh attrset.
    const dir = coerceToPath(path);
    const listing: Record<string, string> = Deno.core.ops.op_read_dir(dir);
    const out: NixAttrs = {};
    for (const name of Object.keys(listing)) {
        out[name] = listing[name];
    }
    return out;
};
export const readFile = (path: NixValue): string => {
@@ -300,8 +307,9 @@ export const readFile = (path: NixValue): string => {
return Deno.core.ops.op_read_file(pathStr);
};
export const readFileType = (path: NixValue): never => {
throw new Error("Not implemented: readFileType");
export const readFileType = (path: NixValue): string => {
    // Coerce the argument to a path and ask the native op for its
    // file-type string.
    const pathStr = coerceToPath(path);
    const fileType = Deno.core.ops.op_read_file_type(pathStr);
    return fileType;
};
export const pathExists = (path: NixValue): boolean => {
@@ -390,10 +398,70 @@ export const filterSource = (args: NixValue): never => {
throw new Error("Not implemented: filterSource");
};
const suffixIfPotentialMatch = (prefix: string, path: string): string | null => {
    // Returns the remainder of `path` after `prefix`, or null when the
    // prefix does not match. A non-empty, non-exhaustive prefix must be
    // followed by a '/' separator, which is stripped from the suffix.
    const len = prefix.length;
    if (!path.startsWith(prefix)) {
        return null;
    }
    if (len === 0 || len === path.length) {
        return path.slice(len);
    }
    if (path.charAt(len) !== "/") {
        return null;
    }
    return path.slice(len + 1);
};
export const findFile =
(search: NixValue) =>
(lookup: NixValue): never => {
throw new Error("Not implemented: findFile");
(searchPath: NixValue) =>
(lookupPath: NixValue): NixPath => {
const forcedSearchPath = forceList(searchPath);
const lookupPathStr = forceStringNoCtx(lookupPath);
for (const item of forcedSearchPath) {
const attrs = forceAttrs(item);
const prefix = "prefix" in attrs ? forceStringNoCtx(attrs.prefix) : "";
if (!("path" in attrs)) {
throw new Error("findFile: search path element is missing 'path' attribute");
}
const suffix = suffixIfPotentialMatch(prefix, lookupPathStr);
if (suffix === null) {
continue;
}
const context: NixStringContext = new Set();
const pathVal = coerceToString(attrs.path, StringCoercionMode.Interpolation, false, context);
if (context.size > 0) {
throw new Error("findFile: path with string context is not yet supported");
}
const resolvedPath = Deno.core.ops.op_resolve_path(pathVal, "");
const candidatePath = suffix.length > 0
? Deno.core.ops.op_resolve_path(suffix, resolvedPath)
: resolvedPath;
if (Deno.core.ops.op_path_exists(candidatePath)) {
return { [IS_PATH]: true, value: candidatePath };
}
}
if (lookupPathStr.startsWith("nix/")) {
const corepkgName = lookupPathStr.substring(4);
const corepkgContent = getCorepkg(corepkgName);
if (corepkgContent !== undefined) {
// FIXME: special path type
return { [IS_PATH]: true, value: `<nix/${corepkgName}>` };
}
}
throw new CatchableError(`file '${lookupPathStr}' was not found in the Nix search path`);
};
export const getEnv = (s: NixValue): string => {

View File

@@ -2,12 +2,12 @@
* Miscellaneous builtin functions
*/
import { force } from "../thunk";
import { createThunk, force } from "../thunk";
import { CatchableError } from "../types";
import type { NixAttrs, NixBool, NixStrictValue, NixValue } from "../types";
import { forceList, forceAttrs, forceFunction, forceStringValue, forceString } from "../type-assert";
import { forceList, forceAttrs, forceFunction, forceStringValue, forceString, forceStringNoCtx } from "../type-assert";
import * as context from "./context";
import { compareValues, op } from "../operators";
import { compareValues } from "../operators";
import { isBool, isFloat, isInt, isList, isString, typeOf } from "./type-check";
import { OrderedSet } from "js-sdsl";
import {
@@ -15,13 +15,13 @@ import {
getStringValue,
getStringContext,
mkStringWithContext,
mergeContexts,
} from "../string-context";
export const addErrorContext =
(e1: NixValue) =>
(e2: NixValue): never => {
throw new Error("Not implemented: addErrorContext");
(e2: NixValue): NixValue => {
console.log("[WARNING]: addErrorContext not implemented");
return e2;
};
export const appendContext = context.appendContext;
@@ -50,10 +50,6 @@ export const unsafeDiscardOutputDependency = context.unsafeDiscardOutputDependen
export const unsafeDiscardStringContext = context.unsafeDiscardStringContext;
export const unsafeGetAttrPos = (s: NixValue): never => {
throw new Error("Not implemented: unsafeGetAttrPos");
};
export const addDrvOutputDependencies = context.addDrvOutputDependencies;
export const compareVersions =
@@ -232,8 +228,21 @@ export const outputOf =
throw new Error("Not implemented: outputOf");
};
export const parseDrvName = (s: NixValue): never => {
throw new Error("Not implemented: parseDrvName");
export const parseDrvName = (s: NixValue): NixAttrs => {
    // Split a "name-version" string at the first '-' that is followed by
    // a non-letter character; if no such dash exists, the whole string is
    // the name and the version is empty.
    const full = forceStringNoCtx(s);
    let splitAt = -1;
    for (let i = 0; i + 1 < full.length; ++i) {
        if (full[i] === "-" && !/[a-zA-Z]/.test(full[i + 1])) {
            splitAt = i;
            break;
        }
    }
    if (splitAt === -1) {
        return { name: full, version: "" };
    }
    return {
        name: full.substring(0, splitAt),
        version: full.substring(splitAt + 1),
    };
};
export const parseFlakeName = (s: NixValue): never => {
@@ -355,37 +364,3 @@ export const tryEval = (e: NixValue): { success: NixBool; value: NixStrictValue
}
}
};
export const zipAttrsWith =
(f: NixValue) =>
(list: NixValue): NixValue => {
const listForced = forceList(list);
// Map to collect all values for each attribute name
const attrMap = new Map<string, NixValue[]>();
// Iterate through each attribute set in the list
for (const item of listForced) {
const attrs = forceAttrs(force(item) as NixValue);
// Collect all attribute names and their values
for (const [key, value] of Object.entries(attrs)) {
if (!attrMap.has(key)) {
attrMap.set(key, []);
}
attrMap.get(key)!.push(value);
}
}
// Build the result attribute set
const result: Record<string, NixValue> = {};
for (const [name, values] of attrMap.entries()) {
// Apply f to name and values list
// f is curried: f name values
const fWithName = forceFunction(f)(name);
result[name] = forceFunction(fWithName)(values);
}
return result;
};

View File

@@ -0,0 +1,73 @@
// Nix source of the embedded <nix/fetchurl.nix> corepkg, stored verbatim as
// a template string. It builds a fixed-output derivation that uses the
// "builtin:fetchurl" builder; the legacy md5/sha1/sha256/sha512 arguments
// are folded into outputHash/outputHashAlgo, and `impure = true` switches
// the derivation to `__impure` instead of a fixed output hash.
export const FETCHURL_NIX = `{
  system ? "", # obsolete
  url,
  hash ? "", # an SRI hash
  # Legacy hash specification
  md5 ? "",
  sha1 ? "",
  sha256 ? "",
  sha512 ? "",
  outputHash ?
    if hash != "" then
      hash
    else if sha512 != "" then
      sha512
    else if sha1 != "" then
      sha1
    else if md5 != "" then
      md5
    else
      sha256,
  outputHashAlgo ?
    if hash != "" then
      ""
    else if sha512 != "" then
      "sha512"
    else if sha1 != "" then
      "sha1"
    else if md5 != "" then
      "md5"
    else
      "sha256",
  executable ? false,
  unpack ? false,
  name ? baseNameOf (toString url),
  impure ? false,
}:

derivation (
  {
    builder = "builtin:fetchurl";

    # New-style output content requirements.
    outputHashMode = if unpack || executable then "recursive" else "flat";

    inherit
      name
      url
      executable
      unpack
      ;

    system = "builtin";

    # No need to double the amount of network traffic
    preferLocalBuild = true;

    # This attribute does nothing; it's here to avoid changing evaluation results.
    impureEnvVars = [
      "http_proxy"
      "https_proxy"
      "ftp_proxy"
      "all_proxy"
      "no_proxy"
    ];

    # To make "nix-prefetch-url" work.
    urls = [ url ];
  }
  // (if impure then { __impure = true; } else { inherit outputHashAlgo outputHash; })
)
`;

View File

@@ -0,0 +1,9 @@
import { FETCHURL_NIX } from "./fetchurl.nix";
// Registry of embedded "corepkgs" Nix sources, keyed by the file name used
// after the "nix/" prefix in lookup paths (e.g. <nix/fetchurl.nix>).
export const COREPKGS: Record<string, string> = {
  "fetchurl.nix": FETCHURL_NIX,
};
export const getCorepkg = (name: string): string | undefined => {
return COREPKGS[name];
};

View File

@@ -169,7 +169,15 @@ export const concatStringsWithContext = (parts: NixValue[]): NixString | NixPath
* @returns NixPath object with absolute path
*/
export const resolvePath = (currentDir: string, path: NixValue): NixPath => {
const pathStr = forceStringValue(path);
const forced = force(path);
let pathStr: string;
if (isNixPath(forced)) {
pathStr = forced.value;
} else {
pathStr = forceStringValue(path);
}
const resolved = Deno.core.ops.op_resolve_path(currentDir, pathStr);
return mkPath(resolved);
};
@@ -242,17 +250,20 @@ export const selectWithDefault = (
}
};
function selectWithDefault_impl(obj: NixValue, attrpath: NixValue[], default_val: NixValue): NixValue {
let attrs = forceAttrs(obj);
function selectWithDefault_impl(obj: NixValue, attrpath: NixValue[], defaultVal: NixValue): NixValue {
let attrs = force(obj);
if (!isAttrs(attrs)) {
return defaultVal;
}
for (const attr of attrpath.slice(0, -1)) {
const key = forceStringValue(attr);
if (!(key in attrs)) {
return default_val;
return defaultVal;
}
const cur = force(attrs[key]);
if (!isAttrs(cur)) {
return default_val;
return defaultVal;
}
attrs = cur;
}
@@ -261,7 +272,7 @@ function selectWithDefault_impl(obj: NixValue, attrpath: NixValue[], default_val
if (last in attrs) {
return attrs[last];
}
return default_val;
return defaultVal;
}
export const hasAttr = (obj: NixValue, attrpath: NixValue[]): NixBool => {
@@ -377,3 +388,7 @@ export const ifFunc = (cond: NixValue, consq: NixValue, alter: NixValue) => {
}
return alter;
};
// Decode an encoded source span into a Nix position attrset via the native
// op_decode_span op (declared to yield { file, line, column } — see the op
// declarations; confirm against the Rust side if the shape changes).
export const mkPos = (span: string): NixAttrs => {
  return Deno.core.ops.op_decode_span(span);
};

View File

@@ -18,12 +18,13 @@ import {
pushContext,
popContext,
withContext,
mkPos,
} from "./helpers";
import { op } from "./operators";
import { builtins, PRIMOP_METADATA } from "./builtins";
import { coerceToString, StringCoercionMode } from "./builtins/conversion";
import { HAS_CONTEXT } from "./string-context";
import { IS_PATH, mkFunction } from "./types";
import { IS_PATH, mkAttrs, mkFunction, mkAttrsWithPos, ATTR_POSITIONS } from "./types";
import { forceBool } from "./type-assert";
export type NixRuntime = typeof Nix;
@@ -52,7 +53,11 @@ export const Nix = {
coerceToString,
concatStringsWithContext,
StringCoercionMode,
mkAttrs,
mkAttrsWithPos,
mkFunction,
mkPos,
ATTR_POSITIONS,
pushContext,
popContext,

View File

@@ -85,6 +85,17 @@ export const forceString = (value: NixValue): NixString => {
throw new TypeError(`Expected string, got ${typeOf(forced)}`);
};
/**
 * Force a value to a plain string, rejecting strings that carry a store-path
 * context (those may not appear in context-free positions).
 * @throws TypeError when the value has context or is not a string at all
 */
export const forceStringNoCtx = (value: NixValue): string => {
  const forced = force(value);
  if (isStringWithContext(forced)) {
    throw new TypeError(`the string '${forced.value}' is not allowed to refer to a store path`)
  }
  if (typeof forced !== "string") {
    throw new TypeError(`Expected string, got ${typeOf(forced)}`);
  }
  return forced;
};
/**
* Force a value and assert it's a boolean
* @throws TypeError if value is not a boolean after forcing

View File

@@ -2,10 +2,11 @@
* Core TypeScript type definitions for nix-js runtime
*/
import { IS_THUNK } from "./thunk";
import { type StringWithContext, HAS_CONTEXT, isStringWithContext } from "./string-context";
import { force, IS_THUNK } from "./thunk";
import { type StringWithContext, HAS_CONTEXT, isStringWithContext, getStringContext } from "./string-context";
import { op } from "./operators";
import { forceAttrs } from "./type-assert";
import { forceAttrs, forceStringNoCtx } from "./type-assert";
import { isString, typeOf } from "./builtins/type-check";
export { HAS_CONTEXT, isStringWithContext };
export type { StringWithContext };
@@ -62,11 +63,62 @@ export class NixArgs {
}
}
}
export const mkFunction = (f: (arg: NixValue) => NixValue, required: string[], optional: string[], ellipsis: boolean): NixFunction => {
export const mkFunction = (
f: (arg: NixValue) => NixValue,
required: string[],
optional: string[],
ellipsis: boolean,
): NixFunction => {
const func = f as NixFunction;
func.args = new NixArgs(required, optional, ellipsis);
return func
return func;
};
// Populate `attrs` from parallel key/value arrays. A key that forces to
// null marks an entry to be skipped; all other keys must be context-free
// strings. The (mutated) attrset is returned for chaining.
export const mkAttrs = (attrs: NixAttrs, keys: NixValue[], values: NixValue[]): NixAttrs => {
  keys.forEach((rawKey, i) => {
    const key = force(rawKey);
    if (key !== null) {
      attrs[forceStringNoCtx(key)] = values[i];
    }
  });
  return attrs;
};
// Hidden, non-enumerable key under which per-attribute source spans are
// stored on an attrset (read back by unsafeGetAttrPos via the runtime).
const ATTR_POSITIONS = Symbol("attrPositions");

/**
 * Attach dynamic attributes and source-position metadata to an attrset.
 *
 * @param attrs - attrset to mutate (static attributes already present)
 * @param positions - map of attribute name -> encoded span; extended with
 *   the spans of any dynamic attributes, then frozen onto `attrs`
 * @param dyns - optional parallel arrays of dynamic keys/values/spans;
 *   a key that forces to null is skipped (its value and span are dropped)
 * @returns the same `attrs` object, for use as an expression
 */
export const mkAttrsWithPos = (
  attrs: NixAttrs,
  positions: Record<string, string>,
  dyns?: { dynKeys: NixValue[]; dynVals: NixValue[]; dynSpans: string[] }
): NixAttrs => {
  if (dyns) {
    const len = dyns.dynKeys.length;
    for (let i = 0; i < len; i++) {
      const key = force(dyns.dynKeys[i]);
      if (key === null) {
        continue;
      }
      // Dynamic keys must be context-free strings.
      const str = forceStringNoCtx(key);
      attrs[str] = dyns.dynVals[i];
      positions[str] = dyns.dynSpans[i];
    }
  }
  if (Object.keys(positions).length > 0) {
    // Non-enumerable so position metadata never shows up during normal
    // attribute iteration; writable: false keeps the table stable.
    Object.defineProperty(attrs, ATTR_POSITIONS, {
      value: positions,
      enumerable: false,
      writable: false,
    });
  }
  return attrs;
};

export { ATTR_POSITIONS };
/**
* Interface for lazy thunk values

View File

@@ -38,8 +38,11 @@ declare global {
function op_resolve_path(currentDir: string, path: string): string;
function op_import(path: string): string;
function op_read_file(path: string): string;
function op_read_file_type(path: string): string;
function op_read_dir(path: string): Record<string, string>;
function op_path_exists(path: string): boolean;
function op_sha256_hex(data: string): string;
function op_decode_span(span: string): { file: string | null; line: number | null; column: number | null };
function op_make_store_path(ty: string, hash_hex: string, name: string): string;
function op_output_path_name(drv_name: string, output_name: string): string;
function op_make_fixed_output_path(

View File

@@ -40,6 +40,7 @@ pub(crate) trait CodegenContext {
fn get_current_dir(&self) -> &Path;
fn get_store_dir(&self) -> &str;
fn get_current_source_id(&self) -> usize;
fn get_current_source(&self) -> crate::error::Source;
}
trait EscapeQuote {
@@ -117,9 +118,19 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
Ir::Arg(x) => format!("arg{}", x.inner.0),
Ir::Let(x) => x.compile(ctx),
Ir::Select(x) => x.compile(ctx),
&Ir::Thunk(Thunk { inner: expr_id, .. }) => {
&Ir::Thunk(Thunk {
inner: expr_id,
span,
}) => {
let inner = ctx.get_ir(expr_id).compile(ctx);
format!("Nix.createThunk(()=>({}),\"expr{}\")", inner, expr_id.0)
format!(
"Nix.createThunk(()=>({}),\"expr{} {}:{}:{}\")",
inner,
expr_id.0,
ctx.get_current_source().get_name(),
usize::from(span.start()),
usize::from(span.end())
)
}
&Ir::ExprRef(ExprRef { inner: expr_id, .. }) => {
format!("expr{}", expr_id.0)
@@ -160,6 +171,10 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
)
}
}
Ir::CurPos(cur_pos) => {
let span_str = encode_span(cur_pos.span, ctx);
format!("Nix.mkPos({})", span_str)
}
}
}
}
@@ -316,8 +331,8 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Select {
.attrpath
.iter()
.map(|attr| match attr {
Attr::Str(sym) => ctx.get_sym(*sym).escape_quote(),
Attr::Dynamic(expr_id) => ctx.get_ir(*expr_id).compile(ctx),
Attr::Str(sym, _) => ctx.get_sym(*sym).escape_quote(),
Attr::Dynamic(expr_id, _) => ctx.get_ir(*expr_id).compile(ctx),
})
.join(",");
let span_str = encode_span(self.span, ctx);
@@ -335,9 +350,10 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Select {
impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
fn compile(&self, ctx: &Ctx) -> String {
let mut attrs = Vec::new();
let mut attr_positions = Vec::new();
let stack_trace_enabled = std::env::var("NIX_JS_STACK_TRACE").is_ok();
for (&sym, &expr) in &self.stcs {
for (&sym, &(expr, attr_span)) in &self.stcs {
let key = ctx.get_sym(sym);
let value_code = ctx.get_ir(expr).compile(ctx);
@@ -351,28 +367,52 @@ impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
value_code
};
attrs.push(format!("{}:{}", key.escape_quote(), value));
let attr_pos_str = encode_span(attr_span, ctx);
attr_positions.push(format!("{}:{}", key.escape_quote(), attr_pos_str));
}
// FIXME: duplicated key
for (key_expr, value_expr) in &self.dyns {
let key = ctx.get_ir(*key_expr).compile(ctx);
let value_code = ctx.get_ir(*value_expr).compile(ctx);
let value = if stack_trace_enabled {
let value_span = encode_span(ctx.get_ir(*value_expr).span(), ctx);
if !self.dyns.is_empty() {
let (keys, vals, dyn_spans) = self
.dyns
.iter()
.map(|(key, val, attr_span)| {
let key = ctx.get_ir(*key).compile(ctx);
let val_expr = ctx.get_ir(*val);
let val = val_expr.compile(ctx);
let span = val_expr.span();
let val = if stack_trace_enabled {
let span = encode_span(span, ctx);
format!(
"Nix.withContext(\"while evaluating a dynamic attribute\",{},()=>({}))",
value_span, value_code
span, val
)
} else {
value_code
val
};
attrs.push(format!("[{}]:{}", key, value));
}
let dyn_span_str = encode_span(*attr_span, ctx);
(key, val, dyn_span_str)
})
.multiunzip::<(Vec<_>, Vec<_>, Vec<_>)>();
format!(
"Nix.mkAttrsWithPos({{{}}},{{{}}},{{dynKeys:[{}],dynVals:[{}],dynSpans:[{}]}})",
attrs.join(","),
attr_positions.join(","),
keys.join(","),
vals.join(","),
dyn_spans.join(",")
)
} else if !attr_positions.is_empty() {
format!(
"Nix.mkAttrsWithPos({{{}}},{{{}}})",
attrs.join(","),
attr_positions.join(",")
)
} else {
format!("{{{}}}", attrs.join(","))
}
}
}
impl<Ctx: CodegenContext> Compile<Ctx> for List {
fn compile(&self, ctx: &Ctx) -> String {
@@ -431,8 +471,8 @@ impl<Ctx: CodegenContext> Compile<Ctx> for HasAttr {
.rhs
.iter()
.map(|attr| match attr {
Attr::Str(sym) => ctx.get_sym(*sym).escape_quote(),
Attr::Dynamic(expr_id) => ctx.get_ir(*expr_id).compile(ctx),
Attr::Str(sym, _) => ctx.get_sym(*sym).escape_quote(),
Attr::Dynamic(expr_id, _) => ctx.get_ir(*expr_id).compile(ctx),
})
.join(",");
format!("Nix.hasAttr({lhs},[{attrpath}])")

View File

@@ -11,51 +11,11 @@ use crate::codegen::{CodegenContext, compile};
use crate::error::{Error, Result, Source};
use crate::ir::{
Arg, ArgId, Bool, Builtin, Downgrade as _, DowngradeContext, ExprId, ExprRef, Ir, Null, SymId,
ToIr as _, synthetic_span,
Thunk, ToIr as _, synthetic_span,
};
use crate::runtime::{Runtime, RuntimeContext};
use crate::store::{StoreBackend, StoreConfig};
use crate::store::{Store, StoreBackend, StoreConfig};
use crate::value::Value;
use std::sync::Arc;
mod private {
use super::*;
use std::ptr::NonNull;
pub struct CtxPtr(NonNull<Ctx>);
impl CtxPtr {
pub fn new(ctx: &mut Ctx) -> Self {
unsafe { CtxPtr(NonNull::new_unchecked(ctx)) }
}
fn as_ref(&self) -> &Ctx {
// SAFETY: This is safe since inner `NonNull<Ctx>` is obtained from `&mut Ctx`
unsafe { self.0.as_ref() }
}
fn as_mut(&mut self) -> &mut Ctx {
// SAFETY: This is safe since inner `NonNull<Ctx>` is obtained from `&mut Ctx`
unsafe { self.0.as_mut() }
}
}
impl RuntimeContext for CtxPtr {
fn get_current_dir(&self) -> &Path {
self.as_ref().get_current_dir()
}
fn add_source(&mut self, source: Source) {
self.as_mut().sources.push(source);
}
fn compile_code(&mut self, source: Source) -> Result<String> {
self.as_mut().compile_code(source)
}
fn get_current_source(&self) -> Source {
self.as_ref().get_current_source()
}
fn get_source(&self, id: usize) -> Source {
self.as_ref().get_source(id)
}
}
}
use private::CtxPtr;
#[derive(Debug)]
pub(crate) struct SccInfo {
@@ -65,7 +25,7 @@ pub(crate) struct SccInfo {
pub struct Context {
ctx: Ctx,
runtime: Runtime<CtxPtr>,
runtime: Runtime<Ctx>,
}
impl Context {
@@ -82,14 +42,9 @@ impl Context {
tracing::debug!("Compiling code");
let code = self.compile_code(source)?;
self.runtime
.op_state()
.borrow_mut()
.put(self.ctx.store.clone());
tracing::debug!("Executing JavaScript");
self.runtime
.eval(format!("Nix.force({code})"), CtxPtr::new(&mut self.ctx))
.eval(format!("Nix.force({code})"), &mut self.ctx)
}
pub fn compile_code(&mut self, source: Source) -> Result<String> {
@@ -98,7 +53,7 @@ impl Context {
#[allow(dead_code)]
pub(crate) fn eval_js(&mut self, code: String) -> Result<Value> {
self.runtime.eval(code, CtxPtr::new(&mut self.ctx))
self.runtime.eval(code, &mut self.ctx)
}
pub fn get_store_dir(&self) -> &str {
@@ -111,7 +66,7 @@ pub(crate) struct Ctx {
symbols: DefaultStringInterner,
global: NonNull<HashMap<SymId, ExprId>>,
sources: Vec<Source>,
store: Arc<StoreBackend>,
store: StoreBackend,
}
impl Ctx {
@@ -200,7 +155,7 @@ impl Ctx {
}
let config = StoreConfig::from_env();
let store = Arc::new(StoreBackend::new(config)?);
let store = StoreBackend::new(config)?;
Ok(Self {
symbols,
@@ -210,9 +165,7 @@ impl Ctx {
store,
})
}
}
impl Ctx {
pub(crate) fn downgrade_ctx<'a>(&'a mut self) -> DowngradeCtx<'a> {
let global_ref = unsafe { self.global.as_ref() };
DowngradeCtx::new(self, global_ref)
@@ -277,11 +230,32 @@ impl CodegenContext for Ctx {
.checked_sub(1)
.expect("current_source not set")
}
fn get_current_source(&self) -> crate::error::Source {
self.sources.last().expect("current_source not set").clone()
}
fn get_store_dir(&self) -> &str {
self.store.as_store().get_store_dir()
}
}
impl RuntimeContext for Ctx {
fn get_current_dir(&self) -> &Path {
self.get_current_dir()
}
fn add_source(&mut self, source: Source) {
self.sources.push(source);
}
fn compile_code(&mut self, source: Source) -> Result<String> {
self.compile_code(source)
}
fn get_source(&self, id: usize) -> Source {
self.get_source(id)
}
fn get_store(&self) -> &dyn Store {
self.store.as_store()
}
}
struct DependencyTracker {
graph: DiGraphMap<ExprId, ()>,
current_binding: Option<ExprId>,
@@ -351,6 +325,38 @@ impl DowngradeContext for DowngradeCtx<'_> {
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
}
fn get_ir(&self, id: ExprId) -> &Ir {
if id.0 < self.ctx.irs.len() {
self.ctx.irs.get(id.0).expect("unreachable")
} else {
self.irs
.get(id.0 - self.ctx.irs.len())
.expect("ExprId out of bounds")
.as_ref()
.expect("maybe_thunk called on an extracted expr")
}
}
fn maybe_thunk(&mut self, id: ExprId) -> ExprId {
let ir = self.get_ir(id);
match ir {
Ir::Builtin(_)
| Ir::Builtins(_)
| Ir::Int(_)
| Ir::Float(_)
| Ir::Bool(_)
| Ir::Null(_)
| Ir::Str(_) => id,
_ => self.new_expr(
Thunk {
inner: id,
span: ir.span(),
}
.to_ir(),
),
}
}
fn new_sym(&mut self, sym: String) -> SymId {
self.ctx.symbols.get_or_intern(sym)
}
@@ -435,20 +441,22 @@ impl DowngradeContext for DowngradeCtx<'_> {
use crate::ir::{Attr, Select};
let select = Select {
expr: namespace,
attrpath: vec![Attr::Str(sym)],
attrpath: vec![Attr::Str(sym, synthetic_span())],
default: result, // Link to outer With or None
span,
};
result = Some(self.new_expr(select.to_ir()));
}
result.ok_or_else(|| {
Error::downgrade_error(format!("'{}' not found", self.get_sym(sym)))
.with_span(span)
.with_source(self.get_current_source())
Error::downgrade_error(
format!("'{}' not found", self.get_sym(sym)),
self.get_current_source(),
span,
)
})
}
fn extract_expr(&mut self, id: ExprId) -> Ir {
fn extract_ir(&mut self, id: ExprId) -> Ir {
let local_id = id.0 - self.ctx.irs.len();
self.irs
.get_mut(local_id)
@@ -457,7 +465,7 @@ impl DowngradeContext for DowngradeCtx<'_> {
.expect("extract_expr called on an already extracted expr")
}
fn replace_expr(&mut self, id: ExprId, expr: Ir) {
fn replace_ir(&mut self, id: ExprId, expr: Ir) {
let local_id = id.0 - self.ctx.irs.len();
let _ = self
.irs

View File

@@ -1,11 +1,15 @@
#![allow(unused_assignments)]
use std::path::{Path, PathBuf};
use std::sync::Arc;
use deno_core::error::JsError;
use deno_error::JsErrorClass as _;
use itertools::Itertools as _;
use miette::{Diagnostic, NamedSource, SourceSpan};
use std::{
path::{Path, PathBuf},
sync::Arc,
};
use thiserror::Error;
use crate::{context::Ctx, runtime::RuntimeContext};
use crate::runtime::RuntimeContext;
pub type Result<T> = core::result::Result<T, Box<Error>>;
@@ -34,11 +38,7 @@ impl TryFrom<&str> for Source {
impl From<Source> for NamedSource<Arc<str>> {
fn from(value: Source) -> Self {
let name = match value.ty {
SourceType::Eval(_) => "«eval»".into(),
SourceType::Repl(_) => "«repl»".into(),
SourceType::File(path) => path.as_os_str().to_string_lossy().to_string(),
};
let name = value.get_name();
NamedSource::new(name, value.src.clone())
}
}
@@ -74,6 +74,14 @@ impl Source {
.expect("source file must have a parent dir"),
}
}
pub fn get_name(&self) -> String {
match &self.ty {
SourceType::Eval(_) => "«eval»".into(),
SourceType::Repl(_) => "«repl»".into(),
SourceType::File(path) => path.as_os_str().to_string_lossy().to_string(),
}
}
}
#[derive(Error, Debug, Diagnostic)]
@@ -108,6 +116,8 @@ pub enum Error {
message: String,
#[help]
js_backtrace: Option<String>,
#[related]
stack_trace: Vec<StackFrame>,
},
#[error("Internal error: {message}")]
@@ -133,10 +143,10 @@ impl Error {
.into()
}
pub fn downgrade_error(msg: String) -> Box<Self> {
pub fn downgrade_error(msg: String, src: Source, span: rnix::TextRange) -> Box<Self> {
Error::DowngradeError {
src: None,
span: None,
src: Some(src.into()),
span: Some(text_range_to_source_span(span)),
message: msg,
}
.into()
@@ -148,6 +158,7 @@ impl Error {
span: None,
message: msg,
js_backtrace: backtrace,
stack_trace: Vec::new(),
}
.into()
}
@@ -192,14 +203,72 @@ pub fn text_range_to_source_span(range: rnix::TextRange) -> SourceSpan {
}
/// Stack frame types from Nix evaluation
#[derive(Debug, Clone)]
pub(crate) struct NixStackFrame {
pub span: rnix::TextRange,
#[derive(Debug, Clone, Error, Diagnostic)]
#[error("{message}")]
pub struct StackFrame {
#[label]
pub span: SourceSpan,
#[help]
pub message: String,
pub source: Source,
#[source_code]
pub src: NamedSource<Arc<str>>,
}
pub(crate) fn parse_nix_stack(stack: &str, ctx: &impl RuntimeContext) -> Vec<NixStackFrame> {
const MAX_STACK_FRAMES: usize = 10;
const FRAMES_AT_START: usize = 5;
const FRAMES_AT_END: usize = 5;
pub(crate) fn parse_js_error(error: Box<JsError>, ctx: &impl RuntimeContext) -> Error {
let (span, src, frames) = if let Some(stack) = &error.stack {
let mut frames = parse_frames(stack, ctx);
if let Some(last_frame) = frames.pop() {
(
Some(text_range_to_source_span(last_frame.span)),
Some(last_frame.src.into()),
frames,
)
} else {
(None, None, frames)
}
} else {
(None, None, Vec::new())
};
let stack_trace = truncate_stack_trace(frames);
let message = error.get_message().to_string();
let js_backtrace = error.stack.map(|stack| {
stack
.lines()
.filter(|line| !line.starts_with("NIX_STACK_FRAME:"))
.join("\n")
});
Error::EvalError {
src,
span,
message,
js_backtrace,
stack_trace,
}
}
struct NixStackFrame {
span: rnix::TextRange,
message: String,
src: Source,
}
impl From<NixStackFrame> for StackFrame {
fn from(NixStackFrame { span, message, src }: NixStackFrame) -> Self {
StackFrame {
span: text_range_to_source_span(span),
message,
src: src.into(),
}
}
}
fn parse_frames(stack: &str, ctx: &impl RuntimeContext) -> Vec<NixStackFrame> {
let mut frames = Vec::new();
for line in stack.lines() {
@@ -213,7 +282,7 @@ pub(crate) fn parse_nix_stack(stack: &str, ctx: &impl RuntimeContext) -> Vec<Nix
continue;
}
let source = match parts[0].parse() {
let src = match parts[0].parse() {
Ok(id) => ctx.get_source(id),
Err(_) => continue,
};
@@ -236,11 +305,7 @@ pub(crate) fn parse_nix_stack(stack: &str, ctx: &impl RuntimeContext) -> Vec<Nix
}
};
frames.push(NixStackFrame {
span,
message,
source,
});
frames.push(NixStackFrame { span, message, src });
}
// Deduplicate consecutive identical frames
@@ -248,3 +313,34 @@ pub(crate) fn parse_nix_stack(stack: &str, ctx: &impl RuntimeContext) -> Vec<Nix
frames
}
fn truncate_stack_trace(frames: Vec<NixStackFrame>) -> Vec<StackFrame> {
let reversed: Vec<_> = frames.into_iter().rev().collect();
let total = reversed.len();
if total <= MAX_STACK_FRAMES {
return reversed.into_iter().map(Into::into).collect();
}
let omitted_count = total - FRAMES_AT_START - FRAMES_AT_END;
reversed
.into_iter()
.enumerate()
.filter_map(|(i, frame)| {
if i < FRAMES_AT_START {
Some(frame.into())
} else if i == FRAMES_AT_START {
Some(StackFrame {
span: text_range_to_source_span(frame.span),
message: format!("... ({} more frames omitted)", omitted_count),
src: frame.src.into(),
})
} else if i >= total - FRAMES_AT_END {
Some(frame.into())
} else {
None
}
})
.collect()
}

View File

@@ -3,6 +3,9 @@ use deno_core::op2;
use serde::Serialize;
use tracing::{debug, info, warn};
use crate::runtime::OpStateExt;
use crate::runtime::RuntimeContext;
mod archive;
pub(crate) mod cache;
mod download;
@@ -53,16 +56,13 @@ pub struct FetchHgResult {
#[op2]
#[serde]
pub fn op_fetch_url(
pub fn op_fetch_url<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] url: String,
#[string] expected_hash: Option<String>,
#[string] name: Option<String>,
executable: bool,
) -> Result<FetchUrlResult, NixError> {
use crate::store::StoreBackend;
use std::sync::Arc;
let _span = tracing::info_span!("op_fetch_url", url = %url).entered();
info!("fetchurl started");
@@ -128,9 +128,9 @@ pub fn op_fetch_url(
}
}
let store = state.borrow::<Arc<StoreBackend>>();
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_path = store
.as_store()
.add_to_store(&file_name, &data, false, vec![])
.map_err(|e| NixError::from(e.to_string()))?;
@@ -160,16 +160,13 @@ pub fn op_fetch_url(
#[op2]
#[serde]
pub fn op_fetch_tarball(
pub fn op_fetch_tarball<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] url: String,
#[string] expected_hash: Option<String>,
#[string] expected_nar_hash: Option<String>,
#[string] name: Option<String>,
) -> Result<FetchTarballResult, NixError> {
use crate::store::StoreBackend;
use std::sync::Arc;
let _span = tracing::info_span!("op_fetch_tarball", url = %url).entered();
info!("fetchTarball started");
@@ -264,9 +261,9 @@ pub fn op_fetch_tarball(
}
info!("Adding to store");
let store = state.borrow::<Arc<StoreBackend>>();
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_path = store
.as_store()
.add_to_store_from_path(&dir_name, &extracted_path, vec![])
.map_err(|e| NixError::from(e.to_string()))?;
@@ -292,7 +289,7 @@ pub fn op_fetch_tarball(
#[op2]
#[serde]
pub fn op_fetch_git(
pub fn op_fetch_git<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] url: String,
#[string] git_ref: Option<String>,
@@ -302,19 +299,17 @@ pub fn op_fetch_git(
all_refs: bool,
#[string] name: Option<String>,
) -> Result<FetchGitResult, NixError> {
use crate::store::StoreBackend;
use std::sync::Arc;
let _span = tracing::info_span!("op_fetch_git", url = %url).entered();
info!("fetchGit started");
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let dir_name = name.unwrap_or_else(|| "source".to_string());
let store = state.borrow::<Arc<StoreBackend>>();
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
git::fetch_git(
&cache,
store.as_store(),
store,
&url,
git_ref.as_deref(),
rev.as_deref(),
@@ -377,11 +372,11 @@ fn base64_decode(input: &str) -> Result<Vec<u8>, String> {
Ok(output)
}
pub fn register_ops() -> Vec<deno_core::OpDecl> {
pub fn register_ops<Ctx: RuntimeContext>() -> Vec<deno_core::OpDecl> {
vec![
op_fetch_url(),
op_fetch_tarball(),
op_fetch_git(),
op_fetch_url::<Ctx>(),
op_fetch_tarball::<Ctx>(),
op_fetch_git::<Ctx>(),
op_fetch_hg(),
]
}

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use rusqlite::{Connection, OptionalExtension, params};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;

View File

@@ -1,5 +1,5 @@
use derive_more::{IsVariant, TryUnwrap, Unwrap};
use hashbrown::{HashMap, HashSet};
use hashbrown::HashMap;
use rnix::{TextRange, ast};
use string_interner::symbol::SymbolU32;
@@ -22,13 +22,15 @@ pub trait DowngradeContext {
fn new_expr(&mut self, expr: Ir) -> ExprId;
fn new_arg(&mut self, span: TextRange) -> ExprId;
fn maybe_thunk(&mut self, id: ExprId) -> ExprId;
fn new_sym(&mut self, sym: String) -> SymId;
fn get_sym(&self, id: SymId) -> &str;
fn lookup(&mut self, sym: SymId, span: TextRange) -> Result<ExprId>;
fn extract_expr(&mut self, id: ExprId) -> Ir;
fn replace_expr(&mut self, id: ExprId, expr: Ir);
fn get_ir(&self, id: ExprId) -> &Ir;
fn extract_ir(&mut self, id: ExprId) -> Ir;
fn replace_ir(&mut self, id: ExprId, expr: Ir);
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<Self>;
fn get_current_source(&self) -> Source;
@@ -57,7 +59,7 @@ ir! {
Bool(bool),
Null,
Str { pub val: String },
AttrSet { pub stcs: HashMap<SymId, ExprId>, pub dyns: Vec<(ExprId, ExprId)> },
AttrSet { pub stcs: HashMap<SymId, (ExprId, rnix::TextRange)>, pub dyns: Vec<(ExprId, ExprId, rnix::TextRange)> },
List { pub items: Vec<ExprId> },
HasAttr { pub lhs: ExprId, pub rhs: Vec<Attr> },
@@ -76,6 +78,7 @@ ir! {
Thunk(ExprId),
Builtins,
Builtin(SymId),
CurPos,
}
impl Ir {
@@ -104,6 +107,7 @@ impl Ir {
Ir::Thunk(t) => t.span,
Ir::Builtins(b) => b.span,
Ir::Builtin(b) => b.span,
Ir::CurPos(c) => c.span,
}
}
}
@@ -119,11 +123,11 @@ impl AttrSet {
if let Some(attr) = path.next() {
// If the path is not yet exhausted, we need to recurse deeper.
match attr {
Attr::Str(ident) => {
Attr::Str(ident, span) => {
// If the next attribute is a static string.
if let Some(&id) = self.stcs.get(&ident) {
if let Some(&(id, _)) = self.stcs.get(&ident) {
// If a sub-attrset already exists, recurse into it.
let mut ir = ctx.extract_expr(id);
let mut ir = ctx.extract_ir(id);
let result = ir
.as_mut()
.try_unwrap_attr_set()
@@ -131,11 +135,14 @@ impl AttrSet {
// This path segment exists but is not an attrset, which is an error.
Error::downgrade_error(format!(
"attribute '{}' already defined but is not an attribute set",
format_symbol(ctx.get_sym(ident))
))
format_symbol(ctx.get_sym(ident)),
),
ctx.get_current_source(),
span
)
})
.and_then(|attrs| attrs._insert(path, name, value, ctx));
ctx.replace_expr(id, ir);
ctx.replace_ir(id, ir);
result?;
} else {
// Create a new sub-attrset because this path doesn't exist yet.
@@ -143,41 +150,45 @@ impl AttrSet {
let mut attrs = AttrSet {
stcs: Default::default(),
dyns: Default::default(),
span: synthetic_span(),
span,
};
attrs._insert(path, name, value, ctx)?;
let attrs = ctx.new_expr(attrs.to_ir());
self.stcs.insert(ident, attrs);
let attrs_expr = ctx.new_expr(attrs.to_ir());
self.stcs.insert(ident, (attrs_expr, span));
}
Ok(())
}
Attr::Dynamic(dynamic) => {
Attr::Dynamic(dynamic, span) => {
// If the next attribute is a dynamic expression, we must create a new sub-attrset.
// We cannot merge with existing dynamic attributes at this stage.
// FIXME: span
let mut attrs = AttrSet {
stcs: Default::default(),
dyns: Default::default(),
span: synthetic_span(),
span,
};
attrs._insert(path, name, value, ctx)?;
self.dyns.push((dynamic, ctx.new_expr(attrs.to_ir())));
self.dyns.push((dynamic, ctx.new_expr(attrs.to_ir()), span));
Ok(())
}
}
} else {
// This is the final attribute in the path, so insert the value here.
match name {
Attr::Str(ident) => {
if self.stcs.insert(ident, value).is_some() {
return Err(Error::downgrade_error(format!(
Attr::Str(ident, span) => {
if self.stcs.insert(ident, (value, span)).is_some() {
return Err(Error::downgrade_error(
format!(
"attribute '{}' already defined",
format_symbol(ctx.get_sym(ident))
)));
format_symbol(ctx.get_sym(ident)),
),
ctx.get_current_source(),
span,
));
}
}
Attr::Dynamic(dynamic) => {
self.dyns.push((dynamic, value));
Attr::Dynamic(dynamic, span) => {
self.dyns.push((dynamic, value, span));
}
}
Ok(())
@@ -214,10 +225,10 @@ pub struct ArgId(pub usize);
pub enum Attr {
/// A dynamic attribute key, which is an expression that must evaluate to a string.
/// Example: `attrs.${key}`
Dynamic(ExprId),
Dynamic(ExprId, TextRange),
/// A static attribute key.
/// Example: `attrs.key`
Str(SymId),
Str(SymId, TextRange),
}
/// The kinds of binary operations supported in Nix.

View File

@@ -19,9 +19,11 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for Expr {
Assert(assert) => assert.downgrade(ctx),
Error(error) => {
let span = error.syntax().text_range();
Err(self::Error::downgrade_error(error.to_string())
.with_span(span)
.with_source(ctx.get_current_source()))
Err(self::Error::downgrade_error(
error.to_string(),
ctx.get_current_source(),
span,
))
}
IfElse(ifelse) => ifelse.downgrade(ctx),
Select(select) => select.downgrade(ctx),
@@ -101,7 +103,41 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Path {
.collect::<Result<Vec<_>>>()?;
let expr = if parts.len() == 1 {
parts.into_iter().next().unwrap()
let part = parts.into_iter().next().unwrap();
if let &Ir::Str(Str { ref val, span }) = ctx.get_ir(part)
&& let Some(path) = val.strip_prefix("<").map(|path| &path[..path.len() - 1])
{
ctx.replace_ir(
part,
Str {
val: path.to_string(),
span,
}
.to_ir(),
);
let sym = ctx.new_sym("findFile".into());
let find_file = ctx.new_expr(Builtin { inner: sym, span }.to_ir());
let sym = ctx.new_sym("nixPath".into());
let nix_path = ctx.new_expr(Builtin { inner: sym, span }.to_ir());
let call = ctx.new_expr(
Call {
func: find_file,
arg: nix_path,
span,
}
.to_ir(),
);
return Ok(ctx.new_expr(
Call {
func: call,
arg: part,
span,
}
.to_ir(),
));
} else {
part
}
} else {
ctx.new_expr(ConcatStrings { parts, span }.to_ir())
};
@@ -121,7 +157,8 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Str {
.map(|part| match part {
ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(Str { val: lit, span }.to_ir())),
ast::InterpolPart::Interpolation(interpol) => {
interpol.expr().unwrap().downgrade(ctx)
let inner = interpol.expr().unwrap().downgrade(ctx)?;
Ok(ctx.new_expr(Thunk { inner, span }.to_ir()))
}
})
.collect::<Result<Vec<_>>>()?;
@@ -159,9 +196,15 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Literal {
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Ident {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let sym = self.ident_token().unwrap().to_string();
let sym = ctx.new_sym(sym);
ctx.lookup(sym, self.syntax().text_range())
let text = self.ident_token().unwrap().to_string();
let span = self.syntax().text_range();
if text == "__curPos" {
return Ok(ctx.new_expr(CurPos { span }.to_ir()));
}
let sym = ctx.new_sym(text);
ctx.lookup(sym, span)
}
}
@@ -188,7 +231,7 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::AttrSet {
for sym in binding_keys {
// FIXME: span
let expr = ctx.lookup(*sym, synthetic_span())?;
attrs.stcs.insert(*sym, expr);
attrs.stcs.insert(*sym, (expr, synthetic_span()));
}
Ok(ctx.new_expr(attrs.to_ir()))
@@ -210,7 +253,10 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::List {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let items = self
.items()
.map(|item| maybe_thunk(item, ctx))
.map(|item| {
let id = item.downgrade(ctx)?;
Ok(ctx.maybe_thunk(id))
})
.collect::<Result<_>>()?;
let span = self.syntax().text_range();
Ok(ctx.new_expr(List { items, span }.to_ir()))
@@ -307,7 +353,7 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet {
for sym in binding_keys {
// FIXME: span
let expr = ctx.lookup(sym, synthetic_span())?;
attrs.stcs.insert(sym, expr);
attrs.stcs.insert(sym, (expr, synthetic_span()));
}
Result::Ok(ctx.new_expr(attrs.to_ir()))
@@ -316,7 +362,7 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet {
let body_sym = ctx.new_sym("body".to_string());
let select = Select {
expr: attrset_expr,
attrpath: vec![Attr::Str(body_sym)],
attrpath: vec![Attr::Str(body_sym, synthetic_span())],
default: None,
span,
};
@@ -393,14 +439,9 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
scc_info,
required,
optional,
} = downgrade_pattern_bindings(
pat_entries,
alias,
arg,
ellipsis,
ctx,
|ctx, _| self.body().unwrap().downgrade(ctx),
)?;
} = downgrade_pattern_bindings(pat_entries, alias, arg, ctx, |ctx, _| {
self.body().unwrap().downgrade(ctx)
})?;
param = Some(Param {
required,
@@ -439,7 +480,8 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Apply {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let func = self.lambda().unwrap().downgrade(ctx)?;
let arg = maybe_thunk(self.argument().unwrap(), ctx)?;
let arg = self.argument().unwrap().downgrade(ctx)?;
let arg = ctx.maybe_thunk(arg);
let span = self.syntax().text_range();
Ok(ctx.new_expr(Call { func, arg, span }.to_ir()))
}

View File

@@ -1,3 +1,5 @@
#![allow(unused)]
use rnix::TextRange;
pub fn merge_spans(spans: impl IntoIterator<Item = TextRange>) -> TextRange {
@@ -11,10 +13,6 @@ pub fn merge_spans(spans: impl IntoIterator<Item = TextRange>) -> TextRange {
})
}
pub fn point_span() -> TextRange {
TextRange::new(0.into(), 0.into())
}
pub fn synthetic_span() -> TextRange {
TextRange::new(0.into(), 0.into())
}

View File

@@ -13,57 +13,6 @@ use crate::value::format_symbol;
use super::*;
pub fn maybe_thunk(mut expr: ast::Expr, ctx: &mut impl DowngradeContext) -> Result<ExprId> {
use ast::Expr::*;
let expr = loop {
expr = match expr {
Paren(paren) => paren.expr().unwrap(),
Root(root) => root.expr().unwrap(),
expr => break expr,
}
};
match expr {
Error(error) => {
let span = error.syntax().text_range();
return Err(self::Error::downgrade_error(error.to_string())
.with_span(span)
.with_source(ctx.get_current_source()));
}
Ident(ident) => return ident.downgrade(ctx),
Literal(lit) => return lit.downgrade(ctx),
Str(str) => return str.downgrade(ctx),
Path(path) => return path.downgrade(ctx),
_ => (),
}
let id = match expr {
Apply(apply) => apply.downgrade(ctx),
Assert(assert) => assert.downgrade(ctx),
IfElse(ifelse) => ifelse.downgrade(ctx),
Select(select) => select.downgrade(ctx),
Lambda(lambda) => lambda.downgrade(ctx),
LegacyLet(let_) => let_.downgrade(ctx),
LetIn(letin) => letin.downgrade(ctx),
List(list) => list.downgrade(ctx),
BinOp(op) => op.downgrade(ctx),
AttrSet(attrs) => attrs.downgrade(ctx),
UnaryOp(op) => op.downgrade(ctx),
With(with) => with.downgrade(ctx),
HasAttr(has) => has.downgrade(ctx),
_ => unreachable!(),
}?;
Ok(ctx.new_expr(
Thunk {
inner: id,
// span: ctx.get_span(id),
// FIXME: span
span: synthetic_span(),
}
.to_ir(),
))
}
/// Downgrades the entries of an attribute set.
/// This handles `inherit` and `attrpath = value;` entries.
pub fn downgrade_attrs(
@@ -110,7 +59,7 @@ pub fn downgrade_static_attrs(
}
}
Ok(attrs.stcs)
Ok(attrs.stcs.into_iter().map(|(k, (v, _))| (k, v)).collect())
}
/// Downgrades an `inherit` statement.
@@ -118,7 +67,7 @@ pub fn downgrade_static_attrs(
/// `inherit a b;` is translated into `a = a; b = b;` (i.e., bringing variables into scope).
pub fn downgrade_inherit(
inherit: ast::Inherit,
stcs: &mut HashMap<SymId, ExprId>,
stcs: &mut HashMap<SymId, (ExprId, rnix::TextRange)>,
ctx: &mut impl DowngradeContext,
) -> Result<()> {
// Downgrade the `from` expression if it exists.
@@ -130,21 +79,21 @@ pub fn downgrade_inherit(
for attr in inherit.attrs() {
let span = attr.syntax().text_range();
let ident = match downgrade_attr(attr, ctx)? {
Attr::Str(ident) => ident,
Attr::Str(ident, _) => ident,
_ => {
// `inherit` does not allow dynamic attributes.
return Err(Error::downgrade_error(
"dynamic attributes not allowed in inherit".to_string(),
)
.with_span(span)
.with_source(ctx.get_current_source()));
ctx.get_current_source(),
span,
));
}
};
let expr = if let Some(expr) = from {
let select_expr = ctx.new_expr(
Select {
expr,
attrpath: vec![Attr::Str(ident)],
attrpath: vec![Attr::Str(ident, span)],
default: None,
span,
}
@@ -162,14 +111,18 @@ pub fn downgrade_inherit(
};
match stcs.entry(ident) {
Entry::Occupied(occupied) => {
return Err(Error::downgrade_error(format!(
return Err(Error::downgrade_error(
format!(
"attribute '{}' already defined",
format_symbol(ctx.get_sym(*occupied.key()))
))
),
ctx.get_current_source(),
span,
)
.with_span(span)
.with_source(ctx.get_current_source()));
}
Entry::Vacant(vacant) => vacant.insert(expr),
Entry::Vacant(vacant) => vacant.insert((expr, span)),
};
}
Ok(())
@@ -180,20 +133,24 @@ pub fn downgrade_inherit(
pub fn downgrade_attr(attr: ast::Attr, ctx: &mut impl DowngradeContext) -> Result<Attr> {
use ast::Attr::*;
use ast::InterpolPart::*;
let span = attr.syntax().text_range();
match attr {
Ident(ident) => Ok(Attr::Str(ctx.new_sym(ident.to_string()))),
Ident(ident) => Ok(Attr::Str(
ctx.new_sym(ident.to_string()),
ident.syntax().text_range(),
)),
Str(string) => {
let parts = string.normalized_parts();
let span = string.syntax().text_range();
if parts.is_empty() {
Ok(Attr::Str(ctx.new_sym("".to_string())))
Ok(Attr::Str(ctx.new_sym("".to_string()), span))
} else if parts.len() == 1 {
// If the string has only one part, it's either a literal or a single interpolation.
match parts.into_iter().next().unwrap() {
Literal(ident) => Ok(Attr::Str(ctx.new_sym(ident))),
Interpolation(interpol) => {
Ok(Attr::Dynamic(interpol.expr().unwrap().downgrade(ctx)?))
}
Literal(ident) => Ok(Attr::Str(ctx.new_sym(ident), span)),
Interpolation(interpol) => Ok(Attr::Dynamic(
interpol.expr().unwrap().downgrade(ctx)?,
span,
)),
}
} else {
// If the string has multiple parts, it's an interpolated string that must be concatenated.
@@ -206,10 +163,14 @@ pub fn downgrade_attr(attr: ast::Attr, ctx: &mut impl DowngradeContext) -> Resul
.collect::<Result<Vec<_>>>()?;
Ok(Attr::Dynamic(
ctx.new_expr(ConcatStrings { parts, span }.to_ir()),
span,
))
}
}
Dynamic(dynamic) => Ok(Attr::Dynamic(dynamic.expr().unwrap().downgrade(ctx)?)),
Dynamic(dynamic) => Ok(Attr::Dynamic(
dynamic.expr().unwrap().downgrade(ctx)?,
dynamic.syntax().text_range(),
)),
}
}
@@ -231,7 +192,8 @@ pub fn downgrade_attrpathvalue(
ctx: &mut impl DowngradeContext,
) -> Result<()> {
let path = downgrade_attrpath(value.attrpath().unwrap(), ctx)?;
let value = maybe_thunk(value.value().unwrap(), ctx)?;
let value = value.value().unwrap().downgrade(ctx)?;
let value = ctx.maybe_thunk(value);
attrs.insert(path, value, ctx)
}
@@ -244,12 +206,14 @@ pub fn downgrade_static_attrpathvalue(
) -> Result<()> {
let attrpath_node = value.attrpath().unwrap();
let path = downgrade_attrpath(attrpath_node.clone(), ctx)?;
if path.iter().any(|attr| matches!(attr, Attr::Dynamic(_))) {
if let Some(&Attr::Dynamic(_, span)) =
path.iter().find(|attr| matches!(attr, Attr::Dynamic(..)))
{
return Err(Error::downgrade_error(
"dynamic attributes not allowed in let bindings".to_string(),
)
.with_span(attrpath_node.syntax().text_range())
.with_source(ctx.get_current_source()));
ctx.get_current_source(),
span,
));
}
let value = value.value().unwrap().downgrade(ctx)?;
attrs.insert(path, value, ctx)
@@ -275,47 +239,53 @@ pub fn downgrade_pattern_bindings<Ctx>(
pat_entries: impl Iterator<Item = ast::PatEntry>,
alias: Option<SymId>,
arg: ExprId,
has_ellipsis: bool,
ctx: &mut Ctx,
body_fn: impl FnOnce(&mut Ctx, &[SymId]) -> Result<ExprId>,
) -> Result<PatternBindings>
where
Ctx: DowngradeContext,
{
let mut param_syms = Vec::new();
let mut param_defaults = Vec::new();
let mut param_spans = Vec::new();
struct Param {
sym: SymId,
sym_span: TextRange,
default: Option<ast::Expr>,
span: TextRange,
}
let mut seen_params = HashSet::new();
for entry in pat_entries {
let sym = ctx.new_sym(entry.ident().unwrap().to_string());
let span = entry.ident().unwrap().syntax().text_range();
let (params, mut binding_keys) = pat_entries
.into_iter()
.map(|entry| {
let ident = entry.ident().unwrap();
let sym_span = ident.syntax().text_range();
let sym = ctx.new_sym(ident.syntax().text().to_string());
let default = entry.default();
let span = entry.syntax().text_range();
if !seen_params.insert(sym) {
return Err(Error::downgrade_error(format!(
"duplicate parameter '{}'",
format_symbol(ctx.get_sym(sym))
return Err(Error::downgrade_error(
format!("duplicate parameter '{}'", format_symbol(ctx.get_sym(sym))),
ctx.get_current_source(),
span,
));
}
Ok((
Param {
sym,
sym_span,
default,
span,
},
sym,
))
.with_span(span)
.with_source(ctx.get_current_source()));
}
})
.collect::<Result<(Vec<_>, Vec<_>)>>()?;
let default_ast = entry.default();
param_syms.push(sym);
param_defaults.push(default_ast);
param_spans.push(span);
}
let mut binding_keys: Vec<SymId> = param_syms.clone();
if let Some(alias_sym) = alias {
binding_keys.push(alias_sym);
}
let (required, optional) =
param_syms
.iter()
.zip(param_defaults.iter())
.partition_map(|(&sym, default)| {
let (required, optional) = params.iter().partition_map(|Param { sym, default, .. }| {
use itertools::Either::*;
if default.is_none() {
Left(sym)
@@ -333,16 +303,18 @@ where
|ctx, sym_to_slot| {
let mut bindings = HashMap::new();
for ((sym, default_ast), span) in param_syms
.iter()
.zip(param_defaults.iter())
.zip(param_spans.iter())
for Param {
sym,
sym_span,
default,
span,
} in params
{
let slot = *sym_to_slot.get(sym).unwrap();
let slot = *sym_to_slot.get(&sym).unwrap();
ctx.set_current_binding(Some(slot));
let default = if let Some(default_expr) = default_ast {
Some(default_expr.clone().downgrade(ctx)?)
let default = if let Some(default) = default {
Some(default.clone().downgrade(ctx)?)
} else {
None
};
@@ -350,13 +322,13 @@ where
let select_expr = ctx.new_expr(
Select {
expr: arg,
attrpath: vec![Attr::Str(*sym)],
attrpath: vec![Attr::Str(sym, sym_span)],
default,
span: *span,
span,
}
.to_ir(),
);
bindings.insert(*sym, select_expr);
bindings.insert(sym, select_expr);
ctx.set_current_binding(None);
}
@@ -433,23 +405,19 @@ where
for (sym, slot) in binding_keys.iter().copied().zip(slots.iter()) {
if let Some(&expr) = bindings.get(&sym) {
ctx.replace_expr(
ctx.replace_ir(
*slot,
Thunk {
inner: expr,
// span: ctx.get_span(expr),
// FIXME: span
span: synthetic_span(),
span: ctx.get_ir(expr).span(),
}
.to_ir(),
);
} else {
return Err(Error::downgrade_error(format!(
return Err(Error::internal(format!(
"binding '{}' not found",
format_symbol(ctx.get_sym(sym))
))
.with_span(synthetic_span())
.with_source(ctx.get_current_source()));
)));
}
}
@@ -484,12 +452,14 @@ where
if let ast::Attr::Ident(ident) = attr {
let sym = ctx.new_sym(ident.to_string());
if !binding_syms.insert(sym) {
return Err(Error::downgrade_error(format!(
return Err(Error::downgrade_error(
format!(
"attribute '{}' already defined",
format_symbol(ctx.get_sym(sym))
))
.with_span(ident.syntax().text_range())
.with_source(ctx.get_current_source()));
),
ctx.get_current_source(),
ident.syntax().text_range(),
));
}
}
}
@@ -504,12 +474,14 @@ where
if let Some(ast::Attr::Ident(ident)) = attrs_vec.first() {
let sym = ctx.new_sym(ident.to_string());
if !binding_syms.insert(sym) {
return Err(Error::downgrade_error(format!(
return Err(Error::downgrade_error(
format!(
"attribute '{}' already defined",
format_symbol(ctx.get_sym(sym))
))
.with_span(ident.syntax().text_range())
.with_source(ctx.get_current_source()));
),
ctx.get_current_source(),
ident.syntax().text_range(),
));
}
}
} else if attrs_vec.len() > 1 {
@@ -563,7 +535,11 @@ where
}
}
Ok(temp_attrs.stcs)
Ok(temp_attrs
.stcs
.into_iter()
.map(|(k, (v, _))| (k, v))
.collect())
},
body_fn,
)

View File

@@ -1,13 +1,13 @@
use std::borrow::Cow;
use std::marker::PhantomData;
use std::path::{Component, Path, PathBuf};
use std::sync::Once;
use std::sync::{Arc, Once};
use deno_core::{Extension, ExtensionFileSource, JsRuntime, OpState, RuntimeOptions, v8};
use deno_error::JsErrorClass;
use itertools::Itertools as _;
use rust_embed::Embed;
use crate::error::{Error, Result, Source};
use crate::store::Store;
use crate::value::{AttrSet, List, Symbol, Value};
type ScopeRef<'p, 's> = v8::PinnedRef<'p, v8::HandleScope<'s>>;
@@ -18,8 +18,24 @@ pub(crate) trait RuntimeContext: 'static {
fn get_current_dir(&self) -> &Path;
fn add_source(&mut self, path: Source);
fn compile_code(&mut self, source: Source) -> Result<String>;
fn get_current_source(&self) -> Source;
fn get_source(&self, id: usize) -> Source;
fn get_store(&self) -> &dyn Store;
}
/// Convenience accessors for the [`RuntimeContext`] that `Runtime::eval`
/// stores inside deno's `OpState` before running a script.
///
/// The context is stored as a `&'static mut Ctx` (see the unsafe lifetime
/// extension in `Runtime::eval`), so these helpers simply re-borrow it.
pub(crate) trait OpStateExt<Ctx: RuntimeContext> {
    /// Shared borrow of the current runtime context.
    fn get_ctx(&self) -> &Ctx;
    /// Exclusive borrow of the current runtime context.
    fn get_ctx_mut(&mut self) -> &mut Ctx;
}

impl<Ctx: RuntimeContext> OpStateExt<Ctx> for OpState {
    fn get_ctx(&self) -> &Ctx {
        // NOTE(review): the `&'static mut Ctx` is only truly valid for the
        // duration of the `eval` call that put it here — confirm no op
        // keeps this borrow past that call.
        self.try_borrow::<&'static mut Ctx>()
            .expect("RuntimeContext not set")
    }

    fn get_ctx_mut(&mut self) -> &mut Ctx {
        // Panics if `Runtime::eval` has not installed a context yet.
        self.try_borrow_mut::<&'static mut Ctx>()
            .expect("RuntimeContext not set")
    }
}
fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
@@ -28,19 +44,22 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
let mut ops = vec![
op_import::<Ctx>(),
op_read_file(),
op_read_file_type(),
op_read_dir(),
op_path_exists(),
op_resolve_path(),
op_sha256_hex(),
op_make_store_path(),
op_decode_span::<Ctx>(),
op_make_store_path::<Ctx>(),
op_output_path_name(),
op_make_fixed_output_path(),
op_add_path(),
op_store_path(),
op_to_file(),
op_copy_path_to_store(),
op_make_fixed_output_path::<Ctx>(),
op_add_path::<Ctx>(),
op_store_path::<Ctx>(),
op_to_file::<Ctx>(),
op_copy_path_to_store::<Ctx>(),
op_get_env(),
];
ops.extend(crate::fetcher::register_ops());
ops.extend(crate::fetcher::register_ops::<Ctx>());
Extension {
name: "nix_runtime",
@@ -79,6 +98,10 @@ mod private {
}
pub(crate) use private::NixError;
/// Build-time embedded copies of the "corepkgs" Nix expressions from
/// `src/runtime/corepkgs` (e.g. `fetchurl.nix`). `op_import` serves these
/// via `CorePkgs::get` when a `<nix/...>` path is imported.
#[derive(Embed)]
#[folder = "src/runtime/corepkgs"]
pub(crate) struct CorePkgs;
#[deno_core::op2]
#[string]
fn op_import<Ctx: RuntimeContext>(
@@ -86,7 +109,25 @@ fn op_import<Ctx: RuntimeContext>(
#[string] path: String,
) -> std::result::Result<String, NixError> {
let _span = tracing::info_span!("op_import", path = %path).entered();
let ctx = state.borrow_mut::<Ctx>();
let ctx: &mut Ctx = state.get_ctx_mut();
// FIXME: special path type
if path.starts_with("<nix/") && path.ends_with(">") {
let corepkg_name = &path[5..path.len() - 1];
if let Some(file) = CorePkgs::get(corepkg_name) {
tracing::info!("Importing corepkg: {}", corepkg_name);
let source = Source {
ty: crate::error::SourceType::Eval(Arc::new(ctx.get_current_dir().to_path_buf())),
src: str::from_utf8(&file.data)
.expect("corrupted corepkgs file")
.into(),
};
ctx.add_source(source.clone());
return Ok(ctx.compile_code(source).map_err(|err| err.to_string())?);
} else {
return Err(format!("Corepkg not found: {}", corepkg_name).into());
}
}
let current_dir = ctx.get_current_dir();
let mut absolute_path = current_dir
@@ -123,6 +164,71 @@ fn op_path_exists(#[string] path: String) -> bool {
std::path::Path::new(&path).exists()
}
#[deno_core::op2]
#[string]
/// Classify the filesystem object at `path` as `"regular"`, `"directory"`,
/// `"symlink"`, or `"unknown"` (backs `builtins.readFileType`).
///
/// Uses `symlink_metadata`, so symlinks are reported as links rather than
/// being followed.
fn op_read_file_type(#[string] path: String) -> std::result::Result<String, NixError> {
    let path = Path::new(&path);
    let file_type = std::fs::symlink_metadata(path)
        .map_err(|e| format!("Failed to read file type for {}: {}", path.display(), e))?
        .file_type();

    // Order matters only for clarity here: with symlink_metadata each
    // predicate is mutually exclusive.
    let label = match () {
        _ if file_type.is_dir() => "directory",
        _ if file_type.is_symlink() => "symlink",
        _ if file_type.is_file() => "regular",
        _ => "unknown",
    };
    Ok(label.to_string())
}
#[deno_core::op2]
#[serde]
/// List a directory as `name -> file-type` pairs (backs `builtins.readDir`).
///
/// File types use the same labels as `op_read_file_type`; entry types come
/// from `DirEntry::file_type`, so symlinked entries report `"symlink"`.
fn op_read_dir(
    #[string] path: String,
) -> std::result::Result<std::collections::HashMap<String, String>, NixError> {
    let path = Path::new(&path);
    if !path.is_dir() {
        return Err(format!("{} is not a directory", path.display()).into());
    }

    let reader = std::fs::read_dir(path)
        .map_err(|e| format!("Failed to read directory {}: {}", path.display(), e))?;

    let mut listing = std::collections::HashMap::new();
    for item in reader {
        let item = item.map_err(|e| format!("Failed to read directory entry: {}", e))?;
        let kind = item.file_type().map_err(|e| {
            format!(
                "Failed to read file type for {}: {}",
                item.path().display(),
                e
            )
        })?;
        let label = if kind.is_dir() {
            "directory"
        } else if kind.is_symlink() {
            "symlink"
        } else if kind.is_file() {
            "regular"
        } else {
            "unknown"
        };
        listing.insert(
            item.file_name().to_string_lossy().to_string(),
            label.to_string(),
        );
    }
    Ok(listing)
}
#[deno_core::op2]
#[string]
fn op_resolve_path(
@@ -168,19 +274,67 @@ fn op_sha256_hex(#[string] data: String) -> String {
crate::nix_hash::sha256_hex(&data)
}
#[deno_core::op2]
#[serde]
/// Decode a span string of the form `"<source-id>:<start>:<end>"` into a
/// `{ file, line, column }` JSON object for error reporting.
///
/// A malformed span yields all-null fields rather than an error, so callers
/// can always attach *some* location object.
fn op_decode_span<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] span_str: String,
) -> std::result::Result<serde_json::Value, NixError> {
    let pieces: Vec<&str> = span_str.split(':').collect();
    if pieces.len() != 3 {
        // Not a recognizable span encoding: report an unknown location.
        return Ok(serde_json::json!({
            "file": serde_json::Value::Null,
            "line": serde_json::Value::Null,
            "column": serde_json::Value::Null
        }));
    }

    let source_id: usize = pieces[0].parse().map_err(|_| "Invalid source ID")?;
    let start: u32 = pieces[1].parse().map_err(|_| "Invalid start offset")?;
    // The third component (end offset) is validated by the length check but
    // not needed for line/column resolution.

    let ctx: &Ctx = state.get_ctx();
    let source = ctx.get_source(source_id);
    let (line, column) = byte_offset_to_line_col(&source.src, start as usize);

    Ok(serde_json::json!({
        "file": source.get_name(),
        "line": line,
        "column": column
    }))
}
/// Convert a byte `offset` into 1-based `(line, column)` within `content`.
///
/// Columns are counted in characters, not bytes; an offset at or past the
/// end of `content` resolves to the position just after the last character.
fn byte_offset_to_line_col(content: &str, offset: usize) -> (u32, u32) {
    content
        .char_indices()
        // Only characters strictly before the target byte offset advance
        // the cursor, matching a "stop once we reach offset" scan.
        .take_while(|&(idx, _)| idx < offset)
        .fold((1u32, 1u32), |(line, col), (_, ch)| {
            if ch == '\n' { (line + 1, 1) } else { (line, col + 1) }
        })
}
#[deno_core::op2]
#[string]
fn op_make_store_path(
fn op_make_store_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] ty: String,
#[string] hash_hex: String,
#[string] name: String,
) -> String {
use crate::store::StoreBackend;
use std::sync::Arc;
let store = state.borrow::<Arc<StoreBackend>>();
let store_dir = store.as_store().get_store_dir();
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_dir = store.get_store_dir();
crate::nix_hash::make_store_path(store_dir, &ty, &hash_hex, &name)
}
@@ -192,19 +346,18 @@ fn op_output_path_name(#[string] drv_name: String, #[string] output_name: String
#[deno_core::op2]
#[string]
fn op_make_fixed_output_path(
fn op_make_fixed_output_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] hash_algo: String,
#[string] hash: String,
#[string] hash_mode: String,
#[string] name: String,
) -> String {
use crate::store::StoreBackend;
use sha2::{Digest, Sha256};
use std::sync::Arc;
let store = state.borrow::<Arc<StoreBackend>>();
let store_dir = store.as_store().get_store_dir();
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_dir = store.get_store_dir();
if hash_algo == "sha256" && hash_mode == "recursive" {
crate::nix_hash::make_store_path(store_dir, "source", &hash, &name)
@@ -221,18 +374,16 @@ fn op_make_fixed_output_path(
#[deno_core::op2]
#[string]
fn op_add_path(
fn op_add_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] path: String,
#[string] name: Option<String>,
recursive: bool,
#[string] sha256: Option<String>,
) -> std::result::Result<String, NixError> {
use crate::store::StoreBackend;
use sha2::{Digest, Sha256};
use std::fs;
use std::path::Path;
use std::sync::Arc;
let path_obj = Path::new(&path);
@@ -273,10 +424,10 @@ fn op_add_path(
)));
}
let store = state.borrow::<Arc<StoreBackend>>();
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_path = store
.as_store()
.add_to_store_from_path(&computed_name, path_obj, vec![])
.map_err(|e| NixError::from(format!("failed to add path to store: {}", e)))?;
@@ -320,20 +471,19 @@ fn compute_nar_hash(path: &std::path::Path) -> std::result::Result<String, NixEr
#[deno_core::op2]
#[string]
fn op_store_path(
fn op_store_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] path: String,
) -> std::result::Result<String, NixError> {
use crate::store::{StoreBackend, validate_store_path};
use std::sync::Arc;
use crate::store::validate_store_path;
let store = state.borrow::<Arc<StoreBackend>>();
let store_dir = store.as_store().get_store_dir();
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_dir = store.get_store_dir();
validate_store_path(store_dir, &path).map_err(|e| NixError::from(e.to_string()))?;
store
.as_store()
.ensure_path(&path)
.map_err(|e| NixError::from(e.to_string()))?;
@@ -342,18 +492,15 @@ fn op_store_path(
#[deno_core::op2]
#[string]
fn op_to_file(
fn op_to_file<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] name: String,
#[string] contents: String,
#[serde] references: Vec<String>,
) -> std::result::Result<String, NixError> {
use crate::store::StoreBackend;
use std::sync::Arc;
let store = state.borrow::<Arc<StoreBackend>>();
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_path = store
.as_store()
.add_text_to_store(&name, &contents, references)
.map_err(|e| NixError::from(format!("builtins.toFile failed: {}", e)))?;
@@ -362,13 +509,11 @@ fn op_to_file(
#[deno_core::op2]
#[string]
fn op_copy_path_to_store(
fn op_copy_path_to_store<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] path: String,
) -> std::result::Result<String, NixError> {
use crate::store::StoreBackend;
use std::path::Path;
use std::sync::Arc;
let path_obj = Path::new(&path);
@@ -382,9 +527,9 @@ fn op_copy_path_to_store(
.unwrap_or("source")
.to_string();
let store = state.borrow::<Arc<StoreBackend>>();
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_path = store
.as_store()
.add_to_store_from_path(&name, path_obj, vec![])
.map_err(|e| NixError::from(format!("failed to copy path to store: {}", e)))?;
@@ -437,51 +582,20 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
})
}
pub(crate) fn op_state(&mut self) -> std::rc::Rc<std::cell::RefCell<OpState>> {
self.js_runtime.op_state()
}
pub(crate) fn eval(&mut self, script: String, ctx: Ctx) -> Result<Value> {
pub(crate) fn eval(&mut self, script: String, ctx: &mut Ctx) -> Result<Value> {
let ctx: &'static mut Ctx = unsafe { &mut *(ctx as *mut Ctx) };
self.js_runtime.op_state().borrow_mut().put(ctx);
let global_value = self
.js_runtime
.execute_script("<eval>", script)
.map_err(|e| {
.map_err(|error| {
// Get current source from Context
let op_state = self.js_runtime.op_state();
let op_state_borrow = op_state.borrow();
let ctx = op_state_borrow.borrow::<Ctx>();
let ctx: &Ctx = op_state_borrow.get_ctx();
let msg = e.get_message().to_string();
let mut span = None;
let mut source = None;
// Parse Nix stack trace frames
if let Some(stack) = &e.stack {
let frames = crate::error::parse_nix_stack(stack, ctx);
if let Some(last_frame) = frames.last() {
span = Some(last_frame.span);
source = Some(last_frame.source.clone())
}
}
let js_backtrace = e.stack.map(|stack| {
stack
.lines()
.filter(|line| !line.starts_with("NIX_STACK_FRAME:"))
.join("\n")
});
let mut error = Error::eval_error(msg.clone(), js_backtrace);
if let Some(span) = span {
error = error.with_span(span);
}
if let Some(source) = source {
error = error.with_source(source);
}
error
crate::error::parse_js_error(error, ctx)
})?;
// Retrieve scope from JsRuntime

View File

@@ -0,0 +1,76 @@
# Embedded corepkgs implementation of <nix/fetchurl.nix>: wraps the
# "builtin:fetchurl" builder in a fixed-output derivation.
{
  system ? "", # obsolete
  url,
  hash ? "", # an SRI hash

  # Legacy hash specification
  md5 ? "",
  sha1 ? "",
  sha256 ? "",
  sha512 ? "",
  # Prefer the SRI `hash`; otherwise fall back through the legacy
  # per-algorithm arguments, defaulting to sha256.
  outputHash ?
    if hash != "" then
      hash
    else if sha512 != "" then
      sha512
    else if sha1 != "" then
      sha1
    else if md5 != "" then
      md5
    else
      sha256,
  # An SRI hash encodes its own algorithm, hence the empty string.
  outputHashAlgo ?
    if hash != "" then
      ""
    else if sha512 != "" then
      "sha512"
    else if sha1 != "" then
      "sha1"
    else if md5 != "" then
      "md5"
    else
      "sha256",

  executable ? false,
  unpack ? false,
  name ? baseNameOf (toString url),

  # still translates to __impure to trigger derivationStrict error checks.
  impure ? false,
}:

derivation (
  {
    builder = "builtin:fetchurl";

    # New-style output content requirements.
    outputHashMode = if unpack || executable then "recursive" else "flat";

    inherit
      name
      url
      executable
      unpack
      ;

    system = "builtin";

    # No need to double the amount of network traffic
    preferLocalBuild = true;

    impureEnvVars = [
      # We borrow these environment variables from the caller to allow
      # easy proxy configuration. This is impure, but a fixed-output
      # derivation like fetchurl is allowed to do so since its result is
      # by definition pure.
      "http_proxy"
      "https_proxy"
      "ftp_proxy"
      "all_proxy"
      "no_proxy"
    ];

    # To make "nix-prefetch-url" work.
    urls = [ url ];
  }
  # Impure fetches skip the fixed-output hash attributes entirely.
  // (if impure then { __impure = true; } else { inherit outputHashAlgo outputHash; })
)

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
mod config;
mod error;
mod validation;

View File

@@ -315,3 +315,13 @@ fn builtins_function_args() {
])))
);
}
#[test]
fn builtins_parse_drv_name() {
    // parseDrvName splits a "<name>-<version>" string into its parts.
    let parsed = eval(r#"builtins.parseDrvName "nix-js-0.1.0pre""#).unwrap_attr_set();
    assert_eq!(parsed.get("name"), Some(&Value::String("nix-js".into())));
    assert_eq!(parsed.get("version"), Some(&Value::String("0.1.0pre".into())));
}

View File

@@ -143,9 +143,10 @@ fn derivation_strict() {
match result {
Value::AttrSet(attrs) => {
assert_eq!(attrs.get("type"), Some(&Value::String("derivation".into())));
assert!(attrs.contains_key("drvPath"));
assert!(attrs.contains_key("outPath"));
assert!(attrs.contains_key("out"));
assert!(!attrs.contains_key("type"));
assert!(!attrs.contains_key("outPath"));
}
_ => panic!("Expected AttrSet"),
}
@@ -189,7 +190,7 @@ fn derivation_escaping_in_aterm() {
#[test]
fn multi_output_two_outputs() {
let result = eval(
let drv = eval(
r#"derivation {
name = "multi";
builder = "/bin/sh";
@@ -198,39 +199,23 @@ fn multi_output_two_outputs() {
}"#,
);
match result {
match drv {
Value::AttrSet(attrs) => {
assert!(attrs.contains_key("drvPath"));
assert!(attrs.contains_key("out"));
assert!(attrs.contains_key("dev"));
assert!(attrs.contains_key("outPath"));
assert!(attrs.contains_key("drvPath"));
// Verify exact paths match CppNix
if let Some(Value::String(drv_path)) = attrs.get("drvPath") {
assert_eq!(
drv_path,
"/nix/store/vmyjryfipkn9ss3ya23hk8p3m58l6dsl-multi.drv"
);
} else {
panic!("drvPath should be a string");
}
if let Some(Value::String(out_path)) = attrs.get("out") {
assert_eq!(
out_path,
"/nix/store/a3d95yg9d215c54n0ybr4npmpnj29229-multi"
panic!(
"drvPath should be a string, got: {:?}",
attrs.get("drvPath")
);
} else {
panic!("out should be a string");
}
if let Some(Value::String(dev_path)) = attrs.get("dev") {
assert_eq!(
dev_path,
"/nix/store/hq3b99lz71gwfq6x8lqwg14hf929q0d2-multi-dev"
);
} else {
panic!("dev should be a string");
}
if let Some(Value::String(out_path)) = attrs.get("outPath") {
@@ -238,7 +223,8 @@ fn multi_output_two_outputs() {
out_path,
"/nix/store/a3d95yg9d215c54n0ybr4npmpnj29229-multi"
);
assert_eq!(attrs.get("out"), Some(&Value::String(out_path.clone())));
} else {
panic!("outPath should be a string");
}
}
_ => panic!("Expected AttrSet"),

38
nix-js/tests/findfile.rs Normal file
View File

@@ -0,0 +1,38 @@
mod utils;
use utils::eval;
#[test]
fn test_find_file_corepkg_fetchurl() {
    // Even with an empty search path, "nix/..." entries resolve against
    // the embedded corepkgs.
    let expr = r#"
        let
          searchPath = [];
          lookupPath = "nix/fetchurl.nix";
        in
          builtins.findFile searchPath lookupPath
    "#;
    assert!(eval(expr).to_string().contains("fetchurl.nix"));
}
#[test]
fn test_lookup_path_syntax() {
    // The angle-bracket lookup-path syntax goes through the same resolution.
    let resolved = eval(r#"<nix/fetchurl.nix>"#);
    assert!(resolved.to_string().contains("fetchurl.nix"));
}
#[test]
fn test_import_corepkg() {
    // Importing the embedded fetchurl expression must yield a function.
    let expr = r#"
        let
          fetchurl = import <nix/fetchurl.nix>;
        in
          builtins.typeOf fetchurl
    "#;
    assert_eq!(eval(expr).to_string(), "\"lambda\"");
}

View File

@@ -260,3 +260,111 @@ fn path_deterministic() {
// Same inputs should produce same store path
assert_eq!(result1, result2);
}
#[test]
fn read_file_type_regular_file() {
    // A plain file is classified as "regular".
    let dir = tempfile::tempdir().unwrap();
    let file = dir.path().join("test.txt");
    std::fs::write(&file, "Test content").unwrap();
    assert_eq!(
        eval(&format!(r#"builtins.readFileType {}"#, file.display())),
        Value::String("regular".to_string())
    );
}
#[test]
fn read_file_type_directory() {
    // A directory is classified as "directory".
    let dir = tempfile::tempdir().unwrap();
    let subdir = dir.path().join("testdir");
    std::fs::create_dir(&subdir).unwrap();
    assert_eq!(
        eval(&format!(r#"builtins.readFileType {}"#, subdir.display())),
        Value::String("directory".to_string())
    );
}
#[test]
fn read_file_type_symlink() {
    // Symlinks are reported as "symlink", not followed to their target.
    let dir = tempfile::tempdir().unwrap();
    let target = dir.path().join("target.txt");
    let symlink = dir.path().join("link.txt");
    std::fs::write(&target, "Target content").unwrap();
    #[cfg(unix)]
    {
        std::os::unix::fs::symlink(&target, &symlink).unwrap();
        assert_eq!(
            eval(&format!(r#"builtins.readFileType {}"#, symlink.display())),
            Value::String("symlink".to_string())
        );
    }
}
#[test]
fn read_dir_basic() {
    // readDir lists each entry with its file type.
    let dir = tempfile::tempdir().unwrap();
    let listing_dir = dir.path().join("readdir_test");
    std::fs::create_dir(&listing_dir).unwrap();
    std::fs::write(listing_dir.join("file1.txt"), "Content 1").unwrap();
    std::fs::write(listing_dir.join("file2.txt"), "Content 2").unwrap();
    std::fs::create_dir(listing_dir.join("subdir")).unwrap();

    match eval(&format!(r#"builtins.readDir {}"#, listing_dir.display())) {
        Value::AttrSet(attrs) => {
            assert_eq!(attrs.len(), 3);
            for (entry, kind) in [
                ("file1.txt", "regular"),
                ("file2.txt", "regular"),
                ("subdir", "directory"),
            ] {
                assert_eq!(attrs.get(entry), Some(&Value::String(kind.to_string())));
            }
        }
        other => panic!("Expected AttrSet, got {:?}", other),
    }
}
#[test]
fn read_dir_empty() {
    // An empty directory evaluates to an empty attribute set.
    let dir = tempfile::tempdir().unwrap();
    let empty = dir.path().join("empty_dir");
    std::fs::create_dir(&empty).unwrap();
    match eval(&format!(r#"builtins.readDir {}"#, empty.display())) {
        Value::AttrSet(attrs) => assert_eq!(attrs.len(), 0),
        other => panic!("Expected AttrSet, got {:?}", other),
    }
}
#[test]
fn read_dir_nonexistent_fails() {
    // readDir on a missing path must surface an evaluation error.
    let outcome = eval_result(r#"builtins.readDir "/nonexistent/directory""#);
    assert!(outcome.is_err());
}
#[test]
fn read_dir_on_file_fails() {
let temp_dir = tempfile::tempdir().unwrap();
let test_file = temp_dir.path().join("test.txt");
std::fs::write(&test_file, "Test content").unwrap();
let expr = format!(r#"builtins.readDir {}"#, test_file.display());
let result = eval_result(&expr);
assert!(result.is_err());
let err_msg = result.unwrap_err().to_string();
assert!(err_msg.contains("not a directory"));
}