Compare commits

...

6 Commits

Author SHA1 Message Date
cf4dd6c379 fix: preserve string context in builtins.{path,fetch*} 2026-02-14 20:06:38 +08:00
31c7a62311 chore(typos): ignore lang tests 2026-02-14 19:02:26 +08:00
ad5d047c01 chore: eliminate Result::unwrap 2026-02-14 19:01:06 +08:00
795742e3d8 deps: upgrade dependencies 2026-02-14 19:01:00 +08:00
60cd61d771 feat: implement fromTOML; fix fromJSON implementation 2026-02-14 13:30:31 +08:00
d95a6e509c feat: tidy fetcher (partial)
* shouldn't have used LLM to implement this...
2026-02-13 21:57:51 +08:00
32 changed files with 1254 additions and 1229 deletions

Cargo.lock (generated) — 1213 changes

File diff suppressed because it is too large. [Load Diff]

View File

@@ -1,15 +1,15 @@
[no-exit-message]
@repl:
RUST_LOG=none cargo run --bin repl
cargo run --bin repl
[no-exit-message]
@eval expr:
RUST_LOG=none cargo run --bin eval -- '{{expr}}'
cargo run --bin eval -- '{{expr}}'
[no-exit-message]
@replr:
RUST_LOG=none cargo run --bin repl --release
cargo run --bin repl --release
[no-exit-message]
@evalr expr:
RUST_LOG=none cargo run --bin eval --release -- '{{expr}}'
cargo run --bin eval --release -- '{{expr}}'

View File

@@ -9,6 +9,7 @@ let
flake = (
import flake-compat {
src = ./.;
copySourceTreeToStore = false;
}
);
in

View File

@@ -7,6 +7,6 @@ edition = "2024"
proc-macro = true
[dependencies]
convert_case = "0.8"
convert_case = "0.11"
quote = "1.0"
syn = { version = "2.0", features = ["full"] }

View File

@@ -12,7 +12,7 @@ nix-compat = { git = "https://git.snix.dev/snix/snix.git", version = "0.1.0", fe
# REPL
anyhow = "1.0"
rustyline = "14.0"
rustyline = "17.0"
# Logging
tracing = "0.1"
@@ -31,7 +31,7 @@ itertools = "0.14"
regex = "1.11"
deno_core = "0.376"
deno_core = "0.385"
deno_error = "0.7"
nix-nar = "0.3"
@@ -40,28 +40,29 @@ hex = "0.4"
base64 = "0.22"
# Fetcher dependencies
reqwest = { version = "0.12", features = ["blocking", "rustls-tls"], default-features = false }
reqwest = { version = "0.13", features = ["blocking", "rustls"], default-features = false }
tar = "0.4"
flate2 = "1.0"
xz2 = "0.1"
bzip2 = "0.5"
zip = "2.2"
bzip2 = "0.6"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
dirs = "5.0"
# spec 1.0.0
toml = "0.9.9"
dirs = "6.0"
tempfile = "3.24"
rusqlite = { version = "0.33", features = ["bundled"] }
rusqlite = { version = "0.38", features = ["bundled"] }
rnix = "0.12"
rowan = "0.15"
rnix = "0.13"
rowan = "0.16"
nix-js-macros = { path = "../nix-js-macros" }
ere = "0.2.4"
num_enum = "0.7.5"
tap = "1.0.1"
[dev-dependencies]
criterion = { version = "0.5", features = ["html_reports"] }
criterion = { version = "0.8", features = ["html_reports"] }
[[bench]]
name = "basic_ops"

View File

@@ -1,6 +1,7 @@
mod utils;
use criterion::{Criterion, black_box, criterion_group, criterion_main};
use criterion::{Criterion, criterion_group, criterion_main};
use std::hint::black_box;
use utils::eval;
fn bench_arithmetic(c: &mut Criterion) {

View File

@@ -1,6 +1,7 @@
mod utils;
use criterion::{Criterion, black_box, criterion_group, criterion_main};
use criterion::{Criterion, criterion_group, criterion_main};
use std::hint::black_box;
use utils::eval;
fn bench_builtin_math(c: &mut Criterion) {

View File

@@ -1,7 +1,8 @@
mod utils;
use criterion::{Criterion, black_box, criterion_group, criterion_main};
use criterion::{Criterion, criterion_group, criterion_main};
use nix_js::context::Context;
use std::hint::black_box;
use utils::compile;
fn bench_parse_and_downgrade(c: &mut Criterion) {

View File

@@ -1,6 +1,7 @@
mod utils;
use criterion::{Criterion, black_box, criterion_group, criterion_main};
use criterion::{Criterion, criterion_group, criterion_main};
use std::hint::black_box;
use utils::eval;
fn bench_non_recursive(c: &mut Criterion) {

View File

@@ -6,55 +6,22 @@ import type { NixString, NixValue } from "../types";
import { isStringWithContext, isNixPath } from "../types";
import { force } from "../thunk";
import { type NixStringContext, mkStringWithContext, addBuiltContext } from "../string-context";
import { forceFunction } from "../type-assert";
import { forceFunction, forceStringNoCtx } from "../type-assert";
import { nixValueToJson } from "../conversion";
import { isAttrs, isPath, typeOf } from "./type-check";
const convertJsonToNix = (json: unknown): NixValue => {
if (json === null) {
return null;
}
if (typeof json === "boolean") {
return json;
}
if (typeof json === "number") {
if (Number.isInteger(json)) {
return BigInt(json);
}
return json;
}
if (typeof json === "string") {
return json;
}
if (Array.isArray(json)) {
return json.map(convertJsonToNix);
}
if (typeof json === "object") {
const result: Record<string, NixValue> = {};
for (const [key, value] of Object.entries(json)) {
result[key] = convertJsonToNix(value);
}
return result;
}
throw new TypeError(`unsupported JSON value type: ${typeof json}`);
};
export const fromJSON = (e: NixValue): NixValue => {
const str = force(e);
if (typeof str !== "string" && !isStringWithContext(str)) {
throw new TypeError(`builtins.fromJSON: expected a string, got ${typeOf(str)}`);
}
const jsonStr = isStringWithContext(str) ? str.value : str;
try {
const parsed = JSON.parse(jsonStr);
return convertJsonToNix(parsed);
} catch (err) {
throw new SyntaxError(`builtins.fromJSON: ${err instanceof Error ? err.message : String(err)}`);
}
return Deno.core.ops.op_from_json(jsonStr);
};
export const fromTOML = (e: NixValue): never => {
throw new Error("Not implemented: fromTOML");
export const fromTOML = (e: NixValue): NixValue => {
const toml = forceStringNoCtx(e);
return Deno.core.ops.op_from_toml(toml);
};
export const toJSON = (e: NixValue): NixString => {

View File

@@ -107,6 +107,13 @@ const extractArgs = (attrs: NixAttrs, outContext: NixStringContext): string[] =>
return argsList.map((a) => coerceToString(a, StringCoercionMode.ToString, true, outContext));
};
const outputPathName = (drvName: string, output: string) => {
if (output === "out") {
return drvName
}
return `${drvName}-${output}`
}
const structuredAttrsExcludedKeys = new Set([
"__structuredAttrs",
"__ignoreNulls",
@@ -296,7 +303,7 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
let drvPath: string;
if (fixedOutputInfo) {
const pathName = Deno.core.ops.op_output_path_name(drvName, "out");
const pathName = outputPathName(drvName, "out");
const outPath = Deno.core.ops.op_make_fixed_output_path(
fixedOutputInfo.hashAlgo,
fixedOutputInfo.hash,
@@ -374,7 +381,7 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
outputInfos = new Map<string, OutputInfo>();
for (const outputName of outputs) {
const pathName = Deno.core.ops.op_output_path_name(drvName, outputName);
const pathName = outputPathName(drvName, outputName);
const outPath = Deno.core.ops.op_make_store_path(`output:${outputName}`, drvModuloHash, pathName);
outputInfos.set(outputName, {
path: outPath,

View File

@@ -11,14 +11,15 @@ import {
forceStringNoCtx,
forceStringValue,
} from "../type-assert";
import type { NixValue, NixAttrs, NixPath } from "../types";
import type { NixValue, NixAttrs, NixPath, NixString } from "../types";
import { isNixPath, IS_PATH, CatchableError } from "../types";
import { force } from "../thunk";
import { coerceToPath, coerceToString, StringCoercionMode } from "./conversion";
import { getPathValue } from "../path";
import type { NixStringContext, StringWithContext } from "../string-context";
import { mkStringWithContext } from "../string-context";
import { mkStringWithContext, addOpaqueContext } from "../string-context";
import { isAttrs, isPath } from "./type-check";
import { baseNameOf } from "./path";
const importCache = new Map<string, NixValue>();
@@ -108,14 +109,6 @@ export interface FetchGitResult {
nar_hash: string | null;
}
export interface FetchHgResult {
out_path: string;
branch: string;
rev: string;
short_rev: string;
rev_count: number;
}
const normalizeUrlInput = (
args: NixValue,
): { url: string; hash?: string; name?: string; executable?: boolean } => {
@@ -139,16 +132,26 @@ const normalizeUrlInput = (
const normalizeTarballInput = (args: NixValue): { url: string; sha256?: string; name?: string } => {
const forced = force(args);
if (isAttrs(forced)) {
const url = forceStringNoCtx(forced.url);
const url = resolvePseudoUrl(forceStringNoCtx(forced.url));
const sha256 = "sha256" in forced ? forceStringNoCtx(forced.sha256) : undefined;
const name = "name" in forced ? forceStringNoCtx(forced.name) : undefined;
const nameRaw = "name" in forced ? forceStringNoCtx(forced.name) : undefined;
// FIXME: extract baseNameOfRaw
const name = nameRaw === "" ? baseNameOf(nameRaw) as string : nameRaw;
return { url, sha256, name };
} else {
return { url: forceStringNoCtx(forced) };
}
};
export const fetchurl = (args: NixValue): string => {
const resolvePseudoUrl = (url: string) => {
if (url.startsWith("channel:")) {
return `https://channels.nixos.org/${url.substring(8)}/nixexprs.tar.xz`
} else {
return url
}
}
export const fetchurl = (args: NixValue): NixString => {
const { url, hash, name, executable } = normalizeUrlInput(args);
const result: FetchUrlResult = Deno.core.ops.op_fetch_url(
url,
@@ -156,13 +159,17 @@ export const fetchurl = (args: NixValue): string => {
name ?? null,
executable ?? false,
);
return result.store_path;
const context: NixStringContext = new Set();
addOpaqueContext(context, result.store_path);
return mkStringWithContext(result.store_path, context);
};
export const fetchTarball = (args: NixValue): string => {
export const fetchTarball = (args: NixValue): NixString => {
const { url, name, sha256 } = normalizeTarballInput(args);
const result: FetchTarballResult = Deno.core.ops.op_fetch_tarball(url, name ?? null, sha256 ?? null);
return result.store_path;
const context: NixStringContext = new Set();
addOpaqueContext(context, result.store_path);
return mkStringWithContext(result.store_path, context);
};
export const fetchGit = (args: NixValue): NixAttrs => {
@@ -170,8 +177,10 @@ export const fetchGit = (args: NixValue): NixAttrs => {
if (typeof forced === "string" || isPath(forced)) {
const path = coerceToPath(forced);
const result: FetchGitResult = Deno.core.ops.op_fetch_git(path, null, null, false, false, false, null);
const outContext: NixStringContext = new Set();
addOpaqueContext(outContext, result.out_path);
return {
outPath: result.out_path,
outPath: mkStringWithContext(result.out_path, outContext),
rev: result.rev,
shortRev: result.short_rev,
revCount: BigInt(result.rev_count),
@@ -200,8 +209,10 @@ export const fetchGit = (args: NixValue): NixAttrs => {
name,
);
const outContext: NixStringContext = new Set();
addOpaqueContext(outContext, result.out_path);
return {
outPath: result.out_path,
outPath: mkStringWithContext(result.out_path, outContext),
rev: result.rev,
shortRev: result.short_rev,
revCount: BigInt(result.rev_count),
@@ -212,21 +223,8 @@ export const fetchGit = (args: NixValue): NixAttrs => {
};
};
export const fetchMercurial = (args: NixValue): NixAttrs => {
const attrs = forceAttrs(args);
const url = forceStringValue(attrs.url);
const rev = "rev" in attrs ? forceStringValue(attrs.rev) : null;
const name = "name" in attrs ? forceStringValue(attrs.name) : null;
const result: FetchHgResult = Deno.core.ops.op_fetch_hg(url, rev, name);
return {
outPath: result.out_path,
branch: result.branch,
rev: result.rev,
shortRev: result.short_rev,
revCount: BigInt(result.rev_count),
};
export const fetchMercurial = (_args: NixValue): NixAttrs => {
throw new Error("Not implemented: fetchMercurial")
};
export const fetchTree = (args: NixValue): NixAttrs => {
@@ -348,10 +346,9 @@ export const pathExists = (path: NixValue): boolean => {
*
* Returns: Store path string
*/
export const path = (args: NixValue): string => {
export const path = (args: NixValue): NixString => {
const attrs = forceAttrs(args);
// Required: path parameter
if (!("path" in attrs)) {
throw new TypeError("builtins.path: 'path' attribute is required");
}
@@ -359,23 +356,18 @@ export const path = (args: NixValue): string => {
const pathValue = force(attrs.path);
let pathStr: string;
// Accept both Path values and strings
if (isNixPath(pathValue)) {
pathStr = getPathValue(pathValue);
} else {
pathStr = forceStringValue(pathValue);
}
// Optional: name parameter (defaults to basename in Rust)
const name = "name" in attrs ? forceStringValue(attrs.name) : null;
// Optional: recursive parameter (default: true)
const recursive = "recursive" in attrs ? forceBool(attrs.recursive) : true;
// Optional: sha256 parameter
const sha256 = "sha256" in attrs ? forceStringValue(attrs.sha256) : null;
// Handle filter parameter
let storePath: string;
if ("filter" in attrs) {
const filterFn = forceFunction(attrs.filter);
@@ -391,20 +383,20 @@ export const path = (args: NixValue): string => {
}
}
const storePath: string = Deno.core.ops.op_add_filtered_path(
storePath = Deno.core.ops.op_add_filtered_path(
pathStr,
name,
recursive,
sha256,
includePaths,
);
return storePath;
} else {
storePath = Deno.core.ops.op_add_path(pathStr, name, recursive, sha256);
}
// Call Rust op to add path to store
const storePath: string = Deno.core.ops.op_add_path(pathStr, name, recursive, sha256);
return storePath;
const context: NixStringContext = new Set();
addOpaqueContext(context, storePath);
return mkStringWithContext(storePath, context);
};
export const toFile =

View File

@@ -41,6 +41,7 @@ export const hasContext = context.hasContext;
export const hashFile =
(type: NixValue) =>
(p: NixValue): never => {
const ty = forceStringNoCtx(type);
throw new Error("Not implemented: hashFile");
};

View File

@@ -25,36 +25,6 @@ import { mkStringWithContext, type NixStringContext } from "../string-context";
* - baseNameOf "foo" → "foo"
*/
export const baseNameOf = (s: NixValue): NixString => {
const forced = force(s);
// Path input → string output (no context)
if (isNixPath(forced)) {
const pathStr = forced.value;
if (pathStr.length === 0) {
return "";
}
let last = pathStr.length - 1;
if (pathStr[last] === "/" && last > 0) {
last -= 1;
}
let pos = last;
while (pos >= 0 && pathStr[pos] !== "/") {
pos -= 1;
}
if (pos === -1) {
pos = 0;
} else {
pos += 1;
}
return pathStr.substring(pos, last + 1);
}
// String input → string output (preserve context)
const context: NixStringContext = new Set();
const pathStr = coerceToString(s, StringCoercionMode.Base, false, context);

View File

@@ -41,11 +41,12 @@ export const nixValueToJson = (
}
}
if (seen.has(v)) {
throw new Error("cycle detected in toJSON");
} else {
seen.add(v)
}
// FIXME: is this check necessary?
// if (seen.has(v)) {
// throw new Error("cycle detected in toJSON");
// } else {
// seen.add(v)
// }
if (Array.isArray(v)) {
return v.map((item) => nixValueToJson(item, strict, outContext, copyToStore, seen));

View File

@@ -21,7 +21,6 @@ declare global {
column: number | null;
};
function op_make_store_path(ty: string, hash_hex: string, name: string): string;
function op_output_path_name(drv_name: string, output_name: string): string;
function op_parse_hash(hash_str: string, algo: string | null): { hex: string; algo: string };
function op_make_fixed_output_path(
hash_algo: string,
@@ -49,7 +48,6 @@ declare global {
all_refs: boolean,
name: string | null,
): FetchGitResult;
function op_fetch_hg(url: string, rev: string | null, name: string | null): FetchHgResult;
function op_add_path(
path: string,
name: string | null,
@@ -76,6 +74,8 @@ declare global {
): string;
function op_match(regex: string, text: string): (string | null)[] | null;
function op_split(regex: string, text: string): (string | (string | null)[])[];
function op_from_json(json: string): any;
function op_from_toml(toml: string): any;
}
}
}

View File

@@ -2,11 +2,10 @@ use std::path::Path;
use std::ptr::NonNull;
use hashbrown::{HashMap, HashSet};
use itertools::Itertools as _;
use rnix::TextRange;
use string_interner::DefaultStringInterner;
use crate::codegen::{CodegenContext, compile};
use crate::codegen::{CodegenContext, compile, compile_scoped};
use crate::downgrade::*;
use crate::error::{Error, Result, Source};
use crate::ir::{
@@ -17,6 +16,34 @@ use crate::runtime::{Runtime, RuntimeContext};
use crate::store::{DaemonStore, Store, StoreConfig};
use crate::value::{Symbol, Value};
fn parse_error_span(error: &rnix::ParseError) -> Option<rnix::TextRange> {
use rnix::ParseError::*;
match error {
Unexpected(range)
| UnexpectedExtra(range)
| UnexpectedWanted(_, range, _)
| UnexpectedDoubleBind(range)
| DuplicatedArgs(range, _) => Some(*range),
_ => None,
}
}
fn handle_parse_error<'a>(
errors: impl IntoIterator<Item = &'a rnix::ParseError>,
source: Source,
) -> Option<Box<Error>> {
for err in errors {
if let Some(span) = parse_error_span(err) {
return Some(
Error::parse_error(err.to_string())
.with_source(source)
.with_span(span),
);
}
}
None
}
pub struct Context {
ctx: Ctx,
runtime: Runtime<Ctx>,
@@ -232,23 +259,24 @@ impl Ctx {
self.sources.get(id).expect("source not found").clone()
}
fn compile<'a>(&'a mut self, source: Source, extra_scope: Option<Scope<'a>>) -> Result<String> {
fn downgrade<'a>(&mut self, source: Source, extra_scope: Option<Scope<'a>>) -> Result<ExprId> {
tracing::debug!("Parsing Nix expression");
self.sources.push(source.clone());
let root = rnix::Root::parse(&source.src);
if !root.errors().is_empty() {
let error_msg = root.errors().iter().join("; ");
let err = Error::parse_error(error_msg).with_source(source);
return Err(err);
}
handle_parse_error(root.errors(), source).map_or(Ok(()), Err)?;
#[allow(clippy::unwrap_used)]
let root = self
.downgrade_ctx(extra_scope)
.downgrade(root.tree().expr().unwrap())?;
tracing::debug!("Downgrading Nix expression");
let expr = root
.tree()
.expr()
.ok_or_else(|| Error::parse_error("unexpected EOF".into()))?;
self.downgrade_ctx(extra_scope).downgrade(expr)
}
fn compile<'a>(&'a mut self, source: Source, extra_scope: Option<Scope<'a>>) -> Result<String> {
let root = self.downgrade(source, extra_scope)?;
tracing::debug!("Generating JavaScript code");
let code = compile(self.get_ir(root), self);
tracing::debug!("Generated code: {}", &code);
@@ -256,31 +284,13 @@ impl Ctx {
}
pub(crate) fn compile_scoped(&mut self, source: Source, scope: Vec<String>) -> Result<String> {
use crate::codegen::compile_scoped;
tracing::debug!("Parsing Nix expression for scoped import");
self.sources.push(source.clone());
let root = rnix::Root::parse(&source.src);
if !root.errors().is_empty() {
let error_msg = root.errors().iter().join("; ");
let err = Error::parse_error(error_msg).with_source(source);
return Err(err);
}
let scope = Scope::ScopedImport(
scope
.into_iter()
.map(|k| self.symbols.get_or_intern(k))
.collect(),
);
#[allow(clippy::unwrap_used)]
let root = self
.downgrade_ctx(Some(scope))
.downgrade(root.tree().expr().unwrap())?;
let root = self.downgrade(source, Some(scope))?;
tracing::debug!("Generating JavaScript code for scoped import");
let code = compile_scoped(self.get_ir(root), self);
tracing::debug!("Generated scoped code: {}", &code);

View File

@@ -1,17 +1,41 @@
// Assume no parse error
#![allow(clippy::unwrap_used)]
use hashbrown::hash_map::Entry;
use hashbrown::{HashMap, HashSet};
use itertools::Itertools as _;
use rnix::TextRange;
use rnix::ast::{self, AstToken, Expr, HasEntry};
use rowan::ast::AstNode;
use tap::TryConv;
use crate::error::{Error, Result, Source};
use crate::ir::*;
use crate::value::Symbol;
trait Require<T> {
fn require(self, ctx: &impl DowngradeContext, span: TextRange) -> Result<T>;
}
impl<T> Require<T> for Option<T> {
#[inline]
fn require(self, ctx: &impl DowngradeContext, span: TextRange) -> Result<T> {
self.ok_or_else(|| {
Error::parse_error("invalid syntax".into())
.with_source(ctx.get_current_source())
.with_span(span)
})
}
}
impl<T, E: std::fmt::Display> Require<T> for std::result::Result<T, E> {
#[inline]
fn require(self, ctx: &impl DowngradeContext, span: TextRange) -> Result<T> {
self.map_err(|e| {
Error::parse_error(format!("invalid syntax: {e}"))
.with_source(ctx.get_current_source())
.with_span(span)
})
}
}
pub trait DowngradeContext {
fn downgrade(self, expr: rnix::ast::Expr) -> Result<ExprId>;
@@ -64,7 +88,10 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for Expr {
IfElse(ifelse) => ifelse.downgrade(ctx),
Select(select) => select.downgrade(ctx),
Str(str) => str.downgrade(ctx),
Path(path) => path.downgrade(ctx),
PathAbs(path) => path.downgrade(ctx),
PathRel(path) => path.downgrade(ctx),
PathHome(path) => path.downgrade(ctx),
PathSearch(path) => path.downgrade(ctx),
Literal(lit) => lit.downgrade(ctx),
Lambda(lambda) => lambda.downgrade(ctx),
LegacyLet(let_) => let_.downgrade(ctx),
@@ -76,19 +103,25 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for Expr {
Ident(ident) => ident.downgrade(ctx),
With(with) => with.downgrade(ctx),
HasAttr(has) => has.downgrade(ctx),
Paren(paren) => paren.expr().unwrap().downgrade(ctx),
Root(root) => root.expr().unwrap().downgrade(ctx),
Paren(paren) => paren
.expr()
.require(ctx, paren.syntax().text_range())?
.downgrade(ctx),
Root(root) => root
.expr()
.require(ctx, root.syntax().text_range())?
.downgrade(ctx),
}
}
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Assert {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let assertion = self.condition().unwrap();
let span = self.syntax().text_range();
let assertion = self.condition().require(ctx, span)?;
let assertion_raw = assertion.to_string();
let assertion = assertion.downgrade(ctx)?;
let expr = self.body().unwrap().downgrade(ctx)?;
let span = self.syntax().text_range();
let expr = self.body().require(ctx, span)?.downgrade(ctx)?;
Ok(ctx.new_expr(
Assert {
assertion,
@@ -103,10 +136,10 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Assert {
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::IfElse {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let cond = self.condition().unwrap().downgrade(ctx)?;
let consq = self.body().unwrap().downgrade(ctx)?;
let alter = self.else_body().unwrap().downgrade(ctx)?;
let span = self.syntax().text_range();
let cond = self.condition().require(ctx, span)?.downgrade(ctx)?;
let consq = self.body().require(ctx, span)?.downgrade(ctx)?;
let alter = self.else_body().require(ctx, span)?.downgrade(ctx)?;
Ok(ctx.new_expr(
If {
cond,
@@ -119,72 +152,53 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::IfElse {
}
}
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Path {
macro_rules! path {
($ty:ident) => {
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::$ty {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let span = self.syntax().text_range();
downgrade_path(self.parts(), span, ctx)
}
}
};
}
path!(PathAbs);
path!(PathRel);
path!(PathHome);
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::PathSearch {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let span = self.syntax().text_range();
let parts = self
.parts()
.map(|part| match part {
ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(
Str {
val: lit.to_string(),
span: lit.syntax().text_range(),
}
.to_ir(),
)),
ast::InterpolPart::Interpolation(interpol) => {
interpol.expr().unwrap().downgrade(ctx)
}
})
.collect::<Result<Vec<_>>>()?;
let expr = if parts.len() == 1 {
let part = parts.into_iter().next().unwrap();
if let &Ir::Str(Str { ref val, span }) = ctx.get_ir(part)
&& let Some(path) = val.strip_prefix("<").map(|path| &path[..path.len() - 1])
{
ctx.replace_ir(
part,
Str {
val: path.to_string(),
span,
}
.to_ir(),
);
let sym = ctx.new_sym("findFile".into());
let find_file = ctx.new_expr(Builtin { inner: sym, span }.to_ir());
let sym = ctx.new_sym("nixPath".into());
let nix_path = ctx.new_expr(Builtin { inner: sym, span }.to_ir());
let call = ctx.new_expr(
Call {
func: find_file,
arg: nix_path,
span,
}
.to_ir(),
);
return Ok(ctx.new_expr(
Call {
func: call,
arg: part,
span,
}
.to_ir(),
));
} else {
part
}
} else {
let path = {
let temp = self.content().require(ctx, span)?;
let text = temp.text();
ctx.new_expr(
ConcatStrings {
parts,
Str {
val: text[1..text.len() - 1].to_string(),
span,
force_string: false,
}
.to_ir(),
)
};
Ok(ctx.new_expr(Path { expr, span }.to_ir()))
let sym = ctx.new_sym("findFile".into());
let find_file = ctx.new_expr(Builtin { inner: sym, span }.to_ir());
let sym = ctx.new_sym("nixPath".into());
let nix_path = ctx.new_expr(Builtin { inner: sym, span }.to_ir());
let call = ctx.new_expr(
Call {
func: find_file,
arg: nix_path,
span,
}
.to_ir(),
);
Ok(ctx.new_expr(
Call {
func: call,
arg: path,
span,
}
.to_ir(),
))
}
}
@@ -200,14 +214,17 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Str {
.map(|part| match part {
ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(Str { val: lit, span }.to_ir())),
ast::InterpolPart::Interpolation(interpol) => {
let inner = interpol.expr().unwrap().downgrade(ctx)?;
let inner = interpol
.expr()
.require(ctx, interpol.syntax().text_range())?
.downgrade(ctx)?;
Ok(ctx.maybe_thunk(inner))
}
})
.collect::<Result<Vec<_>>>()?;
Ok(if is_single_literal {
parts.into_iter().next().unwrap()
parts.into_iter().next().expect("is_single_literal checked")
} else {
ctx.new_expr(
ConcatStrings {
@@ -226,12 +243,12 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Literal {
let span = self.syntax().text_range();
Ok(ctx.new_expr(match self.kind() {
ast::LiteralKind::Integer(int) => Int {
inner: int.value().unwrap(),
inner: int.value().require(ctx, span)?,
span,
}
.to_ir(),
ast::LiteralKind::Float(float) => Float {
inner: float.value().unwrap(),
inner: float.value().require(ctx, span)?,
span,
}
.to_ir(),
@@ -246,8 +263,8 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Literal {
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Ident {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let text = self.ident_token().unwrap().to_string();
let span = self.syntax().text_range();
let text = self.ident_token().require(ctx, span)?.to_string();
if text == "__curPos" {
return Ok(ctx.new_expr(CurPos { span }.to_ir()));
@@ -292,10 +309,10 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::List {
/// Downgrades a binary operation.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::BinOp {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let lhs = self.lhs().unwrap().downgrade(ctx)?;
let rhs = self.rhs().unwrap().downgrade(ctx)?;
let kind = self.operator().unwrap().into();
let span = self.syntax().text_range();
let lhs = self.lhs().require(ctx, span)?.downgrade(ctx)?;
let rhs = self.rhs().require(ctx, span)?.downgrade(ctx)?;
let kind = self.operator().require(ctx, span)?.into();
Ok(ctx.new_expr(
BinOp {
lhs,
@@ -311,9 +328,9 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::BinOp {
/// Downgrades a "has attribute" (`?`) expression.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::HasAttr {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let lhs = self.expr().unwrap().downgrade(ctx)?;
let rhs = downgrade_attrpath(self.attrpath().unwrap(), ctx)?;
let span = self.syntax().text_range();
let lhs = self.expr().require(ctx, span)?.downgrade(ctx)?;
let rhs = downgrade_attrpath(self.attrpath().require(ctx, span)?, ctx)?;
Ok(ctx.new_expr(HasAttr { lhs, rhs, span }.to_ir()))
}
}
@@ -321,9 +338,9 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::HasAttr {
/// Downgrades a unary operation.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::UnaryOp {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let rhs = self.expr().unwrap().downgrade(ctx)?;
let kind = self.operator().unwrap().into();
let span = self.syntax().text_range();
let rhs = self.expr().require(ctx, span)?.downgrade(ctx)?;
let kind = self.operator().require(ctx, span)?.into();
Ok(ctx.new_expr(UnOp { rhs, kind, span }.to_ir()))
}
}
@@ -331,8 +348,9 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::UnaryOp {
/// Downgrades an attribute selection (`.`).
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Select {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let expr = self.expr().unwrap().downgrade(ctx)?;
let attrpath = downgrade_attrpath(self.attrpath().unwrap(), ctx)?;
let span = self.syntax().text_range();
let expr = self.expr().require(ctx, span)?.downgrade(ctx)?;
let attrpath = downgrade_attrpath(self.attrpath().require(ctx, span)?, ctx)?;
let default = if let Some(default) = self.default_expr() {
let default_expr = default.downgrade(ctx)?;
Some(ctx.maybe_thunk(default_expr))
@@ -390,8 +408,8 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet {
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LetIn {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let entries: Vec<_> = self.entries().collect();
let body_expr = self.body().unwrap();
let span = self.syntax().text_range();
let body_expr = self.body().require(ctx, span)?;
downgrade_let_bindings(entries, ctx, span, |ctx, _binding_keys| {
body_expr.downgrade(ctx)
@@ -403,11 +421,12 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LetIn {
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::With {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let span = self.syntax().text_range();
let namespace = self.namespace().unwrap().downgrade(ctx)?;
let namespace = self.namespace().require(ctx, span)?.downgrade(ctx)?;
let namespace = ctx.maybe_thunk(namespace);
let (body, thunks) = ctx
.with_thunk_scope(|ctx| ctx.with_with_scope(|ctx| self.body().unwrap().downgrade(ctx)));
let body_expr = self.body().require(ctx, span)?;
let (body, thunks) =
ctx.with_thunk_scope(|ctx| ctx.with_with_scope(|ctx| body_expr.downgrade(ctx)));
let body = body?;
Ok(ctx.new_expr(
@@ -426,7 +445,9 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::With {
/// This involves desugaring pattern-matching arguments into `let` bindings.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let raw_param = self.param().unwrap();
let span = self.syntax().text_range();
let raw_param = self.param().require(ctx, span)?;
let body_ast = self.body().require(ctx, span)?;
let arg = ctx.new_arg(raw_param.syntax().text_range());
struct Ret {
@@ -440,19 +461,20 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
match raw_param {
ast::Param::IdentParam(id) => {
// Simple case: `x: body`
let param_sym = ctx.new_sym(id.to_string());
param = None;
// Downgrade body in Param scope
body = ctx.with_param_scope(param_sym, arg, |ctx| {
self.body().unwrap().downgrade(ctx)
})?;
body = ctx
.with_param_scope(param_sym, arg, |ctx| body_ast.clone().downgrade(ctx))?;
}
ast::Param::Pattern(pattern) => {
let alias = pattern
.pat_bind()
.map(|alias| ctx.new_sym(alias.ident().unwrap().to_string()));
.map(|alias| {
let ident = alias.ident().require(ctx, alias.syntax().text_range())?;
Ok::<_, Box<Error>>(ctx.new_sym(ident.to_string()))
})
.transpose()?;
let ellipsis = pattern.ellipsis_token().is_some();
let pat_entries = pattern.pat_entries();
@@ -462,7 +484,7 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
required,
optional,
} = downgrade_pattern_bindings(pat_entries, alias, arg, ctx, |ctx, _| {
self.body().unwrap().downgrade(ctx)
body_ast.clone().downgrade(ctx)
})?;
param = Some(Param {
@@ -479,7 +501,6 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
});
let Ret { param, body } = ret?;
let span = self.syntax().text_range();
Ok(ctx.new_expr(
Func {
body,
@@ -498,8 +519,9 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
/// Each Apply node represents a single function call with one argument.
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Apply {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let func = self.lambda().unwrap().downgrade(ctx)?;
let arg = self.argument().unwrap().downgrade(ctx)?;
let span = self.syntax().text_range();
let func = self.lambda().require(ctx, span)?.downgrade(ctx)?;
let arg = self.argument().require(ctx, span)?.downgrade(ctx)?;
let arg = ctx.maybe_thunk(arg);
let span = self.syntax().text_range();
Ok(ctx.new_expr(Call { func, arg, span }.to_ir()))
@@ -551,7 +573,8 @@ impl PendingAttrSet {
ast::Attr::Str(string) => {
let parts = string.normalized_parts();
if parts.len() == 1
&& let ast::InterpolPart::Literal(lit) = parts.into_iter().next().unwrap()
&& let ast::InterpolPart::Literal(lit) =
parts.into_iter().next().expect("len checked")
{
let sym = ctx.new_sym(lit);
return self.insert_static(sym, span, rest, value, ctx);
@@ -768,9 +791,10 @@ impl PendingAttrSet {
self.collect_inherit(inherit, ctx)?;
}
ast::Entry::AttrpathValue(value) => {
let attrpath = value.attrpath().unwrap();
let span = value.syntax().text_range();
let attrpath = value.attrpath().require(ctx, span)?;
let path: Vec<_> = attrpath.attrs().collect();
let expr = value.value().unwrap();
let expr = value.value().require(ctx, span)?;
self.insert(&path, expr, ctx)?;
}
}
@@ -783,7 +807,13 @@ impl PendingAttrSet {
inherit: ast::Inherit,
ctx: &mut impl DowngradeContext,
) -> Result<()> {
let from = inherit.from().map(|f| f.expr().unwrap());
let from = inherit
.from()
.map(|f| {
let span = f.syntax().text_range();
f.expr().require(ctx, span)
})
.transpose()?;
for attr in inherit.attrs() {
let span = attr.syntax().text_range();
let sym = match &attr {
@@ -791,7 +821,8 @@ impl PendingAttrSet {
ast::Attr::Str(s) => {
let parts = s.normalized_parts();
if parts.len() == 1
&& let ast::InterpolPart::Literal(lit) = parts.into_iter().next().unwrap()
&& let ast::InterpolPart::Literal(lit) =
parts.into_iter().next().expect("len checked")
{
ctx.new_sym(lit)
} else {
@@ -886,7 +917,7 @@ fn make_attrpath_value_entry(path: Vec<ast::Attr>, value: ast::Expr) -> ast::Ent
let green = builder.finish();
let node = rowan::SyntaxNode::<NixLanguage>::new_root(green);
ast::Entry::cast(node).unwrap()
ast::Entry::cast(node).expect("constructed valid Entry node")
}
/// Downgrades the entries of a non-recursive attribute set.
@@ -915,10 +946,13 @@ fn downgrade_attr(attr: ast::Attr, ctx: &mut impl DowngradeContext) -> Result<At
if parts.is_empty() {
Ok(Attr::Str(ctx.new_sym("".to_string()), span))
} else if parts.len() == 1 {
match parts.into_iter().next().unwrap() {
match parts.into_iter().next().expect("len checked") {
Literal(ident) => Ok(Attr::Str(ctx.new_sym(ident), span)),
Interpolation(interpol) => Ok(Attr::Dynamic(
interpol.expr().unwrap().downgrade(ctx)?,
interpol
.expr()
.require(ctx, interpol.syntax().text_range())?
.downgrade(ctx)?,
span,
)),
}
@@ -927,7 +961,10 @@ fn downgrade_attr(attr: ast::Attr, ctx: &mut impl DowngradeContext) -> Result<At
.into_iter()
.map(|part| match part {
Literal(lit) => Ok(ctx.new_expr(self::Str { val: lit, span }.to_ir())),
Interpolation(interpol) => interpol.expr().unwrap().downgrade(ctx),
Interpolation(interpol) => interpol
.expr()
.require(ctx, interpol.syntax().text_range())?
.downgrade(ctx),
})
.collect::<Result<Vec<_>>>()?;
Ok(Attr::Dynamic(
@@ -943,10 +980,13 @@ fn downgrade_attr(attr: ast::Attr, ctx: &mut impl DowngradeContext) -> Result<At
))
}
}
Dynamic(dynamic) => Ok(Attr::Dynamic(
dynamic.expr().unwrap().downgrade(ctx)?,
dynamic.syntax().text_range(),
)),
Dynamic(dynamic) => {
let span = dynamic.syntax().text_range();
Ok(Attr::Dynamic(
dynamic.expr().require(ctx, span)?.downgrade(ctx)?,
span,
))
}
}
}
@@ -988,7 +1028,7 @@ where
let (params, mut binding_keys) = pat_entries
.into_iter()
.map(|entry| {
let ident = entry.ident().unwrap();
let ident = entry.ident().require(ctx, entry.syntax().text_range())?;
let sym_span = ident.syntax().text_range();
let sym = ctx.new_sym(ident.syntax().text().to_string());
let default = entry.default();
@@ -1054,7 +1094,7 @@ where
span,
} in params
{
let slot = *let_bindings.get(&sym).unwrap();
let slot = *let_bindings.get(&sym).expect("binding registered");
let default = if let Some(default) = default {
let default = default.clone().downgrade(ctx)?;
@@ -1076,7 +1116,7 @@ where
}
if let Some(alias_sym) = alias {
let slot = *let_bindings.get(&alias_sym).unwrap();
let slot = *let_bindings.get(&alias_sym).expect("binding registered");
ctx.register_thunk(slot, arg);
}
@@ -1318,3 +1358,38 @@ fn finalize_pending_value<Ctx: DowngradeContext, const ALLOW_DYN: bool>(
}
}
}
/// Lowers an interpolated path expression (e.g. `./foo/${bar}`) into IR.
///
/// Literal segments become `Str` expressions; interpolated segments are
/// downgraded recursively. A single part is used as-is, while several parts
/// are joined with a non-coercing `ConcatStrings` before being wrapped in a
/// `Path` node spanning the whole expression.
fn downgrade_path(
    parts: impl IntoIterator<Item = ast::InterpolPart<ast::PathContent>>,
    span: rnix::TextRange,
    ctx: &mut impl DowngradeContext,
) -> Result<ExprId> {
    let mut lowered = Vec::new();
    for part in parts {
        let piece = match part {
            ast::InterpolPart::Literal(lit) => {
                let str_ir = Str {
                    val: lit.text().to_string(),
                    span: lit.syntax().text_range(),
                }
                .to_ir();
                ctx.new_expr(str_ir)
            }
            ast::InterpolPart::Interpolation(interpol) => {
                let interpol_span = interpol.syntax().text_range();
                interpol.expr().require(ctx, interpol_span)?.downgrade(ctx)?
            }
        };
        lowered.push(piece);
    }
    // Avoid an unnecessary ConcatStrings node for the common single-part case.
    let expr = match lowered.try_conv::<[_; 1]>() {
        Ok([only]) => only,
        Err(lowered) => ctx.new_expr(
            ConcatStrings {
                parts: lowered,
                span,
                force_string: false,
            }
            .to_ir(),
        ),
    };
    Ok(ctx.new_expr(Path { expr, span }.to_ir()))
}

View File

@@ -1,5 +1,3 @@
#![allow(unused_assignments)]
use std::path::{Path, PathBuf};
use std::sync::Arc;

View File

@@ -13,7 +13,6 @@ mod archive;
pub(crate) mod cache;
mod download;
mod git;
mod hg;
mod metadata_cache;
pub use cache::FetcherCache;
@@ -47,15 +46,6 @@ pub struct FetchGitResult {
pub nar_hash: Option<String>,
}
#[derive(Serialize)]
pub struct FetchHgResult {
pub out_path: String,
pub branch: String,
pub rev: String,
pub short_rev: String,
pub rev_count: u64,
}
#[op2]
#[serde]
pub fn op_fetch_url<Ctx: RuntimeContext>(
@@ -119,7 +109,7 @@ pub fn op_fetch_url<Ctx: RuntimeContext>(
info!(bytes = data.len(), "Download complete");
let hash = crate::nix_utils::sha256_hex(&String::from_utf8_lossy(&data));
let hash = crate::nix_utils::sha256_hex(&data);
if let Some(ref expected) = expected_hash {
let normalized_expected = normalize_hash(expected);
@@ -228,9 +218,7 @@ pub fn op_fetch_tarball<Ctx: RuntimeContext>(
info!(bytes = data.len(), "Download complete");
info!("Extracting tarball");
let cache = FetcherCache::new().map_err(|e| NixRuntimeError::from(e.to_string()))?;
let (extracted_path, _temp_dir) = cache
.extract_tarball_to_temp(&data)
let (extracted_path, _temp_dir) = archive::extract_tarball_to_temp(&data)
.map_err(|e| NixRuntimeError::from(e.to_string()))?;
info!("Computing NAR hash");
@@ -311,20 +299,6 @@ pub fn op_fetch_git<Ctx: RuntimeContext>(
.map_err(|e| NixRuntimeError::from(e.to_string()))
}
#[op2]
#[serde]
pub fn op_fetch_hg(
#[string] url: String,
#[string] rev: Option<String>,
#[string] name: Option<String>,
) -> Result<FetchHgResult, NixRuntimeError> {
let cache = FetcherCache::new().map_err(|e| NixRuntimeError::from(e.to_string()))?;
let dir_name = name.unwrap_or_else(|| "source".to_string());
hg::fetch_hg(&cache, &url, rev.as_deref(), &dir_name)
.map_err(|e| NixRuntimeError::from(e.to_string()))
}
fn normalize_hash(hash: &str) -> String {
use base64::prelude::*;
if hash.starts_with("sha256-")
@@ -341,6 +315,5 @@ pub fn register_ops<Ctx: RuntimeContext>() -> Vec<deno_core::OpDecl> {
op_fetch_url::<Ctx>(),
op_fetch_tarball::<Ctx>(),
op_fetch_git::<Ctx>(),
op_fetch_hg(),
]
}

View File

@@ -1,5 +1,6 @@
use std::fs::{self, File};
use std::fs;
use std::io::Cursor;
use std::os::unix::ffi::OsStrExt;
use std::path::{Path, PathBuf};
use flate2::read::GzDecoder;
@@ -10,7 +11,6 @@ pub enum ArchiveFormat {
TarXz,
TarBz2,
Tar,
Zip,
}
impl ArchiveFormat {
@@ -27,9 +27,6 @@ impl ArchiveFormat {
if url.ends_with(".tar") {
return ArchiveFormat::Tar;
}
if url.ends_with(".zip") {
return ArchiveFormat::Zip;
}
if data.len() >= 2 && data[0] == 0x1f && data[1] == 0x8b {
return ArchiveFormat::TarGz;
@@ -40,15 +37,12 @@ impl ArchiveFormat {
if data.len() >= 3 && &data[0..3] == b"BZh" {
return ArchiveFormat::TarBz2;
}
if data.len() >= 4 && &data[0..4] == b"PK\x03\x04" {
return ArchiveFormat::Zip;
}
ArchiveFormat::TarGz
}
}
pub fn extract_archive(data: &[u8], dest: &Path) -> Result<PathBuf, ArchiveError> {
pub fn extract_tarball(data: &[u8], dest: &Path) -> Result<PathBuf, ArchiveError> {
let format = ArchiveFormat::detect("", data);
let temp_dir = dest.join("_extract_temp");
@@ -59,7 +53,6 @@ pub fn extract_archive(data: &[u8], dest: &Path) -> Result<PathBuf, ArchiveError
ArchiveFormat::TarXz => extract_tar_xz(data, &temp_dir)?,
ArchiveFormat::TarBz2 => extract_tar_bz2(data, &temp_dir)?,
ArchiveFormat::Tar => extract_tar(data, &temp_dir)?,
ArchiveFormat::Zip => extract_zip(data, &temp_dir)?,
}
strip_single_toplevel(&temp_dir, dest)
@@ -92,40 +85,10 @@ fn extract_tar(data: &[u8], dest: &Path) -> Result<(), ArchiveError> {
Ok(())
}
fn extract_zip(data: &[u8], dest: &Path) -> Result<(), ArchiveError> {
let cursor = Cursor::new(data);
let mut archive = zip::ZipArchive::new(cursor)?;
for i in 0..archive.len() {
let mut file = archive.by_index(i)?;
let outpath = dest.join(file.mangled_name());
if file.is_dir() {
fs::create_dir_all(&outpath)?;
} else {
if let Some(parent) = outpath.parent() {
fs::create_dir_all(parent)?;
}
let mut outfile = File::create(&outpath)?;
std::io::copy(&mut file, &mut outfile)?;
}
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
if let Some(mode) = file.unix_mode() {
fs::set_permissions(&outpath, fs::Permissions::from_mode(mode))?;
}
}
}
Ok(())
}
fn strip_single_toplevel(temp_dir: &Path, dest: &Path) -> Result<PathBuf, ArchiveError> {
let entries: Vec<_> = fs::read_dir(temp_dir)?
.filter_map(|e| e.ok())
.filter(|e| !e.file_name().to_string_lossy().starts_with('.'))
.filter(|e| e.file_name().as_os_str().as_bytes()[0] != b'.')
.collect();
let source_dir = if entries.len() == 1 && entries[0].file_type()?.is_dir() {
@@ -182,17 +145,21 @@ fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<(), std::io::Error> {
Ok(())
}
/// Extracts a tarball into a freshly created temporary directory.
///
/// Returns the path of the extracted tree together with the owning
/// [`tempfile::TempDir`] guard. The directory is deleted when the guard is
/// dropped, so callers must keep the guard alive while using the path.
pub fn extract_tarball_to_temp(data: &[u8]) -> Result<(PathBuf, tempfile::TempDir), ArchiveError> {
    let temp_dir = tempfile::tempdir()?;
    let extracted_path = extract_tarball(data, temp_dir.path())?;
    Ok((extracted_path, temp_dir))
}
#[derive(Debug)]
pub enum ArchiveError {
IoError(std::io::Error),
ZipError(zip::result::ZipError),
}
impl std::fmt::Display for ArchiveError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ArchiveError::IoError(e) => write!(f, "I/O error: {}", e),
ArchiveError::ZipError(e) => write!(f, "ZIP error: {}", e),
}
}
}
@@ -204,9 +171,3 @@ impl From<std::io::Error> for ArchiveError {
ArchiveError::IoError(e)
}
}
impl From<zip::result::ZipError> for ArchiveError {
fn from(e: zip::result::ZipError) -> Self {
ArchiveError::ZipError(e)
}
}

View File

@@ -1,37 +1,6 @@
use std::fs;
use std::path::PathBuf;
use super::archive::ArchiveError;
#[derive(Debug)]
pub enum CacheError {
Io(std::io::Error),
Archive(ArchiveError),
}
impl std::fmt::Display for CacheError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
CacheError::Io(e) => write!(f, "I/O error: {}", e),
CacheError::Archive(e) => write!(f, "Archive error: {}", e),
}
}
}
impl std::error::Error for CacheError {}
impl From<std::io::Error> for CacheError {
fn from(e: std::io::Error) -> Self {
CacheError::Io(e)
}
}
impl From<ArchiveError> for CacheError {
fn from(e: ArchiveError) -> Self {
CacheError::Archive(e)
}
}
#[derive(Debug)]
pub struct FetcherCache {
base_dir: PathBuf,
@@ -49,41 +18,12 @@ impl FetcherCache {
Ok(Self { base_dir })
}
pub fn make_store_path(&self, hash: &str, name: &str) -> PathBuf {
let short_hash = &hash[..32.min(hash.len())];
self.base_dir
.join("store")
.join(format!("{}-{}", short_hash, name))
}
fn git_cache_dir(&self) -> PathBuf {
self.base_dir.join("gitv3")
}
fn hg_cache_dir(&self) -> PathBuf {
self.base_dir.join("hg")
}
fn hash_key(url: &str) -> String {
crate::nix_utils::sha256_hex(url)
self.base_dir.join("git")
}
pub fn get_git_bare(&self, url: &str) -> PathBuf {
let key = Self::hash_key(url);
let key = crate::nix_utils::sha256_hex(url.as_bytes());
self.git_cache_dir().join(key)
}
pub fn get_hg_bare(&self, url: &str) -> PathBuf {
let key = Self::hash_key(url);
self.hg_cache_dir().join(key)
}
pub fn extract_tarball_to_temp(
&self,
data: &[u8],
) -> Result<(PathBuf, tempfile::TempDir), CacheError> {
let temp_dir = tempfile::tempdir()?;
let extracted_path = super::archive::extract_archive(data, temp_dir.path())?;
Ok((extracted_path, temp_dir))
}
}

View File

@@ -1,196 +0,0 @@
use std::fs;
use std::path::PathBuf;
use std::process::Command;
use super::FetchHgResult;
use super::cache::FetcherCache;
pub fn fetch_hg(
cache: &FetcherCache,
url: &str,
rev: Option<&str>,
name: &str,
) -> Result<FetchHgResult, HgError> {
let bare_repo = cache.get_hg_bare(url);
if !bare_repo.exists() {
clone_repo(url, &bare_repo)?;
} else {
pull_repo(&bare_repo)?;
}
let target_rev = rev.unwrap_or("tip").to_string();
let resolved_rev = resolve_rev(&bare_repo, &target_rev)?;
let branch = get_branch(&bare_repo, &resolved_rev)?;
let checkout_dir = checkout_rev(&bare_repo, &resolved_rev, name, cache)?;
let rev_count = get_rev_count(&bare_repo, &resolved_rev)?;
let short_rev = if resolved_rev.len() >= 12 {
resolved_rev[..12].to_string()
} else {
resolved_rev.clone()
};
Ok(FetchHgResult {
out_path: checkout_dir.to_string_lossy().to_string(),
branch,
rev: resolved_rev,
short_rev,
rev_count,
})
}
fn clone_repo(url: &str, dest: &PathBuf) -> Result<(), HgError> {
fs::create_dir_all(dest.parent().unwrap_or(dest))?;
let output = Command::new("hg")
.args(["clone", "-U", url])
.arg(dest)
.env("HGPLAIN", "")
.output()?;
if !output.status.success() {
return Err(HgError::CommandFailed {
operation: "clone".to_string(),
message: String::from_utf8_lossy(&output.stderr).to_string(),
});
}
Ok(())
}
fn pull_repo(repo: &PathBuf) -> Result<(), HgError> {
let output = Command::new("hg")
.args(["pull"])
.current_dir(repo)
.env("HGPLAIN", "")
.output()?;
if !output.status.success() {
return Err(HgError::CommandFailed {
operation: "pull".to_string(),
message: String::from_utf8_lossy(&output.stderr).to_string(),
});
}
Ok(())
}
fn resolve_rev(repo: &PathBuf, rev: &str) -> Result<String, HgError> {
let output = Command::new("hg")
.args(["log", "-r", rev, "--template", "{node}"])
.current_dir(repo)
.env("HGPLAIN", "")
.output()?;
if !output.status.success() {
return Err(HgError::CommandFailed {
operation: "log".to_string(),
message: format!(
"Could not resolve rev '{}': {}",
rev,
String::from_utf8_lossy(&output.stderr)
),
});
}
Ok(String::from_utf8_lossy(&output.stdout).trim().to_string())
}
fn get_branch(repo: &PathBuf, rev: &str) -> Result<String, HgError> {
let output = Command::new("hg")
.args(["log", "-r", rev, "--template", "{branch}"])
.current_dir(repo)
.env("HGPLAIN", "")
.output()?;
if !output.status.success() {
return Ok("default".to_string());
}
let branch = String::from_utf8_lossy(&output.stdout).trim().to_string();
if branch.is_empty() {
Ok("default".to_string())
} else {
Ok(branch)
}
}
fn checkout_rev(
bare_repo: &PathBuf,
rev: &str,
name: &str,
cache: &FetcherCache,
) -> Result<PathBuf, HgError> {
let hash = crate::nix_utils::sha256_hex(&format!("{}:{}", bare_repo.display(), rev));
let checkout_dir = cache.make_store_path(&hash, name);
if checkout_dir.exists() {
return Ok(checkout_dir);
}
fs::create_dir_all(&checkout_dir)?;
let output = Command::new("hg")
.args(["archive", "-r", rev])
.arg(&checkout_dir)
.current_dir(bare_repo)
.env("HGPLAIN", "")
.output()?;
if !output.status.success() {
fs::remove_dir_all(&checkout_dir)?;
return Err(HgError::CommandFailed {
operation: "archive".to_string(),
message: String::from_utf8_lossy(&output.stderr).to_string(),
});
}
let hg_archival = checkout_dir.join(".hg_archival.txt");
if hg_archival.exists() {
fs::remove_file(&hg_archival)?;
}
Ok(checkout_dir)
}
fn get_rev_count(repo: &PathBuf, rev: &str) -> Result<u64, HgError> {
let output = Command::new("hg")
.args(["log", "-r", &format!("0::{}", rev), "--template", "x"])
.current_dir(repo)
.env("HGPLAIN", "")
.output()?;
if !output.status.success() {
return Ok(0);
}
Ok(output.stdout.len() as u64)
}
#[derive(Debug)]
pub enum HgError {
IoError(std::io::Error),
CommandFailed { operation: String, message: String },
}
impl std::fmt::Display for HgError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
HgError::IoError(e) => write!(f, "I/O error: {}", e),
HgError::CommandFailed { operation, message } => {
write!(f, "Mercurial {} failed: {}", operation, message)
}
}
}
}
impl std::error::Error for HgError {}
impl From<std::io::Error> for HgError {
fn from(e: std::io::Error) -> Self {
HgError::IoError(e)
}
}

View File

@@ -1,9 +1,9 @@
use nix_compat::store_path::compress_hash;
use sha2::{Digest as _, Sha256};
pub fn sha256_hex(data: &str) -> String {
pub fn sha256_hex(data: &[u8]) -> String {
let mut hasher = Sha256::new();
hasher.update(data.as_bytes());
hasher.update(data);
hex::encode(hasher.finalize())
}
@@ -19,11 +19,3 @@ pub fn make_store_path(store_dir: &str, ty: &str, hash_hex: &str, name: &str) ->
format!("{}/{}-{}", store_dir, encoded, name)
}
pub fn output_path_name(drv_name: &str, output_name: &str) -> String {
if output_name == "out" {
drv_name.to_string()
} else {
format!("{}-{}", drv_name, output_name)
}
}

View File

@@ -55,7 +55,6 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
op_make_placeholder(),
op_decode_span::<Ctx>(),
op_make_store_path::<Ctx>(),
op_output_path_name(),
op_parse_hash(),
op_make_fixed_output_path::<Ctx>(),
op_add_path::<Ctx>(),
@@ -70,6 +69,8 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
op_add_filtered_path::<Ctx>(),
op_match(),
op_split(),
op_from_json(),
op_from_toml(),
];
ops.extend(crate::fetcher::register_ops::<Ctx>());
@@ -132,10 +133,7 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
deno_core::v8_set_flags(vec!["".into(), format!("--stack-size={}", 8 * 1024)]),
[""]
);
JsRuntime::init_platform(
Some(v8::new_default_platform(0, false).make_shared()),
false,
);
JsRuntime::init_platform(Some(v8::new_default_platform(0, false).make_shared()));
});
let mut js_runtime = JsRuntime::new(RuntimeOptions {

View File

@@ -4,6 +4,7 @@ use std::str::FromStr;
use hashbrown::hash_map::{Entry, HashMap};
use deno_core::OpState;
use deno_core::v8;
use regex::Regex;
use rust_embed::Embed;
@@ -246,7 +247,7 @@ pub(super) fn op_resolve_path(
#[deno_core::op2]
#[string]
pub(super) fn op_sha256_hex(#[string] data: String) -> String {
crate::nix_utils::sha256_hex(&data)
crate::nix_utils::sha256_hex(data.as_bytes())
}
#[deno_core::op2]
@@ -325,15 +326,6 @@ pub(super) fn op_make_store_path<Ctx: RuntimeContext>(
crate::nix_utils::make_store_path(store_dir, &ty, &hash_hex, &name)
}
#[deno_core::op2]
#[string]
pub(super) fn op_output_path_name(
#[string] drv_name: String,
#[string] output_name: String,
) -> String {
crate::nix_utils::output_path_name(&drv_name, &output_name)
}
#[derive(serde::Serialize)]
pub(super) struct ParsedHash {
hex: String,
@@ -1048,3 +1040,116 @@ pub(super) enum SplitResult {
Text(String),
Captures(Vec<Option<String>>),
}
/// A parsed JSON/TOML document in a shape that can be handed to V8.
///
/// Unlike `serde_json::Value`, integers and floats are kept distinct (the
/// `ToV8` impl turns `Int` into a JS `BigInt` and `Float` into a `Number`),
/// and object entries are stored as a list of key/value pairs that is
/// replayed in order when the V8 object is built.
pub(super) enum NixJsonValue {
    Null,
    Bool(bool),
    Int(i64),
    Float(f64),
    Str(String),
    Arr(Vec<NixJsonValue>),
    // Key/value pairs, inserted in list order by the ToV8 conversion.
    Obj(Vec<(String, NixJsonValue)>),
}
impl<'a> deno_core::convert::ToV8<'a> for NixJsonValue {
    type Error = deno_error::JsErrorBox;

    /// Recursively materializes the value as a V8 handle in `scope`.
    ///
    /// - `Int` becomes a JS `BigInt` via `new_from_i64`, preserving the full
    ///   64-bit integer range.
    /// - `Float` becomes a JS `Number`.
    /// - `Obj` entries are inserted onto a fresh object in list order.
    ///
    /// V8 string allocation can return `None`; that is reported as a
    /// `JsErrorBox` type error.
    fn to_v8<'i>(
        self,
        scope: &mut v8::PinScope<'a, 'i>,
    ) -> std::result::Result<v8::Local<'a, v8::Value>, Self::Error> {
        match self {
            Self::Null => Ok(v8::null(scope).into()),
            Self::Bool(b) => Ok(v8::Boolean::new(scope, b).into()),
            Self::Int(i) => Ok(v8::BigInt::new_from_i64(scope, i).into()),
            Self::Float(f) => Ok(v8::Number::new(scope, f).into()),
            Self::Str(s) => v8::String::new(scope, &s)
                .map(|s| s.into())
                .ok_or_else(|| deno_error::JsErrorBox::type_error("failed to create v8 string")),
            Self::Arr(arr) => {
                // Convert all children first; the first failure aborts the array.
                let elements = arr
                    .into_iter()
                    .map(|v| v.to_v8(scope))
                    .collect::<std::result::Result<Vec<_>, _>>()?;
                Ok(v8::Array::new_with_elements(scope, &elements).into())
            }
            Self::Obj(entries) => {
                let obj = v8::Object::new(scope);
                for (k, v) in entries {
                    let key: v8::Local<v8::Value> = v8::String::new(scope, &k)
                        .ok_or_else(|| {
                            deno_error::JsErrorBox::type_error("failed to create v8 string")
                        })?
                        .into();
                    let val = v.to_v8(scope)?;
                    // NOTE(review): `Object::set` returns None on failure; the
                    // result is ignored here — confirm that is intentional.
                    obj.set(scope, key, val);
                }
                Ok(obj.into())
            }
        }
    }
}
fn json_to_nix(value: serde_json::Value) -> NixJsonValue {
match value {
serde_json::Value::Null => NixJsonValue::Null,
serde_json::Value::Bool(b) => NixJsonValue::Bool(b),
serde_json::Value::Number(n) => {
if let Some(i) = n.as_i64() {
NixJsonValue::Int(i)
} else if let Some(f) = n.as_f64() {
NixJsonValue::Float(f)
} else {
NixJsonValue::Float(n.as_u64().unwrap_or(0) as f64)
}
}
serde_json::Value::String(s) => NixJsonValue::Str(s),
serde_json::Value::Array(arr) => {
NixJsonValue::Arr(arr.into_iter().map(json_to_nix).collect())
}
serde_json::Value::Object(map) => {
NixJsonValue::Obj(map.into_iter().map(|(k, v)| (k, json_to_nix(v))).collect())
}
}
}
fn toml_to_nix(value: toml::Value) -> std::result::Result<NixJsonValue, NixRuntimeError> {
match value {
toml::Value::String(s) => Ok(NixJsonValue::Str(s)),
toml::Value::Integer(i) => Ok(NixJsonValue::Int(i)),
toml::Value::Float(f) => Ok(NixJsonValue::Float(f)),
toml::Value::Boolean(b) => Ok(NixJsonValue::Bool(b)),
toml::Value::Datetime(_) => Err(NixRuntimeError::from(
"while parsing TOML: Dates and times are not supported",
)),
toml::Value::Array(arr) => {
let items: std::result::Result<Vec<_>, _> = arr.into_iter().map(toml_to_nix).collect();
Ok(NixJsonValue::Arr(items?))
}
toml::Value::Table(table) => {
let entries: std::result::Result<Vec<_>, _> = table
.into_iter()
.map(|(k, v)| toml_to_nix(v).map(|v| (k, v)))
.collect();
Ok(NixJsonValue::Obj(entries?))
}
}
}
/// Backend for `builtins.fromJSON`: parses a JSON string into a value tree.
///
/// Parse failures are surfaced as a `NixRuntimeError` whose message is
/// prefixed with `builtins.fromJSON:`.
#[deno_core::op2]
pub(super) fn op_from_json(
    #[string] json_str: String,
) -> std::result::Result<NixJsonValue, NixRuntimeError> {
    let parsed: serde_json::Value = serde_json::from_str(&json_str)
        .map_err(|e| NixRuntimeError::from(format!("builtins.fromJSON: {e}")))?;
    Ok(json_to_nix(parsed))
}
/// Backend for `builtins.fromTOML`: parses a TOML string into a value tree.
///
/// Both parse failures and unsupported constructs (date/time values, rejected
/// by `toml_to_nix`) are reported as a `NixRuntimeError` prefixed with
/// `while parsing TOML:`.
#[deno_core::op2]
pub(super) fn op_from_toml(
    #[string] toml_str: String,
) -> std::result::Result<NixJsonValue, NixRuntimeError> {
    let parsed: toml::Value = toml::from_str(&toml_str)
        .map_err(|e| NixRuntimeError::from(format!("while parsing TOML: {e}")))?;
    toml_to_nix(parsed)
}

View File

@@ -552,9 +552,13 @@ impl NixDaemonClient {
assert_eq!(type_marker, "Error");
let level = NixDaemonErrorLevel::try_from_primitive(
self.reader.read_number().await?.try_into().unwrap(),
self.reader
.read_number()
.await?
.try_into()
.map_err(|_| IoError::other("invalid nix-daemon error level"))?,
)
.unwrap();
.map_err(|_| IoError::other("invalid nix-daemon error level"))?;
// removed
let _name: String = self.reader.read_value().await?;
let msg: String = self.reader.read_value().await?;

View File

@@ -268,12 +268,66 @@ fn escape_quote_string(s: &str) -> String {
ret
}
/// Format a float matching C's `printf("%g", x)` with default precision 6.
///
/// `%g` chooses between decimal and scientific notation based on the decimal
/// exponent of the value *after* rounding to 6 significant digits, strips
/// trailing fractional zeros (and a dangling decimal point), and writes the
/// exponent with an explicit sign padded to at least two digits.
fn fmt_nix_float(f: &mut Formatter<'_>, x: f64) -> FmtResult {
    // Infinities and NaN print as Rust renders them.
    if !x.is_finite() {
        return write!(f, "{x}");
    }
    if x == 0.0 {
        // Preserve the sign of negative zero, like printf does.
        return if x.is_sign_negative() {
            write!(f, "-0")
        } else {
            write!(f, "0")
        };
    }
    let precision: i32 = 6;
    let sig_digits = (precision - 1) as usize;
    // Round to `precision` significant digits first and take the exponent of
    // the *rounded* value. Computing the exponent with `log10` on the raw
    // value misclassifies boundary cases: 999999.5 rounds to 1e+06 and must
    // use scientific notation, but its pre-rounding exponent is still 5.
    let sci = format!("{x:.sig_digits$e}");
    let (mantissa, exp_part) = sci
        .split_once('e')
        .expect("scientific notation must contain 'e'");
    let exp: i32 = exp_part
        .parse()
        .expect("exponent of {:e} output is a valid integer");
    let formatted = if exp >= -4 && exp < precision {
        // Decimal notation carrying exactly `precision` significant digits.
        let decimal_places = (precision - 1 - exp) as usize;
        format!("{x:.decimal_places$}")
    } else {
        // Scientific notation. Rust's `{:e}` writes neither a '+' sign nor
        // exponent zero-padding, so add both to match printf.
        let (sign, digits) = if let Some(d) = exp_part.strip_prefix('-') {
            ("-", d)
        } else {
            ("+", exp_part)
        };
        if digits.len() < 2 {
            format!("{mantissa}e{sign}0{digits}")
        } else {
            format!("{mantissa}e{sign}{digits}")
        }
    };
    // %g strips trailing zeros after the point, then the point itself.
    if formatted.contains('.') {
        if let Some(e_pos) = formatted.find('e') {
            let trimmed = formatted[..e_pos]
                .trim_end_matches('0')
                .trim_end_matches('.');
            write!(f, "{}{}", trimmed, &formatted[e_pos..])
        } else {
            let trimmed = formatted.trim_end_matches('0').trim_end_matches('.');
            write!(f, "{trimmed}")
        }
    } else {
        write!(f, "{formatted}")
    }
}
impl Display for Value {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
use Value::*;
match self {
&Int(x) => write!(f, "{x}"),
&Float(x) => write!(f, "{x}"),
&Float(x) => fmt_nix_float(f, x),
&Bool(x) => write!(f, "{x}"),
Null => write!(f, "null"),
String(x) => write!(f, "{}", escape_quote_string(x)),
@@ -302,7 +356,7 @@ impl Display for ValueCompatDisplay<'_> {
use Value::*;
match self.0 {
&Int(x) => write!(f, "{x}"),
&Float(x) => write!(f, "{x}"),
&Float(x) => fmt_nix_float(f, x),
&Bool(x) => write!(f, "{x}"),
Null => write!(f, "null"),
String(x) => write!(f, "{}", escape_quote_string(x)),

View File

@@ -152,12 +152,9 @@ eval_okay_test!(foldlStrict_lazy_elements);
eval_okay_test!(foldlStrict_lazy_initial_accumulator);
eval_okay_test!(fromjson);
eval_okay_test!(fromjson_escapes);
eval_okay_test!(fromTOML);
eval_okay_test!(
#[ignore = "not implemented: fromTOML"]
fromTOML
);
eval_okay_test!(
#[ignore = "not implemented: fromTOML"]
#[ignore = "timestamps are not supported"]
fromTOML_timestamps
);
eval_okay_test!(functionargs);
@@ -209,11 +206,15 @@ eval_okay_test!(
eval_okay_test!(partition);
eval_okay_test!(path);
eval_okay_test!(pathexists);
eval_okay_test!(path_string_interpolation, || {
unsafe {
std::env::set_var("HOME", "/fake-home");
eval_okay_test!(
#[ignore = "rnix 0.13 regression: /${foo}-/*...*/ fails to parse"]
path_string_interpolation,
|| {
unsafe {
std::env::set_var("HOME", "/fake-home");
}
}
});
);
eval_okay_test!(patterns);
eval_okay_test!(print);
eval_okay_test!(readDir);

View File

@@ -489,3 +489,76 @@ fn split_no_match_preserves_context() {
);
assert_eq!(result, Value::Bool(true));
}
/// `builtins.path` must return a string that carries store-path context.
#[test]
fn builtins_path_has_context() {
    let dir = tempfile::tempdir().unwrap();
    let file_path = dir.path().join("test.txt");
    std::fs::write(&file_path, "hello").unwrap();

    // The absolute temp path is spliced in as a Nix path literal.
    let expr = format!(
        r#"builtins.hasContext (builtins.path {{ path = {}; name = "test-ctx"; }})"#,
        file_path.display()
    );
    assert_eq!(eval(&expr), Value::Bool(true));
}
/// Context attached by `builtins.path` must survive into a derivation that
/// uses `__structuredAttrs = true`, so `drvPath` can be computed.
#[test]
fn builtins_path_context_tracked_in_structured_attrs_derivation() {
    let temp_dir = tempfile::tempdir().unwrap();
    let test_file = temp_dir.path().join("test-patch.txt");
    std::fs::write(&test_file, "patch content").unwrap();
    // The temp file path is interpolated as a Nix path literal.
    let expr = format!(
        r#"
let
  patch = builtins.path {{ path = {}; name = "test-patch"; }};
in
(derivation {{
  __structuredAttrs = true;
  name = "test-input-srcs";
  system = "x86_64-linux";
  builder = "/bin/sh";
  patches = [ patch ];
}}).drvPath
"#,
        test_file.display()
    );
    let result = eval(&expr);
    // Successful evaluation yields the derivation's store path as a string.
    if let Value::String(s) = &result {
        assert!(s.contains("/nix/store/"), "drvPath should be a store path");
    } else {
        panic!("Expected string, got {:?}", result);
    }
}
/// Same as the structured-attrs test, but for a plain (non-structured)
/// derivation: context from `builtins.path` in an ordinary attribute must
/// still be tracked when computing `drvPath`.
#[test]
fn builtins_path_context_tracked_in_non_structured_derivation() {
    let temp_dir = tempfile::tempdir().unwrap();
    let test_file = temp_dir.path().join("dep.txt");
    std::fs::write(&test_file, "dependency content").unwrap();
    // The temp file path is interpolated as a Nix path literal.
    let expr = format!(
        r#"
let
  dep = builtins.path {{ path = {}; name = "dep-file"; }};
in
(derivation {{
  name = "test-non-structured";
  system = "x86_64-linux";
  builder = "/bin/sh";
  myDep = dep;
}}).drvPath
"#,
        test_file.display()
    );
    let result = eval(&expr);
    // Successful evaluation yields the derivation's store path as a string.
    if let Value::String(s) = &result {
        assert!(s.contains("/nix/store/"), "drvPath should be a store path");
    } else {
        panic!("Expected string, got {:?}", result);
    }
}

View File

@@ -9,6 +9,7 @@ let
flake = (
import flake-compat {
src = ./.;
copySourceTreeToStore = false;
}
);
in

View File

@@ -1,6 +1,7 @@
[files]
extend-exclude = [
"nix-js/tests/regex.rs"
"nix-js/tests/regex.rs",
"nix-js/tests/lang",
]
[default.extend-words]