feat: builtins.unsafeGetAttrPos & __curPos

This commit is contained in:
2026-01-24 22:17:23 +08:00
parent 05b66070a3
commit 7136f57c12
12 changed files with 165 additions and 32 deletions

View File

@@ -140,3 +140,22 @@ export const zipAttrsWith =
return result; return result;
}; };
// builtins.unsafeGetAttrPos: look up the source position ({ file, line, column })
// at which `attrName` was bound inside `attrSet`. Returns null when the
// attribute is absent, or when no position was recorded for it (e.g. the
// attrset was built without position tracking).
export const unsafeGetAttrPos =
  (attrName: NixValue) =>
  (attrSet: NixValue): NixValue => {
    const key = forceStringValue(attrName);
    const set = forceAttrs(attrSet);
    if (!(key in set)) {
      return null;
    }
    // Positions live on a hidden, non-enumerable symbol-keyed table.
    const posTable = (set as any)[Nix.ATTR_POSITIONS];
    return posTable && key in posTable ? Nix.mkPos(posTable[key]) : null;
  };

View File

@@ -173,6 +173,7 @@ export const builtins: any = {
catAttrs: mkPrimop(attrs.catAttrs, "catAttrs", 2), catAttrs: mkPrimop(attrs.catAttrs, "catAttrs", 2),
groupBy: mkPrimop(attrs.groupBy, "groupBy", 2), groupBy: mkPrimop(attrs.groupBy, "groupBy", 2),
zipAttrsWith: mkPrimop(attrs.zipAttrsWith, "zipAttrsWith", 2), zipAttrsWith: mkPrimop(attrs.zipAttrsWith, "zipAttrsWith", 2),
unsafeGetAttrPos: mkPrimop(attrs.unsafeGetAttrPos, "unsafeGetAttrPos", 2),
stringLength: mkPrimop(string.stringLength, "stringLength", 1), stringLength: mkPrimop(string.stringLength, "stringLength", 1),
substring: mkPrimop(string.substring, "substring", 3), substring: mkPrimop(string.substring, "substring", 3),
@@ -232,7 +233,6 @@ export const builtins: any = {
1, 1,
), ),
unsafeDiscardStringContext: mkPrimop(misc.unsafeDiscardStringContext, "unsafeDiscardStringContext", 1), unsafeDiscardStringContext: mkPrimop(misc.unsafeDiscardStringContext, "unsafeDiscardStringContext", 1),
unsafeGetAttrPos: mkPrimop(misc.unsafeGetAttrPos, "unsafeGetAttrPos", 2),
addDrvOutputDependencies: mkPrimop(misc.addDrvOutputDependencies, "addDrvOutputDependencies", 2), addDrvOutputDependencies: mkPrimop(misc.addDrvOutputDependencies, "addDrvOutputDependencies", 2),
compareVersions: mkPrimop(misc.compareVersions, "compareVersions", 2), compareVersions: mkPrimop(misc.compareVersions, "compareVersions", 2),
flakeRefToString: mkPrimop(misc.flakeRefToString, "flakeRefToString", 1), flakeRefToString: mkPrimop(misc.flakeRefToString, "flakeRefToString", 1),

View File

@@ -5,7 +5,7 @@
import { createThunk, force } from "../thunk"; import { createThunk, force } from "../thunk";
import { CatchableError } from "../types"; import { CatchableError } from "../types";
import type { NixAttrs, NixBool, NixStrictValue, NixValue } from "../types"; import type { NixAttrs, NixBool, NixStrictValue, NixValue } from "../types";
import { forceList, forceAttrs, forceFunction, forceStringValue, forceString } from "../type-assert"; import { forceList, forceAttrs, forceFunction, forceStringValue, forceString, forceStringNoCtx } from "../type-assert";
import * as context from "./context"; import * as context from "./context";
import { compareValues } from "../operators"; import { compareValues } from "../operators";
import { isBool, isFloat, isInt, isList, isString, typeOf } from "./type-check"; import { isBool, isFloat, isInt, isList, isString, typeOf } from "./type-check";
@@ -50,10 +50,6 @@ export const unsafeDiscardOutputDependency = context.unsafeDiscardOutputDependen
export const unsafeDiscardStringContext = context.unsafeDiscardStringContext; export const unsafeDiscardStringContext = context.unsafeDiscardStringContext;
export const unsafeGetAttrPos = (s: NixValue): never => {
throw new Error("Not implemented: unsafeGetAttrPos");
};
export const addDrvOutputDependencies = context.addDrvOutputDependencies; export const addDrvOutputDependencies = context.addDrvOutputDependencies;
export const compareVersions = export const compareVersions =

View File

@@ -388,3 +388,7 @@ export const ifFunc = (cond: NixValue, consq: NixValue, alter: NixValue) => {
} }
return alter; return alter;
}; };
// Decodes a compact span string (serialized by the compiler as
// "sourceId:start:end" — see op_decode_span on the Rust side) into a Nix
// position attrset of shape { file, line, column }, any field of which may
// be null for an undecodable span.
export const mkPos = (span: string): NixAttrs => {
return Deno.core.ops.op_decode_span(span);
};

View File

@@ -18,12 +18,13 @@ import {
pushContext, pushContext,
popContext, popContext,
withContext, withContext,
mkPos,
} from "./helpers"; } from "./helpers";
import { op } from "./operators"; import { op } from "./operators";
import { builtins, PRIMOP_METADATA } from "./builtins"; import { builtins, PRIMOP_METADATA } from "./builtins";
import { coerceToString, StringCoercionMode } from "./builtins/conversion"; import { coerceToString, StringCoercionMode } from "./builtins/conversion";
import { HAS_CONTEXT } from "./string-context"; import { HAS_CONTEXT } from "./string-context";
import { IS_PATH, mkAttrs, mkFunction } from "./types"; import { IS_PATH, mkAttrs, mkFunction, mkAttrsWithPos, ATTR_POSITIONS } from "./types";
import { forceBool } from "./type-assert"; import { forceBool } from "./type-assert";
export type NixRuntime = typeof Nix; export type NixRuntime = typeof Nix;
@@ -53,7 +54,10 @@ export const Nix = {
concatStringsWithContext, concatStringsWithContext,
StringCoercionMode, StringCoercionMode,
mkAttrs, mkAttrs,
mkAttrsWithPos,
mkFunction, mkFunction,
mkPos,
ATTR_POSITIONS,
pushContext, pushContext,
popContext, popContext,

View File

@@ -87,6 +87,39 @@ export const mkAttrs = (attrs: NixAttrs, keys: NixValue[], values: NixValue[]):
return attrs; return attrs;
}; };
const ATTR_POSITIONS = Symbol("attrPositions");
export const mkAttrsWithPos = (
attrs: NixAttrs,
positions: Record<string, string>,
dyns?: { dynKeys: NixValue[]; dynVals: NixValue[]; dynSpans: string[] }
): NixAttrs => {
if (dyns) {
const len = dyns.dynKeys.length;
for (let i = 0; i < len; i++) {
const key = force(dyns.dynKeys[i]);
if (key === null) {
continue;
}
const str = forceStringNoCtx(key);
attrs[str] = dyns.dynVals[i];
positions[str] = dyns.dynSpans[i];
}
}
if (Object.keys(positions).length > 0) {
Object.defineProperty(attrs, ATTR_POSITIONS, {
value: positions,
enumerable: false,
writable: false,
});
}
return attrs;
};
export { ATTR_POSITIONS };
/** /**
* Interface for lazy thunk values * Interface for lazy thunk values
* Thunks delay evaluation until forced * Thunks delay evaluation until forced

View File

@@ -42,6 +42,7 @@ declare global {
function op_read_dir(path: string): Record<string, string>; function op_read_dir(path: string): Record<string, string>;
function op_path_exists(path: string): boolean; function op_path_exists(path: string): boolean;
function op_sha256_hex(data: string): string; function op_sha256_hex(data: string): string;
function op_decode_span(span: string): { file: string | null; line: number | null; column: number | null };
function op_make_store_path(ty: string, hash_hex: string, name: string): string; function op_make_store_path(ty: string, hash_hex: string, name: string): string;
function op_output_path_name(drv_name: string, output_name: string): string; function op_output_path_name(drv_name: string, output_name: string): string;
function op_make_fixed_output_path( function op_make_fixed_output_path(

View File

@@ -171,6 +171,10 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
) )
} }
} }
Ir::CurPos(cur_pos) => {
let span_str = encode_span(cur_pos.span, ctx);
format!("Nix.mkPos({})", span_str)
}
} }
} }
} }
@@ -346,9 +350,10 @@ impl<Ctx: CodegenContext> Compile<Ctx> for Select {
impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet { impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
fn compile(&self, ctx: &Ctx) -> String { fn compile(&self, ctx: &Ctx) -> String {
let mut attrs = Vec::new(); let mut attrs = Vec::new();
let mut attr_positions = Vec::new();
let stack_trace_enabled = std::env::var("NIX_JS_STACK_TRACE").is_ok(); let stack_trace_enabled = std::env::var("NIX_JS_STACK_TRACE").is_ok();
for (&sym, &expr) in &self.stcs { for (&sym, &(expr, attr_span)) in &self.stcs {
let key = ctx.get_sym(sym); let key = ctx.get_sym(sym);
let value_code = ctx.get_ir(expr).compile(ctx); let value_code = ctx.get_ir(expr).compile(ctx);
@@ -362,12 +367,15 @@ impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
value_code value_code
}; };
attrs.push(format!("{}:{}", key.escape_quote(), value)); attrs.push(format!("{}:{}", key.escape_quote(), value));
let attr_pos_str = encode_span(attr_span, ctx);
attr_positions.push(format!("{}:{}", key.escape_quote(), attr_pos_str));
} }
if !self.dyns.is_empty() { if !self.dyns.is_empty() {
let (keys, vals) = self.dyns.iter().map(|&(key, val)| { let (keys, vals, dyn_spans) = self.dyns.iter().map(|(key, val, attr_span)| {
let key = ctx.get_ir(key).compile(ctx); let key = ctx.get_ir(*key).compile(ctx);
let val_expr = ctx.get_ir(val); let val_expr = ctx.get_ir(*val);
let val = val_expr.compile(ctx); let val = val_expr.compile(ctx);
let span = val_expr.span(); let span = val_expr.span();
let val = if stack_trace_enabled { let val = if stack_trace_enabled {
@@ -379,9 +387,19 @@ impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
} else { } else {
val val
}; };
(key, val) let dyn_span_str = encode_span(*attr_span, ctx);
}).collect::<(Vec<_>, Vec<_>)>(); (key, val, dyn_span_str)
format!("Nix.mkAttrs({{{}}},[{}],[{}])", attrs.join(","), keys.join(","), vals.join(",")) }).multiunzip::<(Vec<_>, Vec<_>, Vec<_>)>();
format!(
"Nix.mkAttrsWithPos({{{}}},{{{}}},{{dynKeys:[{}],dynVals:[{}],dynSpans:[{}]}})",
attrs.join(","),
attr_positions.join(","),
keys.join(","),
vals.join(","),
dyn_spans.join(",")
)
} else if !attr_positions.is_empty() {
format!("Nix.mkAttrsWithPos({{{}}},{{{}}})", attrs.join(","), attr_positions.join(","))
} else { } else {
format!("{{{}}}", attrs.join(",")) format!("{{{}}}", attrs.join(","))
} }

View File

@@ -59,7 +59,7 @@ ir! {
Bool(bool), Bool(bool),
Null, Null,
Str { pub val: String }, Str { pub val: String },
AttrSet { pub stcs: HashMap<SymId, ExprId>, pub dyns: Vec<(ExprId, ExprId)> }, AttrSet { pub stcs: HashMap<SymId, (ExprId, rnix::TextRange)>, pub dyns: Vec<(ExprId, ExprId, rnix::TextRange)> },
List { pub items: Vec<ExprId> }, List { pub items: Vec<ExprId> },
HasAttr { pub lhs: ExprId, pub rhs: Vec<Attr> }, HasAttr { pub lhs: ExprId, pub rhs: Vec<Attr> },
@@ -78,6 +78,7 @@ ir! {
Thunk(ExprId), Thunk(ExprId),
Builtins, Builtins,
Builtin(SymId), Builtin(SymId),
CurPos,
} }
impl Ir { impl Ir {
@@ -106,6 +107,7 @@ impl Ir {
Ir::Thunk(t) => t.span, Ir::Thunk(t) => t.span,
Ir::Builtins(b) => b.span, Ir::Builtins(b) => b.span,
Ir::Builtin(b) => b.span, Ir::Builtin(b) => b.span,
Ir::CurPos(c) => c.span,
} }
} }
} }
@@ -123,7 +125,7 @@ impl AttrSet {
match attr { match attr {
Attr::Str(ident, span) => { Attr::Str(ident, span) => {
// If the next attribute is a static string. // If the next attribute is a static string.
if let Some(&id) = self.stcs.get(&ident) { if let Some(&(id, _)) = self.stcs.get(&ident) {
// If a sub-attrset already exists, recurse into it. // If a sub-attrset already exists, recurse into it.
let mut ir = ctx.extract_ir(id); let mut ir = ctx.extract_ir(id);
let result = ir let result = ir
@@ -151,8 +153,8 @@ impl AttrSet {
span, span,
}; };
attrs._insert(path, name, value, ctx)?; attrs._insert(path, name, value, ctx)?;
let attrs = ctx.new_expr(attrs.to_ir()); let attrs_expr = ctx.new_expr(attrs.to_ir());
self.stcs.insert(ident, attrs); self.stcs.insert(ident, (attrs_expr, span));
} }
Ok(()) Ok(())
} }
@@ -166,7 +168,7 @@ impl AttrSet {
span, span,
}; };
attrs._insert(path, name, value, ctx)?; attrs._insert(path, name, value, ctx)?;
self.dyns.push((dynamic, ctx.new_expr(attrs.to_ir()))); self.dyns.push((dynamic, ctx.new_expr(attrs.to_ir()), span));
Ok(()) Ok(())
} }
} }
@@ -174,7 +176,7 @@ impl AttrSet {
// This is the final attribute in the path, so insert the value here. // This is the final attribute in the path, so insert the value here.
match name { match name {
Attr::Str(ident, span) => { Attr::Str(ident, span) => {
if self.stcs.insert(ident, value).is_some() { if self.stcs.insert(ident, (value, span)).is_some() {
return Err(Error::downgrade_error( return Err(Error::downgrade_error(
format!( format!(
"attribute '{}' already defined", "attribute '{}' already defined",
@@ -185,8 +187,8 @@ impl AttrSet {
)); ));
} }
} }
Attr::Dynamic(dynamic, _) => { Attr::Dynamic(dynamic, span) => {
self.dyns.push((dynamic, value)); self.dyns.push((dynamic, value, span));
} }
} }
Ok(()) Ok(())

View File

@@ -174,9 +174,15 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Literal {
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Ident { impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Ident {
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> { fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
let sym = self.ident_token().unwrap().to_string(); let text = self.ident_token().unwrap().to_string();
let sym = ctx.new_sym(sym); let span = self.syntax().text_range();
ctx.lookup(sym, self.syntax().text_range())
if text == "__curPos" {
return Ok(ctx.new_expr(CurPos { span }.to_ir()));
}
let sym = ctx.new_sym(text);
ctx.lookup(sym, span)
} }
} }
@@ -203,7 +209,7 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::AttrSet {
for sym in binding_keys { for sym in binding_keys {
// FIXME: span // FIXME: span
let expr = ctx.lookup(*sym, synthetic_span())?; let expr = ctx.lookup(*sym, synthetic_span())?;
attrs.stcs.insert(*sym, expr); attrs.stcs.insert(*sym, (expr, synthetic_span()));
} }
Ok(ctx.new_expr(attrs.to_ir())) Ok(ctx.new_expr(attrs.to_ir()))
@@ -325,7 +331,7 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet {
for sym in binding_keys { for sym in binding_keys {
// FIXME: span // FIXME: span
let expr = ctx.lookup(sym, synthetic_span())?; let expr = ctx.lookup(sym, synthetic_span())?;
attrs.stcs.insert(sym, expr); attrs.stcs.insert(sym, (expr, synthetic_span()));
} }
Result::Ok(ctx.new_expr(attrs.to_ir())) Result::Ok(ctx.new_expr(attrs.to_ir()))
@@ -334,7 +340,6 @@ impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet {
let body_sym = ctx.new_sym("body".to_string()); let body_sym = ctx.new_sym("body".to_string());
let select = Select { let select = Select {
expr: attrset_expr, expr: attrset_expr,
// FIXME: span
attrpath: vec![Attr::Str(body_sym, synthetic_span())], attrpath: vec![Attr::Str(body_sym, synthetic_span())],
default: None, default: None,
span, span,

View File

@@ -59,7 +59,7 @@ pub fn downgrade_static_attrs(
} }
} }
Ok(attrs.stcs) Ok(attrs.stcs.into_iter().map(|(k, (v, _))| (k, v)).collect())
} }
/// Downgrades an `inherit` statement. /// Downgrades an `inherit` statement.
@@ -67,7 +67,7 @@ pub fn downgrade_static_attrs(
/// `inherit a b;` is translated into `a = a; b = b;` (i.e., bringing variables into scope). /// `inherit a b;` is translated into `a = a; b = b;` (i.e., bringing variables into scope).
pub fn downgrade_inherit( pub fn downgrade_inherit(
inherit: ast::Inherit, inherit: ast::Inherit,
stcs: &mut HashMap<SymId, ExprId>, stcs: &mut HashMap<SymId, (ExprId, rnix::TextRange)>,
ctx: &mut impl DowngradeContext, ctx: &mut impl DowngradeContext,
) -> Result<()> { ) -> Result<()> {
// Downgrade the `from` expression if it exists. // Downgrade the `from` expression if it exists.
@@ -122,7 +122,7 @@ pub fn downgrade_inherit(
.with_span(span) .with_span(span)
.with_source(ctx.get_current_source())); .with_source(ctx.get_current_source()));
} }
Entry::Vacant(vacant) => vacant.insert(expr), Entry::Vacant(vacant) => vacant.insert((expr, span)),
}; };
} }
Ok(()) Ok(())
@@ -534,7 +534,7 @@ where
} }
} }
Ok(temp_attrs.stcs) Ok(temp_attrs.stcs.into_iter().map(|(k, (v, _))| (k, v)).collect())
}, },
body_fn, body_fn,
) )

View File

@@ -51,6 +51,7 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
op_path_exists(), op_path_exists(),
op_resolve_path(), op_resolve_path(),
op_sha256_hex(), op_sha256_hex(),
op_decode_span::<Ctx>(),
op_make_store_path::<Ctx>(), op_make_store_path::<Ctx>(),
op_output_path_name(), op_output_path_name(),
op_make_fixed_output_path::<Ctx>(), op_make_fixed_output_path::<Ctx>(),
@@ -268,6 +269,56 @@ fn op_sha256_hex(#[string] data: String) -> String {
crate::nix_hash::sha256_hex(&data) crate::nix_hash::sha256_hex(&data)
} }
/// Decodes a compact span string (serialized by codegen as
/// `"sourceId:startOffset:endOffset"`) into a Nix position object
/// `{ file, line, column }`.
///
/// A malformed span yields an all-null position instead of an error, since
/// `builtins.unsafeGetAttrPos` / `__curPos` are best-effort in Nix.
#[deno_core::op2]
#[serde]
fn op_decode_span<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] span_str: String,
) -> std::result::Result<serde_json::Value, NixError> {
let parts: Vec<&str> = span_str.split(':').collect();
if parts.len() != 3 {
// Not in "source:start:end" form — report an unknown position.
return Ok(serde_json::json!({
"file": serde_json::Value::Null,
"line": serde_json::Value::Null,
"column": serde_json::Value::Null
}));
}
let source_id: usize = parts[0].parse().map_err(|_| "Invalid source ID")?;
// parts[2] (the end offset) is intentionally unused: Nix positions only
// carry the start of the span.
let start: u32 = parts[1].parse().map_err(|_| "Invalid start offset")?;
let ctx: &Ctx = state.get_ctx();
let source = ctx.get_source(source_id);
let content = &source.src;
// Convert the byte offset into a 1-based (line, column) pair.
let (line, column) = byte_offset_to_line_col(content, start as usize);
Ok(serde_json::json!({
"file": source.get_name(),
"line": line,
"column": column
}))
}
/// Translates a byte `offset` into `content` to a 1-based `(line, column)`
/// pair. Columns count characters, not bytes; an offset at or past the end of
/// `content` resolves to the position of the final character boundary reached.
fn byte_offset_to_line_col(content: &str, offset: usize) -> (u32, u32) {
    content
        .char_indices()
        .take_while(|&(idx, _)| idx < offset)
        .fold((1u32, 1u32), |(line, col), (_, ch)| match ch {
            // A newline advances the line counter and resets the column.
            '\n' => (line + 1, 1),
            _ => (line, col + 1),
        })
}
#[deno_core::op2] #[deno_core::op2]
#[string] #[string]
fn op_make_store_path<Ctx: RuntimeContext>( fn op_make_store_path<Ctx: RuntimeContext>(