feat: TODO

2025-08-28 18:18:35 +08:00
parent 2fbd2a26a9
commit f7131079e5
26 changed files with 580 additions and 580 deletions


@@ -1,4 +1,3 @@
-use std::cell::Cell;
 use std::ptr::NonNull;
 use bumpalo::{Bump, boxed::Box};
@@ -7,13 +6,12 @@ use itertools::Itertools;
 use petgraph::graphmap::DiGraphMap;
 use nixjit_builtins::{
-    Builtins, BuiltinsContext,
-    builtins::{GLOBAL_LEN, SCOPED_LEN},
+    builtins::{GLOBAL_LEN, SCOPED_LEN}, BuiltinFn, Builtins, BuiltinsContext
 };
 use nixjit_error::{Error, Result};
 use nixjit_eval::{Args, EvalContext, Value};
 use nixjit_hir::{DowngradeContext, Hir};
-use nixjit_ir::{AttrSet, ExprId, Param, PrimOpId, StackIdx};
+use nixjit_ir::{AttrSet, ExprId, Param, PrimOpId};
 use nixjit_lir::Lir;
 use crate::downgrade::DowngradeCtx;
@@ -39,16 +37,23 @@ pub struct Context<'bump> {
     global_scope: NonNull<HashMap<&'static str, ExprId>>,
     /// A dependency graph between expressions.
-    graph: DiGraphMap<ExprId, StackIdx>,
+    graph: DiGraphMap<ExprId, ()>,
     /// A table of primitive operation implementations.
-    primops: [(usize, fn(&mut Self, Args) -> Result<Value>); GLOBAL_LEN + SCOPED_LEN],
+    primops: [(usize, BuiltinFn<Self>); GLOBAL_LEN + SCOPED_LEN],
     bump: &'bump Bump,
 }

 impl Drop for Context<'_> {
     fn drop(&mut self) {
+        // SAFETY: `repl_scope` and `global_scope` are `NonNull` pointers to `HashMap`s
+        // allocated within the `bump` arena. Because `NonNull` does not convey ownership,
+        // the compiler will not drop the pointed-to `HashMap`s when the `Context` is
+        // dropped, so we call `drop_in_place` here to run their destructors. This is
+        // sound because both pointers are initialized in `new()`, are never deallocated
+        // or reassigned, and remain valid for the lifetime of the `Context`.
         unsafe {
             self.repl_scope.drop_in_place();
             self.global_scope.drop_in_place();
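
The hunk above relies on a pattern worth spelling out: `bumpalo` reclaims its memory without running destructors, so a `HashMap` placed in the arena behind a `NonNull` must be dropped in place by hand. A minimal standalone sketch of that pattern follows; the `Scopes` type and its fields are illustrative stand-ins, not part of this crate.

    use std::collections::HashMap;
    use std::ptr::NonNull;
    use bumpalo::Bump;

    struct Scopes<'bump> {
        map: NonNull<HashMap<String, usize>>,
        _bump: &'bump Bump,
    }

    impl<'bump> Scopes<'bump> {
        fn new(bump: &'bump Bump) -> Self {
            // The map lives inside the arena; we keep only a raw pointer to it.
            Self {
                map: NonNull::from(bump.alloc(HashMap::new())),
                _bump: bump,
            }
        }
    }

    impl Drop for Scopes<'_> {
        fn drop(&mut self) {
            // SAFETY: `map` was allocated in `new()`, is never freed or moved, and
            // `'bump` keeps the arena alive for as long as `Scopes` exists. Without
            // this call the `HashMap`'s heap buckets would leak, because the arena
            // frees its memory without running destructors.
            unsafe { self.map.drop_in_place() };
        }
    }

    fn main() {
        let bump = Bump::new();
        let scopes = Scopes::new(&bump);
        drop(scopes); // runs the `HashMap` destructor in place
    }
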
@@ -62,10 +67,18 @@ impl<'bump> Context<'bump> {
         let global_scope = global
             .iter()
             .enumerate()
-            .map(|(idx, (k, _, _))| (*k, unsafe { ExprId::from_raw(idx) }))
-            .chain(core::iter::once(("builtins", unsafe {
-                ExprId::from_raw(GLOBAL_LEN + SCOPED_LEN)
-            })))
+            .map(|(idx, (k, _, _))| {
+                // SAFETY: The index `idx` comes from `enumerate()` on the `global` array,
+                // so it is guaranteed to be a valid, unique index for a primop LIR.
+                (*k, unsafe { ExprId::from_raw(idx) })
+            })
+            .chain(core::iter::once((
+                "builtins",
+                // SAFETY: This ID corresponds to the `builtins` attrset LIR, which is
+                // constructed and placed after all the global and scoped primop LIRs.
+                // The index is calculated to be exactly at that position.
+                unsafe { ExprId::from_raw(GLOBAL_LEN + SCOPED_LEN) },
+            )))
             .collect();
         let primops = global
             .iter()
@@ -74,30 +87,48 @@ impl<'bump> Context<'bump> {
             .collect_array()
             .unwrap();
         let lirs = (0..global.len())
-            .map(|idx| Lir::PrimOp(unsafe { PrimOpId::from_raw(idx) }))
-            .chain(
-                (0..scoped.len())
-                    .map(|idx| Lir::PrimOp(unsafe { PrimOpId::from_raw(idx + GLOBAL_LEN) })),
-            )
+            .map(|idx| {
+                // SAFETY: The index `idx` is within the bounds of the `global` primops
+                // array, making it a valid raw ID for a `PrimOpId`.
+                Lir::PrimOp(unsafe { PrimOpId::from_raw(idx) })
+            })
+            .chain((0..scoped.len()).map(|idx| {
+                // SAFETY: The index `idx` is within the bounds of the `scoped` primops
+                // array. Adding `GLOBAL_LEN` offsets it to its position in the
+                // combined `primops` table.
+                Lir::PrimOp(unsafe { PrimOpId::from_raw(idx + GLOBAL_LEN) })
+            }))
             .chain(core::iter::once(Lir::AttrSet(AttrSet {
                 stcs: global
                     .into_iter()
                     .enumerate()
-                    .map(|(idx, (name, ..))| (name.to_string(), unsafe { ExprId::from_raw(idx) }))
+                    .map(|(idx, (name, ..))| {
+                        // SAFETY: `idx` from `enumerate` is a valid index for the LIR
+                        // corresponding to this global primop.
+                        (name.to_string(), unsafe { ExprId::from_raw(idx) })
+                    })
                     .chain(scoped.into_iter().enumerate().map(|(idx, (name, ..))| {
+                        // SAFETY: `idx + GLOBAL_LEN` is a valid index for the LIR
+                        // corresponding to this scoped primop.
                         (name.to_string(), unsafe {
                             ExprId::from_raw(idx + GLOBAL_LEN)
                         })
                     }))
-                    .chain(core::iter::once(("builtins".to_string(), unsafe {
-                        ExprId::from_raw(GLOBAL_LEN + SCOPED_LEN + 1)
-                    })))
+                    .chain(core::iter::once((
+                        "builtins".to_string(),
+                        // SAFETY: This ID points to the `Thunk` that wraps this very
+                        // `AttrSet`; the thunk sits one position after the `AttrSet`
+                        // itself in the LIR vector.
+                        unsafe { ExprId::from_raw(GLOBAL_LEN + SCOPED_LEN + 1) },
+                    )))
                     .collect(),
                 ..AttrSet::default()
             })))
-            .chain(core::iter::once(Lir::Thunk(unsafe {
-                ExprId::from_raw(GLOBAL_LEN + SCOPED_LEN)
-            })))
+            .chain(core::iter::once(Lir::Thunk(
+                // SAFETY: This ID points to the `builtins` `AttrSet` defined just above,
+                // at index `GLOBAL_LEN + SCOPED_LEN` in the LIR vector.
+                unsafe { ExprId::from_raw(GLOBAL_LEN + SCOPED_LEN) },
+            )))
             .map(|lir| Box::new_in(lir, bump))
             .collect_vec();
         Self {
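
To make the index arithmetic in the SAFETY comments above easier to audit, here is a small self-contained sketch of the resulting LIR layout. The constant values are made up for illustration; only the relative positions matter.

    // Stand-ins for the real GLOBAL_LEN / SCOPED_LEN from `nixjit_builtins`.
    const GLOBAL_LEN: usize = 2;
    const SCOPED_LEN: usize = 1;

    fn main() {
        // Prelude LIR vector built above, by index:
        //   [0, GLOBAL_LEN)                       -> global primops
        //   [GLOBAL_LEN, GLOBAL_LEN + SCOPED_LEN) -> scoped primops
        //   GLOBAL_LEN + SCOPED_LEN               -> the `builtins` attrset
        //   GLOBAL_LEN + SCOPED_LEN + 1           -> the thunk wrapping that attrset
        let attrset_idx = GLOBAL_LEN + SCOPED_LEN;
        let thunk_idx = attrset_idx + 1;
        assert_eq!((attrset_idx, thunk_idx), (3, 4));
        // Hence the global scope's "builtins" entry and the final `Lir::Thunk`
        // both refer to the attrset, while the attrset's own "builtins" key
        // refers one slot further, to the thunk.
    }
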
@@ -144,8 +175,9 @@ impl<'bump> Context<'bump> {
         let root = self
             .downgrade_ctx()
             .downgrade_root(root.tree().expr().unwrap())?;
-        self.resolve_ctx(root).resolve_root()?;
-        Ok(self.eval_ctx().eval_root(root)?.to_public())
+        let ctx = self.resolve_ctx(root);
+        ctx.resolve_root()?;
+        Ok(self.eval_ctx().eval(root)?.to_public())
     }

     pub fn add_binding(&mut self, ident: &str, expr: &str) -> Result<()> {
@@ -157,6 +189,10 @@ impl<'bump> Context<'bump> {
             .unwrap();
         let expr_id = self.downgrade_ctx().downgrade_root(root_expr)?;
         self.resolve_ctx(expr_id).resolve_root()?;
+        // SAFETY: `repl_scope` is a `NonNull` pointer that is valid for the lifetime
+        // of `Context`: it is initialized in `new()` and the memory it points to is
+        // owned by the `bump` arena. Taking `&mut self` gives us exclusive access to
+        // the map, so creating a mutable reference through the pointer here is sound.
         unsafe { self.repl_scope.as_mut() }.insert(ident.to_string(), expr_id);
         Ok(())
     }
@@ -165,20 +201,15 @@ impl<'bump> Context<'bump> {
 impl Context<'_> {
     fn alloc_id(&mut self) -> ExprId {
         self.ir_count += 1;
+        // SAFETY: This function is the sole source of new `ExprId`s during the
+        // downgrade and resolve phases. By monotonically incrementing `ir_count`,
+        // we guarantee that each ID is unique and corresponds to a valid,
+        // soon-to-be-allocated slot in the IR vectors.
         unsafe { ExprId::from_raw(self.ir_count - 1) }
     }

-    fn add_dep(&mut self, from: ExprId, to: ExprId, count: &Cell<usize>) -> StackIdx {
-        if let Some(&idx) = self.graph.edge_weight(from, to) {
-            idx
-        } else {
-            let idx = count.get();
-            count.set(idx + 1);
-            let idx = unsafe { StackIdx::from_raw(idx) };
-            assert_ne!(from, to);
-            self.graph.add_edge(from, to, idx);
-            idx
-        }
+    fn add_dep(&mut self, from: ExprId, to: ExprId) {
+        self.graph.add_edge(from, to, ());
     }
 }
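
Not part of the diff: a minimal sketch of why the simplification above works. With `()` edge weights, `DiGraphMap` acts as a plain directed relation and re-adding an existing edge leaves the edge set unchanged, so the old `edge_weight` lookup and the per-edge `StackIdx` counter are no longer needed. `u32` stands in for `ExprId` below.

    use petgraph::graphmap::DiGraphMap;

    fn main() {
        // Unit edge weights: the graph only records *that* a dependency exists.
        let mut graph: DiGraphMap<u32, ()> = DiGraphMap::new();
        graph.add_edge(1, 2, ());
        graph.add_edge(1, 2, ()); // duplicate: the edge set is unchanged
        graph.add_edge(2, 3, ());
        assert_eq!(graph.edge_count(), 2);
        assert!(graph.contains_edge(1, 2));
    }
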