feat: init
.gitignore (vendored, new file, +3)
@@ -0,0 +1,3 @@
target/

/.direnv/
Cargo.lock (generated, new file, +2201)
File diff suppressed because it is too large.
Cargo.toml (new file, +6)
@@ -0,0 +1,6 @@
[workspace]
resolver = "3"
members = [
    "nix-js",
    "nix-js-macros"
]
flake.lock (generated, new file, +66)
@@ -0,0 +1,66 @@
{
  "nodes": {
    "fenix": {
      "inputs": {
        "nixpkgs": [
          "nixpkgs"
        ],
        "rust-analyzer-src": "rust-analyzer-src"
      },
      "locked": {
        "lastModified": 1767250179,
        "narHash": "sha256-PnQdWvPZqHp+7yaHWDFX3NYSKaOy0fjkwpR+rIQC7AY=",
        "owner": "nix-community",
        "repo": "fenix",
        "rev": "a3eaf682db8800962943a77ab77c0aae966f9825",
        "type": "github"
      },
      "original": {
        "owner": "nix-community",
        "repo": "fenix",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1767116409,
        "narHash": "sha256-5vKw92l1GyTnjoLzEagJy5V5mDFck72LiQWZSOnSicw=",
        "owner": "nixos",
        "repo": "nixpkgs",
        "rev": "cad22e7d996aea55ecab064e84834289143e44a0",
        "type": "github"
      },
      "original": {
        "owner": "nixos",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "fenix": "fenix",
        "nixpkgs": "nixpkgs"
      }
    },
    "rust-analyzer-src": {
      "flake": false,
      "locked": {
        "lastModified": 1767191410,
        "narHash": "sha256-cCZGjubgDWmstvFkS6eAw2qk2ihgWkycw55u2dtLd70=",
        "owner": "rust-lang",
        "repo": "rust-analyzer",
        "rev": "a9026e6d5068172bf5a0d52a260bb290961d1cb4",
        "type": "github"
      },
      "original": {
        "owner": "rust-lang",
        "ref": "nightly",
        "repo": "rust-analyzer",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
flake.nix (new file, +41)
@@ -0,0 +1,41 @@
{
  nixConfig = {
    extra-substituters = [
      "https://cache.garnix.io"
    ];
    extra-trusted-public-keys = [
      "cache.garnix.io:CTFPyKSLcx5RMJKfLo5EEPUObbA78b0YQ2DTCJXqr9g="
    ];
  };
  inputs = {
    nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
    fenix.url = "github:nix-community/fenix";
    fenix.inputs.nixpkgs.follows = "nixpkgs";
  };
  outputs = { nixpkgs, fenix, ... }:
    let
      forAllSystems = nixpkgs.lib.genAttrs nixpkgs.lib.systems.flakeExposed;
    in
    {
      devShells = forAllSystems (system:
        let pkgs = import nixpkgs { inherit system; config.allowUnfree = true; }; in
        {
          default = pkgs.mkShell {
            packages = with pkgs; [
              (fenix.packages.${system}.stable.withComponents [
                "cargo"
                "clippy"
                "rust-src"
                "rustc"
                "rustfmt"
                "rust-analyzer"
              ])
              lldb
              valgrind
              claude-code
            ];
          };
        }
      );
    };
}
nix-js-macros/Cargo.toml (new file, +13)
@@ -0,0 +1,13 @@
[package]
name = "nix-js-macros"
version = "0.1.0"
edition = "2024"

[lib]
proc-macro = true

[dependencies]
convert_case = "0.8"
quote = "1.0"
proc-macro2 = "1.0"
syn = { version = "2.0", features = ["full"] }
nix-js-macros/src/ir.rs (new file, +203)
@@ -0,0 +1,203 @@
|
||||
//! Implements the `ir!` procedural macro.
|
||||
//!
|
||||
//! This macro is designed to reduce the boilerplate associated with defining
|
||||
//! an Intermediate Representation (IR) that follows a specific pattern. It generates:
|
||||
//! 1. An enum representing the different kinds of IR nodes.
|
||||
//! 2. Structs for each of the variants that have fields.
|
||||
//! 3. `Ref` and `Mut` versions of the main enum for ergonomic pattern matching on references.
|
||||
//! 4. `From` implementations to easily convert from a struct variant (e.g., `BinOp`) to the main enum (`Ir::BinOp`).
|
||||
//! 5. A `To[IrName]` trait to provide a convenient `.to_ir()` method on the variant structs.
|
||||
|
||||
use convert_case::{Case, Casing};
|
||||
use proc_macro::TokenStream;
|
||||
use quote::{format_ident, quote};
|
||||
use syn::{
|
||||
FieldsNamed, Ident, Token, Type, parenthesized,
|
||||
parse::{Parse, ParseStream, Result},
|
||||
punctuated::Punctuated,
|
||||
token,
|
||||
};
|
||||
|
||||
/// Represents one of the variants passed to the `ir!` macro.
|
||||
pub enum VariantInput {
|
||||
/// A unit-like variant, e.g., `Arg`.
|
||||
Unit(Ident),
|
||||
/// A tuple-like variant with one unnamed field, e.g., `ExprRef(ExprId)`.
|
||||
Tuple(Ident, Type),
|
||||
/// A struct-like variant with named fields, e.g., `BinOp { lhs: ExprId, rhs: ExprId, kind: BinOpKind }`.
|
||||
Struct(Ident, FieldsNamed),
|
||||
}
|
||||
|
||||
/// The top-level input for the `ir!` macro.
|
||||
pub struct MacroInput {
|
||||
/// The name of the main IR enum to be generated (e.g., `Ir`).
|
||||
pub base_name: Ident,
|
||||
/// The list of variants for the enum.
|
||||
pub variants: Punctuated<VariantInput, Token![,]>,
|
||||
}
|
||||
|
||||
impl Parse for VariantInput {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let name: Ident = input.parse()?;
|
||||
|
||||
if input.peek(token::Paren) {
|
||||
// Parse a tuple-like variant: `Variant(Type)`
|
||||
let content;
|
||||
parenthesized!(content in input);
|
||||
let ty: Type = content.parse()?;
|
||||
|
||||
if !content.is_empty() {
|
||||
return Err(content.error("Expected a single type inside parentheses"));
|
||||
}
|
||||
|
||||
Ok(VariantInput::Tuple(name, ty))
|
||||
} else if input.peek(token::Brace) {
|
||||
// Parse a struct-like variant: `Variant { field: Type, ... }`
|
||||
let fields: FieldsNamed = input.parse()?;
|
||||
Ok(VariantInput::Struct(name, fields))
|
||||
} else {
|
||||
// Parse a unit-like variant: `Variant`
|
||||
Ok(VariantInput::Unit(name))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for MacroInput {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
// The macro input is expected to be: `IrName, Variant1, Variant2, ...`
|
||||
let base_name = input.parse()?;
|
||||
input.parse::<Token![,]>()?;
|
||||
let variants = Punctuated::parse_terminated(input)?;
|
||||
|
||||
Ok(MacroInput {
|
||||
base_name,
|
||||
variants,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// The implementation of the `ir!` macro.
|
||||
pub fn ir_impl(input: TokenStream) -> TokenStream {
|
||||
let parsed_input = syn::parse_macro_input!(input as MacroInput);
|
||||
|
||||
let base_name = &parsed_input.base_name;
|
||||
let ref_name = format_ident!("{}Ref", base_name);
|
||||
let mut_name = format_ident!("{}Mut", base_name);
|
||||
let to_trait_name = format_ident!("To{}", base_name);
|
||||
let to_trait_fn_name = format_ident!("to_{}", base_name.to_string().to_case(Case::Snake));
|
||||
|
||||
let mut enum_variants = Vec::new();
|
||||
let mut struct_defs = Vec::new();
|
||||
let mut ref_variants = Vec::new();
|
||||
let mut mut_variants = Vec::new();
|
||||
let mut as_ref_arms = Vec::new();
|
||||
let mut as_mut_arms = Vec::new();
|
||||
let mut from_impls = Vec::new();
|
||||
let mut to_trait_impls = Vec::new();
|
||||
|
||||
for variant in parsed_input.variants {
|
||||
match variant {
|
||||
VariantInput::Unit(name) => {
|
||||
let inner_type = name.clone();
|
||||
enum_variants.push(quote! { #name(#inner_type) });
|
||||
ref_variants.push(quote! { #name(&'a #inner_type) });
|
||||
mut_variants.push(quote! { #name(&'a mut #inner_type) });
|
||||
as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) });
|
||||
as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) });
|
||||
from_impls.push(quote! {
|
||||
impl From<#inner_type> for #base_name {
|
||||
fn from(val: #inner_type) -> Self { #base_name::#name(val) }
|
||||
}
|
||||
});
|
||||
to_trait_impls.push(quote! {
|
||||
impl #to_trait_name for #name {
|
||||
fn #to_trait_fn_name(self) -> #base_name { #base_name::from(self) }
|
||||
}
|
||||
});
|
||||
}
|
||||
VariantInput::Tuple(name, ty) => {
|
||||
enum_variants.push(quote! { #name(#ty) });
|
||||
ref_variants.push(quote! { #name(&'a #ty) });
|
||||
mut_variants.push(quote! { #name(&'a mut #ty) });
|
||||
as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) });
|
||||
as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) });
|
||||
}
|
||||
VariantInput::Struct(name, fields) => {
|
||||
let inner_type = name.clone();
|
||||
struct_defs.push(quote! {
|
||||
#[derive(Debug)]
|
||||
pub struct #name #fields
|
||||
});
|
||||
enum_variants.push(quote! { #name(#inner_type) });
|
||||
ref_variants.push(quote! { #name(&'a #inner_type) });
|
||||
mut_variants.push(quote! { #name(&'a mut #inner_type) });
|
||||
as_ref_arms.push(quote! { Self::#name(inner) => #ref_name::#name(inner) });
|
||||
as_mut_arms.push(quote! { Self::#name(inner) => #mut_name::#name(inner) });
|
||||
from_impls.push(quote! {
|
||||
impl From<#inner_type> for #base_name {
|
||||
fn from(val: #inner_type) -> Self { #base_name::#name(val) }
|
||||
}
|
||||
});
|
||||
to_trait_impls.push(quote! {
|
||||
impl #to_trait_name for #name {
|
||||
fn #to_trait_fn_name(self) -> #base_name { #base_name::from(self) }
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Assemble the final generated code.
|
||||
let expanded = quote! {
|
||||
/// The main IR enum, generated by the `ir!` macro.
|
||||
#[derive(Debug, IsVariant, Unwrap, TryUnwrap)]
|
||||
pub enum #base_name {
|
||||
#( #enum_variants ),*
|
||||
}
|
||||
|
||||
// The struct definitions for the enum variants.
|
||||
#( #struct_defs )*
|
||||
|
||||
/// An immutable reference version of the IR enum.
|
||||
#[derive(Debug, IsVariant, Unwrap, TryUnwrap)]
|
||||
pub enum #ref_name<'a> {
|
||||
#( #ref_variants ),*
|
||||
}
|
||||
|
||||
/// A mutable reference version of the IR enum.
|
||||
#[derive(Debug, IsVariant, Unwrap, TryUnwrap)]
|
||||
pub enum #mut_name<'a> {
|
||||
#( #mut_variants ),*
|
||||
}
|
||||
|
||||
impl #base_name {
|
||||
/// Converts a `&Ir` into a `IrRef`.
|
||||
pub fn as_ref(&self) -> #ref_name<'_> {
|
||||
match self {
|
||||
#( #as_ref_arms ),*
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts a `&mut Ir` into a `IrMut`.
|
||||
pub fn as_mut(&mut self) -> #mut_name<'_> {
|
||||
match self {
|
||||
#( #as_mut_arms ),*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// `From` implementations for converting variant structs into the main enum.
|
||||
#( #from_impls )*
|
||||
|
||||
/// A trait for converting a variant struct into the main IR enum.
|
||||
pub trait #to_trait_name {
|
||||
/// Performs the conversion.
|
||||
fn #to_trait_fn_name(self) -> #base_name;
|
||||
}
|
||||
|
||||
// Implement the `ToIr` trait for each variant struct.
|
||||
#( #to_trait_impls )*
|
||||
};
|
||||
|
||||
TokenStream::from(expanded)
|
||||
}
|
||||
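For orientation, a minimal sketch (not part of this commit) of how the macro is meant to be invoked. `BinOp` is assumed to be a struct defined next to the invocation, as in nix-js/src/ir.rs, and `ExprId` a plain id newtype; the derive_more derives referenced by the expansion (IsVariant, Unwrap, TryUnwrap) must be in scope at the call site.

ir! {
    Ir,
    BinOp,
    ExprRef(ExprId),
}

// Expands (roughly) to:
//   pub enum Ir        { BinOp(BinOp), ExprRef(ExprId) }
//   pub enum IrRef<'a> { BinOp(&'a BinOp), ExprRef(&'a ExprId) }
//   pub enum IrMut<'a> { BinOp(&'a mut BinOp), ExprRef(&'a mut ExprId) }
//   impl Ir { pub fn as_ref(&self) -> IrRef<'_> { .. } pub fn as_mut(&mut self) -> IrMut<'_> { .. } }
//   impl From<BinOp> for Ir { .. }             // unit- and struct-style variants only
//   pub trait ToIr { fn to_ir(self) -> Ir; }   // implemented for those same variants

Note that tuple-style variants such as `ExprRef(ExprId)` deliberately get no `From`/`ToIr` impls in the current implementation; only unit- and struct-style variants do.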
nix-js-macros/src/lib.rs (new file, +13)
@@ -0,0 +1,13 @@
//! This crate provides procedural macros for the nix-js project.
use proc_macro::TokenStream;

mod ir;

/// A procedural macro to reduce boilerplate when defining an Intermediate Representation (IR).
///
/// It generates an enum for the IR, along with `Ref` and `Mut` variants,
/// `From` implementations, and a `ToIr` trait.
#[proc_macro]
pub fn ir(input: TokenStream) -> TokenStream {
    ir::ir_impl(input)
}
nix-js/Cargo.toml (new file, +25)
@@ -0,0 +1,25 @@
[package]
name = "nix-js"
version = "0.1.0"
edition = "2024"

[dependencies]
mimalloc = "0.1"

# REPL
anyhow = "1.0"
rustyline = "14.0"

regex = "1.11"
bumpalo = { version = "3.19", features = ["boxed"] }
hashbrown = "0.16"
derive_more = { version = "2", features = ["full"] }
thiserror = "2"
string-interner = "0.19"

v8 = "142.2"
deno_core = "0.376"

rnix = "0.12"

nix-js-macros = { path = "../nix-js-macros" }
nix-js/src/codegen.rs (new file, +225)
@@ -0,0 +1,225 @@
|
||||
use crate::ir::*;
|
||||
|
||||
pub trait Compile<Ctx: CodegenContext> {
|
||||
fn compile(&self, ctx: &Ctx) -> String;
|
||||
}
|
||||
|
||||
pub trait CodegenContext {
|
||||
fn get_ir(&self, id: ExprId) -> &Ir;
|
||||
fn get_sym(&self, id: SymId) -> &str;
|
||||
}
|
||||
|
||||
impl<Ctx: CodegenContext> Compile<Ctx> for Ir {
|
||||
fn compile(&self, ctx: &Ctx) -> String {
|
||||
match self {
|
||||
Ir::Const(Const { val }) => match val {
|
||||
crate::value::Const::Null => "null".to_string(),
|
||||
crate::value::Const::Int(val) => val.to_string(),
|
||||
crate::value::Const::Float(val) => val.to_string(),
|
||||
crate::value::Const::Bool(val) => val.to_string(),
|
||||
},
|
||||
&Ir::If(If { cond, consq, alter }) => {
|
||||
let cond = ctx.get_ir(cond).compile(ctx);
|
||||
let consq = ctx.get_ir(consq).compile(ctx);
|
||||
let alter = ctx.get_ir(alter).compile(ctx);
|
||||
format!("({cond})?({consq}):({alter})")
|
||||
}
|
||||
Ir::BinOp(x) => x.compile(ctx),
|
||||
Ir::UnOp(x) => x.compile(ctx),
|
||||
Ir::Func(x) => x.compile(ctx),
|
||||
Ir::AttrSet(x) => x.compile(ctx),
|
||||
&Ir::Call(Call { func, arg }) => {
|
||||
let func = ctx.get_ir(func).compile(ctx);
|
||||
let arg = ctx.get_ir(arg).compile(ctx);
|
||||
format!("NixRuntime.force({func})({arg})")
|
||||
}
|
||||
Ir::Arg(x) => format!("arg{}", x.0),
|
||||
Ir::Let(x) => x.compile(ctx),
|
||||
Ir::Select(x) => x.compile(ctx),
|
||||
&Ir::Thunk(expr_id) => {
|
||||
let inner = ctx.get_ir(expr_id).compile(ctx);
|
||||
format!("NixRuntime.create_thunk(()=>({}))", inner)
|
||||
}
|
||||
&Ir::ExprRef(expr_id) => {
|
||||
format!("expr{}", expr_id.0)
|
||||
}
|
||||
ir => todo!("{ir:?}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: CodegenContext> Compile<Ctx> for BinOp {
|
||||
fn compile(&self, ctx: &Ctx) -> String {
|
||||
use BinOpKind::*;
|
||||
let lhs = ctx.get_ir(self.lhs).compile(ctx);
|
||||
let rhs = ctx.get_ir(self.rhs).compile(ctx);
|
||||
match self.kind {
|
||||
Add => format!("NixRuntime.op.add({},{})", lhs, rhs),
|
||||
Sub => format!("NixRuntime.op.sub({},{})", lhs, rhs),
|
||||
Mul => format!("NixRuntime.op.mul({},{})", lhs, rhs),
|
||||
Div => format!("NixRuntime.op.div({},{})", lhs, rhs),
|
||||
Eq => format!("NixRuntime.op.eq({},{})", lhs, rhs),
|
||||
Neq => format!("NixRuntime.op.neq({},{})", lhs, rhs),
|
||||
Lt => format!("NixRuntime.op.lt({},{})", lhs, rhs),
|
||||
Gt => format!("NixRuntime.op.gt({},{})", lhs, rhs),
|
||||
Leq => format!("NixRuntime.op.lte({},{})", lhs, rhs),
|
||||
Geq => format!("NixRuntime.op.gte({},{})", lhs, rhs),
|
||||
And => format!("NixRuntime.op.band({},{})", lhs, rhs),
|
||||
Or => format!("NixRuntime.op.bor({},{})", lhs, rhs),
|
||||
Impl => format!("NixRuntime.op.bor(NixRuntime.op.bnot({}),{})", lhs, rhs),
|
||||
_ => todo!("BinOpKind::{:?}", self.kind),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: CodegenContext> Compile<Ctx> for UnOp {
|
||||
fn compile(&self, ctx: &Ctx) -> String {
|
||||
use UnOpKind::*;
|
||||
let rhs = ctx.get_ir(self.rhs).compile(ctx);
|
||||
match self.kind {
|
||||
Neg => format!("NixRuntime.op.sub(0,{rhs})"),
|
||||
Not => format!("NixRuntime.op.bnot({rhs})")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
impl<Ctx: CodegenContext> Compile<Ctx> for Func {
|
||||
fn compile(&self, ctx: &Ctx) -> String {
|
||||
let id = ctx.get_ir(self.arg).as_ref().unwrap_arg().0;
|
||||
let body = ctx.get_ir(self.body).compile(ctx);
|
||||
|
||||
// Generate parameter validation code
|
||||
let param_check = self.generate_param_check(ctx);
|
||||
|
||||
if param_check.is_empty() {
|
||||
// Simple function without parameter validation
|
||||
format!("arg{id}=>({body})")
|
||||
} else {
|
||||
// Function with parameter validation (use block statement, not object literal)
|
||||
format!("arg{id}=>{{{}return {}}}", param_check, body)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Func {
|
||||
fn generate_param_check<Ctx: CodegenContext>(&self, ctx: &Ctx) -> String {
|
||||
let has_checks = self.param.required.is_some() || self.param.allowed.is_some();
|
||||
|
||||
if !has_checks {
|
||||
return String::new();
|
||||
}
|
||||
|
||||
let id = ctx.get_ir(self.arg).as_ref().unwrap_arg().0;
|
||||
|
||||
// Build required parameter array
|
||||
let required = if let Some(req) = &self.param.required {
|
||||
let keys: Vec<_> = req
|
||||
.iter()
|
||||
.map(|&sym| format!("\"{}\"", ctx.get_sym(sym)))
|
||||
.collect();
|
||||
format!("[{}]", keys.join(","))
|
||||
} else {
|
||||
"null".to_string()
|
||||
};
|
||||
|
||||
// Build allowed parameter array
|
||||
let allowed = if let Some(allow) = &self.param.allowed {
|
||||
let keys: Vec<_> = allow
|
||||
.iter()
|
||||
.map(|&sym| format!("\"{}\"", ctx.get_sym(sym)))
|
||||
.collect();
|
||||
format!("[{}]", keys.join(","))
|
||||
} else {
|
||||
"null".to_string()
|
||||
};
|
||||
|
||||
// Call NixRuntime.validate_params and store the result
|
||||
format!("NixRuntime.validate_params(arg{},{},{});", id, required, allowed)
|
||||
}
|
||||
}
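For intuition, a sketch of the JS this emits for a Nix pattern function (whitespace and array ordering illustrative; the runtime helper names are the ones used above):

// Nix:  { a, b ? 5 }: a + b
// JS:   arg0=>{NixRuntime.validate_params(arg0,["a"],["a","b"]);return (()=>{ ... })()}
// The inner IIFE comes from the `Let` that the lambda desugaring wraps around
// the pattern bindings (a = arg0.a, b = arg0.b or 5).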
|
||||
|
||||
impl<Ctx: CodegenContext> Compile<Ctx> for Let {
|
||||
fn compile(&self, ctx: &Ctx) -> String {
|
||||
let declarations: Vec<String> = self
|
||||
.bindings
|
||||
.iter()
|
||||
.map(|&expr| format!("let expr{}", expr.0))
|
||||
.collect();
|
||||
|
||||
let assignments: Vec<String> = self
|
||||
.bindings
|
||||
.iter()
|
||||
.map(|&expr| {
|
||||
let value = ctx.get_ir(expr).compile(ctx);
|
||||
format!("expr{}={}", expr.0, value)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let body = ctx.get_ir(self.body).compile(ctx);
|
||||
|
||||
format!(
|
||||
"(()=>{{{}; {}; return {}}})()",
|
||||
declarations.join(";"),
|
||||
assignments.join(";"),
|
||||
body
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: CodegenContext> Compile<Ctx> for Select {
|
||||
fn compile(&self, ctx: &Ctx) -> String {
|
||||
let expr = ctx.get_ir(self.expr).compile(ctx);
|
||||
|
||||
let mut result = expr;
|
||||
let attr_count = self.attrpath.len();
|
||||
|
||||
for (i, attr) in self.attrpath.iter().enumerate() {
|
||||
let is_last = i == attr_count - 1;
|
||||
let has_default = self.default.is_some() && is_last;
|
||||
|
||||
result = match attr {
|
||||
Attr::Str(sym) => {
|
||||
let key = ctx.get_sym(*sym);
|
||||
if has_default {
|
||||
let default_val = ctx.get_ir(self.default.unwrap()).compile(ctx);
|
||||
format!("NixRuntime.select_with_default({}, \"{}\", {})", result, key, default_val)
|
||||
} else {
|
||||
format!("NixRuntime.select({}, \"{}\")", result, key)
|
||||
}
|
||||
}
|
||||
Attr::Dynamic(expr_id) => {
|
||||
let key = ctx.get_ir(*expr_id).compile(ctx);
|
||||
if has_default {
|
||||
let default_val = ctx.get_ir(self.default.unwrap()).compile(ctx);
|
||||
format!("NixRuntime.select_with_default({}, {}, {})", result, key, default_val)
|
||||
} else {
|
||||
format!("NixRuntime.select({}, {})", result, key)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: CodegenContext> Compile<Ctx> for AttrSet {
|
||||
fn compile(&self, ctx: &Ctx) -> String {
|
||||
let mut attrs = Vec::new();
|
||||
|
||||
for (&sym, &expr) in &self.stcs {
|
||||
let key = ctx.get_sym(sym);
|
||||
let value = ctx.get_ir(expr).compile(ctx);
|
||||
attrs.push(format!("\"{}\": {}", key, value));
|
||||
}
|
||||
|
||||
for (key_expr, value_expr) in &self.dyns {
|
||||
let key = ctx.get_ir(*key_expr).compile(ctx);
|
||||
let value = ctx.get_ir(*value_expr).compile(ctx);
|
||||
attrs.push(format!("[{}]: {}", key, value));
|
||||
}
|
||||
|
||||
format!("{{{}}}", attrs.join(", "))
|
||||
}
|
||||
}
|
||||
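A hedged sketch (not part of this commit) of driving `Compile` directly with a toy context; only `CodegenContext`, `Compile` and the IR types are real, the toy struct is illustrative and mirrors what `Context` does.

use string_interner::DefaultStringInterner;

use crate::codegen::{CodegenContext, Compile};
use crate::ir::{BinOp, BinOpKind, Const, ExprId, Ir, SymId, ToIr};

// Toy context: ExprIds are plain indices into `irs`.
struct ToyCtx {
    irs: Vec<Ir>,
    syms: DefaultStringInterner,
}

impl CodegenContext for ToyCtx {
    fn get_ir(&self, id: ExprId) -> &Ir {
        &self.irs[id.0]
    }
    fn get_sym(&self, id: SymId) -> &str {
        self.syms.resolve(id).unwrap()
    }
}

fn demo() -> String {
    let mut ctx = ToyCtx { irs: Vec::new(), syms: DefaultStringInterner::new() };
    ctx.irs.push(Const::from(1i64).to_ir()); // ExprId(0)
    ctx.irs.push(Const::from(2i64).to_ir()); // ExprId(1)
    let add = BinOp { lhs: ExprId(0), rhs: ExprId(1), kind: BinOpKind::Add };
    // Produces the JS fragment "NixRuntime.op.add(1,2)".
    add.compile(&ctx)
}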
nix-js/src/context.rs (new file, +191)
@@ -0,0 +1,191 @@
|
||||
use std::ptr::NonNull;
|
||||
|
||||
use hashbrown::HashMap;
|
||||
use string_interner::DefaultStringInterner;
|
||||
|
||||
use crate::codegen::{CodegenContext, Compile};
|
||||
use crate::error::{Error, Result};
|
||||
use crate::ir::{DowngradeContext, ExprId, Ir, SymId};
|
||||
use crate::value::Value;
|
||||
|
||||
use downgrade::DowngradeCtx;
|
||||
|
||||
mod downgrade;
|
||||
|
||||
pub struct Context {
|
||||
irs: Vec<Ir>,
|
||||
symbols: DefaultStringInterner,
|
||||
global: NonNull<HashMap<crate::ir::SymId, ExprId>>,
|
||||
}
|
||||
|
||||
impl Drop for Context {
|
||||
fn drop(&mut self) {
|
||||
unsafe {
|
||||
drop(Box::from_raw(self.global.as_ptr()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Context {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
symbols: DefaultStringInterner::new(),
|
||||
irs: Vec::new(),
|
||||
global: unsafe { NonNull::new_unchecked(Box::leak(Box::new(HashMap::new()))) },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Context {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub fn downgrade_ctx<'a>(&'a mut self) -> DowngradeCtx<'a> {
|
||||
// SAFETY: `global` is readonly
|
||||
let global_ref = unsafe { self.global.as_ref() };
|
||||
DowngradeCtx::new(self, global_ref)
|
||||
}
|
||||
|
||||
pub fn eval(&mut self, expr: &str) -> Result<Value> {
|
||||
let root = rnix::Root::parse(expr);
|
||||
if !root.errors().is_empty() {
|
||||
return Err(Error::parse_error(root.errors().iter().fold(
|
||||
String::new(),
|
||||
|mut acc, err| {
|
||||
acc.push_str(&err.to_string());
|
||||
acc.push_str("; ");
|
||||
acc
|
||||
},
|
||||
)));
|
||||
}
|
||||
let root = self
|
||||
.downgrade_ctx()
|
||||
.downgrade(root.tree().expr().unwrap())?;
|
||||
let code = self.get_ir(root).compile(self);
|
||||
let code = format!("NixRuntime.force({})", code);
|
||||
println!("[DEBUG] generated code: {}", &code);
|
||||
crate::runtime::run(&code)
|
||||
}
|
||||
}
|
||||
|
||||
impl CodegenContext for Context {
|
||||
fn get_ir(&self, id: ExprId) -> &Ir {
|
||||
self.irs.get(id.0).unwrap()
|
||||
}
|
||||
|
||||
fn get_sym(&self, id: SymId) -> &str {
|
||||
self.symbols.resolve(id).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::value::Const;
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn basic_eval() {
|
||||
assert_eq!(
|
||||
Context::new().eval("1 + 1").unwrap(),
|
||||
Value::Const(Const::Int(2))
|
||||
);
|
||||
assert_eq!(
|
||||
Context::new().eval("(x: x) 1").unwrap(),
|
||||
Value::Const(Const::Int(1))
|
||||
);
|
||||
assert_eq!(
|
||||
Context::new().eval("(x: y: x - y) 2 1").unwrap(),
|
||||
Value::Const(Const::Int(1))
|
||||
);
|
||||
assert_eq!(
|
||||
Context::new().eval("rec { b = a; a = 1; }.b").unwrap(),
|
||||
Value::Const(Const::Int(1))
|
||||
);
|
||||
assert_eq!(
|
||||
Context::new().eval("let b = a; a = 1; in b").unwrap(),
|
||||
Value::Const(Const::Int(1))
|
||||
);
|
||||
assert_eq!(
|
||||
Context::new().eval("let fib = n: if n == 1 || n == 2 then 1 else (fib (n - 1)) + (fib (n - 2)); in fib 30").unwrap(),
|
||||
Value::Const(Const::Int(832040))
|
||||
);
|
||||
assert_eq!(
|
||||
Context::new()
|
||||
.eval("((f: let x = f x; in x)(self: { x = 1; y = self.x + 1; })).y")
|
||||
.unwrap(),
|
||||
Value::Const(Const::Int(2))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_param_check_required() {
|
||||
// Test function with required parameters
|
||||
assert_eq!(
|
||||
Context::new().eval("({ a, b }: a + b) { a = 1; b = 2; }").unwrap(),
|
||||
Value::Const(Const::Int(3))
|
||||
);
|
||||
|
||||
// Test missing required parameter should fail
|
||||
let result = Context::new().eval("({ a, b }: a + b) { a = 1; }");
|
||||
assert!(result.is_err());
|
||||
|
||||
// Test all required parameters present
|
||||
assert_eq!(
|
||||
Context::new().eval("({ x, y, z }: x + y + z) { x = 1; y = 2; z = 3; }").unwrap(),
|
||||
Value::Const(Const::Int(6))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_param_check_allowed() {
|
||||
// Test function without ellipsis - should reject unexpected arguments
|
||||
let result = Context::new().eval("({ a, b }: a + b) { a = 1; b = 2; c = 3; }");
|
||||
assert!(result.is_err());
|
||||
|
||||
// Test function with ellipsis - should accept extra arguments
|
||||
assert_eq!(
|
||||
Context::new().eval("({ a, b, ... }: a + b) { a = 1; b = 2; c = 3; }").unwrap(),
|
||||
Value::Const(Const::Int(3))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_param_check_with_default() {
|
||||
// Test function with default parameters
|
||||
assert_eq!(
|
||||
Context::new().eval("({ a, b ? 5 }: a + b) { a = 1; }").unwrap(),
|
||||
Value::Const(Const::Int(6))
|
||||
);
|
||||
|
||||
// Test overriding default parameter
|
||||
assert_eq!(
|
||||
Context::new().eval("({ a, b ? 5 }: a + b) { a = 1; b = 10; }").unwrap(),
|
||||
Value::Const(Const::Int(11))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_param_check_with_alias() {
|
||||
// Test function with @ pattern (alias)
|
||||
assert_eq!(
|
||||
Context::new().eval("(args@{ a, b }: args.a + args.b) { a = 1; b = 2; }").unwrap(),
|
||||
Value::Const(Const::Int(3))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_simple_param_no_check() {
|
||||
// Test simple parameter (no pattern) should not have validation
|
||||
assert_eq!(
|
||||
Context::new().eval("(x: x.a + x.b) { a = 1; b = 2; }").unwrap(),
|
||||
Value::Const(Const::Int(3))
|
||||
);
|
||||
|
||||
// Simple parameter accepts any argument
|
||||
assert_eq!(
|
||||
Context::new().eval("(x: x) 42").unwrap(),
|
||||
Value::Const(Const::Int(42))
|
||||
);
|
||||
}
|
||||
}
|
||||
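A sketch (not part of this commit, module paths assumed) of embedding the evaluator the same way the tests above do:

use crate::context::Context;
use crate::error::Result;

fn run_snippet() -> Result<()> {
    let mut ctx = Context::new();
    // Parses with rnix, downgrades to the IR, emits JS and runs it on the runtime.
    let value = ctx.eval("let x = 20; in x + 22")?;
    println!("{value:?}"); // Value::Const(Const::Int(42))
    Ok(())
}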
nix-js/src/context/downgrade.rs (new file, +164)
@@ -0,0 +1,164 @@
|
||||
use hashbrown::HashMap;
|
||||
|
||||
use crate::error::{Error, Result};
|
||||
use crate::ir::{ArgId, Downgrade, DowngradeContext, ExprId, Ir, SymId, ToIr};
|
||||
|
||||
use super::Context;
|
||||
|
||||
enum Scope<'ctx> {
|
||||
Global(&'ctx HashMap<SymId, ExprId>),
|
||||
Let(HashMap<SymId, ExprId>),
|
||||
Param(SymId, ExprId),
|
||||
With(ExprId),
|
||||
}
|
||||
|
||||
struct ScopeGuard<'a, 'ctx> {
|
||||
ctx: &'a mut DowngradeCtx<'ctx>,
|
||||
}
|
||||
|
||||
impl<'a, 'ctx> Drop for ScopeGuard<'a, 'ctx> {
|
||||
fn drop(&mut self) {
|
||||
self.ctx.scopes.pop();
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'ctx> ScopeGuard<'a, 'ctx> {
|
||||
fn as_ctx(&mut self) -> &mut DowngradeCtx<'ctx> {
|
||||
self.ctx
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DowngradeCtx<'ctx> {
|
||||
ctx: &'ctx mut Context,
|
||||
irs: Vec<Option<Ir>>,
|
||||
scopes: Vec<Scope<'ctx>>,
|
||||
arg_id: usize,
|
||||
}
|
||||
|
||||
impl<'ctx> DowngradeCtx<'ctx> {
|
||||
pub fn new(ctx: &'ctx mut Context, global: &'ctx HashMap<SymId, ExprId>) -> Self {
|
||||
Self {
|
||||
scopes: vec![Scope::Global(global)],
|
||||
irs: vec![],
|
||||
arg_id: 0,
|
||||
ctx,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DowngradeContext for DowngradeCtx<'_> {
|
||||
fn new_expr(&mut self, expr: Ir) -> ExprId {
|
||||
self.irs.push(Some(expr));
|
||||
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
|
||||
}
|
||||
|
||||
fn new_arg(&mut self) -> ExprId {
|
||||
self.irs.push(Some(Ir::Arg(ArgId(self.arg_id))));
|
||||
self.arg_id += 1;
|
||||
ExprId(self.ctx.irs.len() + self.irs.len() - 1)
|
||||
}
|
||||
|
||||
fn new_sym(&mut self, sym: String) -> SymId {
|
||||
self.ctx.symbols.get_or_intern(sym)
|
||||
}
|
||||
|
||||
fn get_sym(&self, id: SymId) -> &str {
|
||||
self.ctx.symbols.resolve(id).unwrap()
|
||||
}
|
||||
|
||||
fn lookup(&mut self, sym: SymId) -> Result<ExprId> {
|
||||
for scope in self.scopes.iter().rev() {
|
||||
match scope {
|
||||
&Scope::Global(global_scope) => {
|
||||
if let Some(&expr) = global_scope.get(&sym) {
|
||||
return Ok(expr);
|
||||
}
|
||||
}
|
||||
Scope::Let(let_scope) => {
|
||||
if let Some(&expr) = let_scope.get(&sym) {
|
||||
// Wrap in ExprRef to reference the binding instead of recompiling
|
||||
return Ok(self.new_expr(Ir::ExprRef(expr)));
|
||||
}
|
||||
}
|
||||
&Scope::Param(param_sym, expr) => {
|
||||
if param_sym == sym {
|
||||
return Ok(expr);
|
||||
}
|
||||
}
|
||||
&Scope::With(_) => (),
|
||||
}
|
||||
}
|
||||
|
||||
let namespaces: Vec<ExprId> = self
|
||||
.scopes
|
||||
.iter()
|
||||
.filter_map(|scope| {
|
||||
if let &Scope::With(namespace) = scope {
|
||||
Some(namespace)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
let mut result = None;
|
||||
for namespace in namespaces {
|
||||
use crate::ir::{Attr, Select};
|
||||
let select = Select {
|
||||
expr: namespace,
|
||||
attrpath: vec![Attr::Str(sym)],
|
||||
default: result, // Link to outer With or None
|
||||
};
|
||||
result = Some(self.new_expr(select.to_ir()));
|
||||
}
|
||||
result.ok_or_else(|| Error::downgrade_error(format!("'{}' not found", self.get_sym(sym))))
|
||||
}
|
||||
|
||||
fn extract_expr(&mut self, id: ExprId) -> Ir {
|
||||
self.irs.get_mut(id.0).unwrap().take().unwrap()
|
||||
}
|
||||
|
||||
fn replace_expr(&mut self, id: ExprId, expr: Ir) {
|
||||
let _ = self.irs.get_mut(id.0).unwrap().insert(expr);
|
||||
}
|
||||
|
||||
#[allow(refining_impl_trait)]
|
||||
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<> {
|
||||
self.irs.extend(std::iter::repeat_with(|| None).take(slots));
|
||||
(self.irs.len() - slots..self.irs.len()).map(ExprId)
|
||||
}
|
||||
|
||||
fn downgrade(mut self, root: rnix::ast::Expr) -> Result<ExprId> {
|
||||
let root = root.downgrade(&mut self)?;
|
||||
self.ctx
|
||||
.irs
|
||||
.extend(self.irs.into_iter().map(Option::unwrap));
|
||||
Ok(root)
|
||||
}
|
||||
|
||||
fn with_let_scope<F, R>(&mut self, bindings: HashMap<SymId, ExprId>, f: F) -> R
|
||||
where
|
||||
F: FnOnce(&mut Self) -> R,
|
||||
{
|
||||
self.scopes.push(Scope::Let(bindings));
|
||||
let mut guard = ScopeGuard { ctx: self };
|
||||
f(guard.as_ctx())
|
||||
}
|
||||
|
||||
fn with_param_scope<F, R>(&mut self, param: SymId, arg: ExprId, f: F) -> R
|
||||
where
|
||||
F: FnOnce(&mut Self) -> R,
|
||||
{
|
||||
self.scopes.push(Scope::Param(param, arg));
|
||||
let mut guard = ScopeGuard { ctx: self };
|
||||
f(guard.as_ctx())
|
||||
}
|
||||
|
||||
fn with_with_scope<F, R>(&mut self, namespace: ExprId, f: F) -> R
|
||||
where
|
||||
F: FnOnce(&mut Self) -> R,
|
||||
{
|
||||
self.scopes.push(Scope::With(namespace));
|
||||
let mut guard = ScopeGuard { ctx: self };
|
||||
f(guard.as_ctx())
|
||||
}
|
||||
}
|
||||
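For intuition, the `with` fallback in `lookup` resolves an otherwise-unbound identifier by chaining `Select`s, trying the innermost namespace first and falling back outwards (a sketch of the produced IR, ids hypothetical):

// with outer; with inner; foo
// lookup(foo) builds roughly:
//   Select { expr: inner, attrpath: [Attr::Str(foo)],
//            default: Some(Select { expr: outer, attrpath: [Attr::Str(foo)], default: None }) }
// A miss in `inner` falls back to `outer`; a miss in both surfaces at runtime
// from the final Select, which has no default.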
nix-js/src/error.rs (new file, +124)
@@ -0,0 +1,124 @@
|
||||
use std::rc::Rc;
|
||||
use thiserror::Error;
|
||||
|
||||
pub type Result<T> = core::result::Result<T, Error>;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum ErrorKind {
|
||||
#[error("error occurred during parse stage: {0}")]
|
||||
ParseError(String),
|
||||
#[error("error occurred during downgrade stage: {0}")]
|
||||
DowngradeError(String),
|
||||
#[error("error occurred during evaluation stage: {0}")]
|
||||
EvalError(String),
|
||||
#[error("{0}")]
|
||||
Catchable(String),
|
||||
#[error("an unknown or unexpected error occurred")]
|
||||
Unknown,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Error {
|
||||
pub kind: ErrorKind,
|
||||
pub span: Option<rnix::TextRange>,
|
||||
pub source: Option<Rc<str>>,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
// Basic display
|
||||
write!(f, "{}", self.kind)?;
|
||||
|
||||
// If we have source and span, print context
|
||||
if let (Some(source), Some(span)) = (&self.source, self.span) {
|
||||
let start_byte = usize::from(span.start());
|
||||
let end_byte = usize::from(span.end());
|
||||
|
||||
if start_byte > source.len() || end_byte > source.len() {
|
||||
return Ok(()); // Span is out of bounds
|
||||
}
|
||||
|
||||
let mut start_line = 1;
|
||||
let mut start_col = 1usize;
|
||||
let mut line_start_byte = 0;
|
||||
for (i, c) in source.char_indices() {
|
||||
if i >= start_byte {
|
||||
break;
|
||||
}
|
||||
if c == '\n' {
|
||||
start_line += 1;
|
||||
start_col = 1;
|
||||
line_start_byte = i + 1;
|
||||
} else {
|
||||
start_col += 1;
|
||||
}
|
||||
}
|
||||
|
||||
let line_end_byte = source[line_start_byte..]
|
||||
.find('\n')
|
||||
.map(|i| line_start_byte + i)
|
||||
.unwrap_or(source.len());
|
||||
|
||||
let line_str = &source[line_start_byte..line_end_byte];
|
||||
|
||||
let underline_len = if end_byte > start_byte {
|
||||
end_byte - start_byte
|
||||
} else {
|
||||
1
|
||||
};
|
||||
|
||||
write!(f, "\n --> {}:{}", start_line, start_col)?;
|
||||
write!(f, "\n |\n")?;
|
||||
writeln!(f, "{:4} | {}", start_line, line_str)?;
|
||||
write!(
|
||||
f,
|
||||
" | {}{}",
|
||||
" ".repeat(start_col.saturating_sub(1)),
|
||||
"^".repeat(underline_len)
|
||||
)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for Error {
|
||||
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
|
||||
Some(&self.kind)
|
||||
}
|
||||
}
|
||||
|
||||
impl Error {
|
||||
pub fn new(kind: ErrorKind) -> Self {
|
||||
Self {
|
||||
kind,
|
||||
span: None,
|
||||
source: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_span(mut self, span: rnix::TextRange) -> Self {
|
||||
self.span = Some(span);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_source(mut self, source: Rc<str>) -> Self {
|
||||
self.source = Some(source);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn parse_error(msg: String) -> Self {
|
||||
Self::new(ErrorKind::ParseError(msg))
|
||||
}
|
||||
pub fn downgrade_error(msg: String) -> Self {
|
||||
Self::new(ErrorKind::DowngradeError(msg))
|
||||
}
|
||||
pub fn eval_error(msg: String) -> Self {
|
||||
Self::new(ErrorKind::EvalError(msg))
|
||||
}
|
||||
pub fn catchable(msg: String) -> Self {
|
||||
Self::new(ErrorKind::Catchable(msg))
|
||||
}
|
||||
pub fn unknown() -> Self {
|
||||
Self::new(ErrorKind::Unknown)
|
||||
}
|
||||
}
|
||||
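A sketch (not part of this commit) of attaching a span and source to an error and what `Display` then prints; the message text and span values are made up for illustration:

use std::rc::Rc;

use crate::error::Error;

fn report() {
    let src: Rc<str> = Rc::from("1 + true");
    let err = Error::eval_error("cannot add an integer and a boolean".to_string())
        .with_span(rnix::TextRange::new(4.into(), 8.into()))
        .with_source(src);
    // Prints the kind message, then a caret-underlined snippet roughly like:
    //   error occurred during evaluation stage: cannot add an integer and a boolean
    //    --> 1:5
    //      |
    //      1 | 1 + true
    //        |     ^^^^
    eprintln!("{err}");
}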
nix-js/src/ir.rs (new file, +393)
@@ -0,0 +1,393 @@
|
||||
use derive_more::{IsVariant, TryUnwrap, Unwrap};
|
||||
use hashbrown::{HashMap, HashSet};
|
||||
use rnix::ast;
|
||||
use string_interner::symbol::SymbolU32;
|
||||
|
||||
use crate::error::{Error, Result};
|
||||
use crate::value::Const as PubConst;
|
||||
use crate::value::format_symbol;
|
||||
use nix_js_macros::ir;
|
||||
|
||||
mod downgrade;
|
||||
mod utils;
|
||||
use utils::*;
|
||||
|
||||
pub use downgrade::Downgrade;
|
||||
|
||||
pub trait DowngradeContext {
|
||||
fn downgrade(self, expr: rnix::ast::Expr) -> Result<ExprId>;
|
||||
|
||||
fn new_expr(&mut self, expr: Ir) -> ExprId;
|
||||
fn new_arg(&mut self) -> ExprId;
|
||||
|
||||
fn new_sym(&mut self, sym: String) -> SymId;
|
||||
fn get_sym(&self, id: SymId) -> &str;
|
||||
fn lookup(&mut self, sym: SymId) -> Result<ExprId>;
|
||||
|
||||
fn extract_expr(&mut self, id: ExprId) -> Ir;
|
||||
fn replace_expr(&mut self, id: ExprId, expr: Ir);
|
||||
fn reserve_slots(&mut self, slots: usize) -> impl Iterator<Item = ExprId> + Clone + use<Self>;
|
||||
|
||||
fn with_param_scope<F, R>(&mut self, param: SymId, arg: ExprId, f: F) -> R
|
||||
where
|
||||
F: FnOnce(&mut Self) -> R;
|
||||
fn with_let_scope<F, R>(&mut self, bindings: HashMap<SymId, ExprId>, f: F) -> R
|
||||
where
|
||||
F: FnOnce(&mut Self) -> R;
|
||||
fn with_with_scope<F, R>(&mut self, namespace: ExprId, f: F) -> R
|
||||
where
|
||||
F: FnOnce(&mut Self) -> R;
|
||||
}
|
||||
|
||||
ir! {
|
||||
Ir,
|
||||
|
||||
AttrSet,
|
||||
List,
|
||||
HasAttr,
|
||||
BinOp,
|
||||
UnOp,
|
||||
Select,
|
||||
If,
|
||||
Call,
|
||||
With,
|
||||
Assert,
|
||||
ConcatStrings,
|
||||
Const,
|
||||
Str,
|
||||
Path,
|
||||
Func,
|
||||
Let,
|
||||
Arg(ArgId),
|
||||
PrimOp(PrimOpId),
|
||||
ExprRef(ExprId),
|
||||
Thunk(ExprId),
|
||||
}
|
||||
|
||||
impl AttrSet {
|
||||
fn _insert(
|
||||
&mut self,
|
||||
mut path: impl Iterator<Item = Attr>,
|
||||
name: Attr,
|
||||
value: ExprId,
|
||||
ctx: &mut impl DowngradeContext,
|
||||
) -> Result<()> {
|
||||
if let Some(attr) = path.next() {
|
||||
// If the path is not yet exhausted, we need to recurse deeper.
|
||||
match attr {
|
||||
Attr::Str(ident) => {
|
||||
// If the next attribute is a static string.
|
||||
if let Some(&id) = self.stcs.get(&ident) {
|
||||
// If a sub-attrset already exists, recurse into it.
|
||||
let mut ir = ctx.extract_expr(id);
|
||||
let result = ir
|
||||
.as_mut()
|
||||
.try_unwrap_attr_set()
|
||||
.map_err(|_| {
|
||||
// This path segment exists but is not an attrset, which is an error.
|
||||
Error::downgrade_error(format!(
|
||||
"attribute '{}' already defined but is not an attribute set",
|
||||
format_symbol(ctx.get_sym(ident))
|
||||
))
|
||||
})
|
||||
.and_then(|attrs| attrs._insert(path, name, value, ctx));
|
||||
ctx.replace_expr(id, ir);
|
||||
result?;
|
||||
} else {
|
||||
// Create a new sub-attrset because this path doesn't exist yet.
|
||||
let mut attrs = AttrSet::default();
|
||||
attrs._insert(path, name, value, ctx)?;
|
||||
let attrs = ctx.new_expr(attrs.to_ir());
|
||||
self.stcs.insert(ident, attrs);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Attr::Dynamic(dynamic) => {
|
||||
// If the next attribute is a dynamic expression, we must create a new sub-attrset.
|
||||
// We cannot merge with existing dynamic attributes at this stage.
|
||||
let mut attrs = AttrSet::default();
|
||||
attrs._insert(path, name, value, ctx)?;
|
||||
self.dyns.push((dynamic, ctx.new_expr(attrs.to_ir())));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// This is the final attribute in the path, so insert the value here.
|
||||
match name {
|
||||
Attr::Str(ident) => {
|
||||
if self.stcs.insert(ident, value).is_some() {
|
||||
return Err(Error::downgrade_error(format!(
|
||||
"attribute '{}' already defined",
|
||||
format_symbol(ctx.get_sym(ident))
|
||||
)));
|
||||
}
|
||||
}
|
||||
Attr::Dynamic(dynamic) => {
|
||||
self.dyns.push((dynamic, value));
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn insert(
|
||||
&mut self,
|
||||
path: Vec<Attr>,
|
||||
value: ExprId,
|
||||
ctx: &mut impl DowngradeContext,
|
||||
) -> Result<()> {
|
||||
let mut path = path.into_iter();
|
||||
// The last part of the path is the name of the attribute to be inserted.
|
||||
let name = path.next_back().unwrap();
|
||||
self._insert(path, name, value, ctx)
|
||||
}
|
||||
}
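For intuition, a sketch of what `insert` builds for nested attribute paths (expression ids hypothetical):

// { a.b = 1; a.c = 2; }
// insert([a, b], expr1) creates  AttrSet { stcs: { a -> AttrSet { stcs: { b -> expr1 } } } }
// insert([a, c], expr2) recurses into the existing `a` sub-attrset and adds `c`,
// while a duplicate leaf such as `{ a = 1; a = 2; }` is rejected with
// "attribute 'a' already defined".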
|
||||
|
||||
#[repr(transparent)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
pub struct ExprId(pub usize);
|
||||
|
||||
pub type SymId = SymbolU32;
|
||||
|
||||
#[repr(transparent)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct PrimOpId(pub usize);
|
||||
|
||||
#[repr(transparent)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
pub struct ArgId(pub usize);
|
||||
|
||||
/// Represents a Nix attribute set.
|
||||
#[derive(Debug, Default)]
|
||||
pub struct AttrSet {
|
||||
/// Statically known attributes (key is a string).
|
||||
pub stcs: HashMap<SymId, ExprId>,
|
||||
/// Dynamically computed attributes, where both the key and value are expressions.
|
||||
pub dyns: Vec<(ExprId, ExprId)>,
|
||||
}
|
||||
|
||||
/// Represents a key in an attribute path.
|
||||
#[derive(Debug, TryUnwrap)]
|
||||
pub enum Attr {
|
||||
/// A dynamic attribute key, which is an expression that must evaluate to a string.
|
||||
/// Example: `attrs.${key}`
|
||||
Dynamic(ExprId),
|
||||
/// A static attribute key.
|
||||
/// Example: `attrs.key`
|
||||
Str(SymId),
|
||||
}
|
||||
|
||||
/// Represents a Nix list.
|
||||
#[derive(Debug)]
|
||||
pub struct List {
|
||||
/// The expressions that are elements of the list.
|
||||
pub items: Vec<ExprId>,
|
||||
}
|
||||
|
||||
/// Represents a "has attribute" check (`?` operator).
|
||||
#[derive(Debug)]
|
||||
pub struct HasAttr {
|
||||
/// The expression to check for the attribute (the left-hand side).
|
||||
pub lhs: ExprId,
|
||||
/// The attribute path to look for (the right-hand side).
|
||||
pub rhs: Vec<Attr>,
|
||||
}
|
||||
|
||||
/// Represents a binary operation.
|
||||
#[derive(Debug)]
|
||||
pub struct BinOp {
|
||||
pub lhs: ExprId,
|
||||
pub rhs: ExprId,
|
||||
pub kind: BinOpKind,
|
||||
}
|
||||
|
||||
/// The kinds of binary operations supported in Nix.
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum BinOpKind {
|
||||
// Arithmetic
|
||||
Add,
|
||||
Sub,
|
||||
Div,
|
||||
Mul,
|
||||
|
||||
// Comparison
|
||||
Eq,
|
||||
Neq,
|
||||
Lt,
|
||||
Gt,
|
||||
Leq,
|
||||
Geq,
|
||||
|
||||
// Logical
|
||||
And,
|
||||
Or,
|
||||
Impl,
|
||||
|
||||
// Set/String/Path operations
|
||||
Con, // List concatenation (`++`)
|
||||
Upd, // AttrSet update (`//`)
|
||||
|
||||
// Not standard, but part of rnix AST
|
||||
PipeL,
|
||||
PipeR,
|
||||
}
|
||||
|
||||
impl From<ast::BinOpKind> for BinOpKind {
|
||||
fn from(op: ast::BinOpKind) -> Self {
|
||||
use BinOpKind::*;
|
||||
use ast::BinOpKind as kind;
|
||||
match op {
|
||||
kind::Concat => Con,
|
||||
kind::Update => Upd,
|
||||
kind::Add => Add,
|
||||
kind::Sub => Sub,
|
||||
kind::Mul => Mul,
|
||||
kind::Div => Div,
|
||||
kind::And => And,
|
||||
kind::Equal => Eq,
|
||||
kind::Implication => Impl,
|
||||
kind::Less => Lt,
|
||||
kind::LessOrEq => Leq,
|
||||
kind::More => Gt,
|
||||
kind::MoreOrEq => Geq,
|
||||
kind::NotEqual => Neq,
|
||||
kind::Or => Or,
|
||||
kind::PipeLeft => PipeL,
|
||||
kind::PipeRight => PipeR,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a unary operation.
|
||||
#[derive(Debug)]
|
||||
pub struct UnOp {
|
||||
pub rhs: ExprId,
|
||||
pub kind: UnOpKind,
|
||||
}
|
||||
|
||||
/// The kinds of unary operations.
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum UnOpKind {
|
||||
Neg, // Negation (`-`)
|
||||
Not, // Logical not (`!`)
|
||||
}
|
||||
|
||||
impl From<ast::UnaryOpKind> for UnOpKind {
|
||||
fn from(value: ast::UnaryOpKind) -> Self {
|
||||
match value {
|
||||
ast::UnaryOpKind::Invert => UnOpKind::Not,
|
||||
ast::UnaryOpKind::Negate => UnOpKind::Neg,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents an attribute selection from an attribute set.
|
||||
#[derive(Debug)]
|
||||
pub struct Select {
|
||||
/// The expression that should evaluate to an attribute set.
|
||||
pub expr: ExprId,
|
||||
/// The path of attributes to select.
|
||||
pub attrpath: Vec<Attr>,
|
||||
/// An optional default value to return if the selection fails.
|
||||
pub default: Option<ExprId>,
|
||||
}
|
||||
|
||||
/// Represents an `if-then-else` expression.
|
||||
#[derive(Debug)]
|
||||
pub struct If {
|
||||
pub cond: ExprId,
|
||||
pub consq: ExprId, // Consequence (then branch)
|
||||
pub alter: ExprId, // Alternative (else branch)
|
||||
}
|
||||
|
||||
/// Represents a function value (a lambda).
|
||||
#[derive(Debug)]
|
||||
pub struct Func {
|
||||
/// The body of the function
|
||||
pub body: ExprId,
|
||||
/// The parameter specification for the function.
|
||||
pub param: Param,
|
||||
|
||||
pub arg: ExprId,
|
||||
}
|
||||
|
||||
/// Represents a `let ... in ...` expression.
|
||||
#[derive(Debug)]
|
||||
pub struct Let {
|
||||
/// The bindings in the let expression.
|
||||
pub bindings: Vec<ExprId>,
|
||||
/// The body expression evaluated in the scope of the bindings.
|
||||
pub body: ExprId,
|
||||
}
|
||||
|
||||
/// Describes the parameters of a function.
|
||||
#[derive(Debug)]
|
||||
pub struct Param {
|
||||
/// The name of the argument if it's a simple identifier (e.g., `x: ...`).
|
||||
/// Also used for the alias in a pattern (e.g., `args @ { ... }`).
|
||||
pub ident: Option<SymId>,
|
||||
/// The set of required parameter names for a pattern-matching function.
|
||||
pub required: Option<Vec<SymId>>,
|
||||
/// The set of all allowed parameter names for a non-ellipsis pattern-matching function.
|
||||
/// If `None`, any attribute is allowed (ellipsis `...` is present).
|
||||
pub allowed: Option<HashSet<SymId>>,
|
||||
}
|
||||
|
||||
/// Represents a function call.
|
||||
#[derive(Debug)]
|
||||
pub struct Call {
|
||||
/// The expression that evaluates to the function to be called.
|
||||
pub func: ExprId,
|
||||
pub arg: ExprId,
|
||||
}
|
||||
|
||||
/// Represents a `with` expression.
|
||||
#[derive(Debug)]
|
||||
pub struct With {
|
||||
/// The namespace to bring into scope.
|
||||
pub namespace: ExprId,
|
||||
/// The expression to be evaluated within the new scope.
|
||||
pub expr: ExprId,
|
||||
}
|
||||
|
||||
/// Represents an `assert` expression.
|
||||
#[derive(Debug)]
|
||||
pub struct Assert {
|
||||
/// The condition to assert.
|
||||
pub assertion: ExprId,
|
||||
/// The expression to return if the assertion is true.
|
||||
pub expr: ExprId,
|
||||
}
|
||||
|
||||
/// Represents the concatenation of multiple string expressions.
|
||||
/// This is typically the result of downgrading an interpolated string.
|
||||
#[derive(Debug)]
|
||||
pub struct ConcatStrings {
|
||||
pub parts: Vec<ExprId>,
|
||||
}
|
||||
|
||||
/// Represents a constant value (e.g., integer, float, boolean, null).
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct Const {
|
||||
pub val: PubConst,
|
||||
}
|
||||
|
||||
impl<T: Into<PubConst>> From<T> for Const {
|
||||
fn from(value: T) -> Self {
|
||||
Self { val: value.into() }
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a simple, non-interpolated string literal.
|
||||
#[derive(Debug)]
|
||||
pub struct Str {
|
||||
pub val: String,
|
||||
}
|
||||
|
||||
/// Represents a path literal.
|
||||
#[derive(Debug)]
|
||||
pub struct Path {
|
||||
/// The expression that evaluates to the string content of the path.
|
||||
/// This can be a simple `Str` or a `ConcatStrings` for interpolated paths.
|
||||
pub expr: ExprId,
|
||||
}
|
||||
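A sketch (not part of this commit) of working with the generated enums; `as_ref`, `IrRef` and the `ToIr` impls all come from the `ir!` expansion above:

use crate::ir::{BinOp, BinOpKind, ExprId, Ir, IrRef, ToIr};

fn build() -> Ir {
    // `From`/`ToIr` are generated for the struct-backed variants.
    BinOp { lhs: ExprId(0), rhs: ExprId(1), kind: BinOpKind::Add }.to_ir()
}

fn classify(node: &Ir) -> &'static str {
    match node.as_ref() {
        IrRef::BinOp(BinOp { kind: BinOpKind::Add, .. }) => "addition",
        IrRef::BinOp(_) => "other binary operation",
        IrRef::Thunk(_) => "thunk",
        _ => "something else",
    }
}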
nix-js/src/ir/downgrade.rs (new file, +389)
@@ -0,0 +1,389 @@
|
||||
use rnix::ast::{self, Expr, HasEntry};
|
||||
|
||||
use crate::error::{Error, Result};
|
||||
|
||||
use super::*;
|
||||
|
||||
pub trait Downgrade<Ctx: DowngradeContext> {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId>;
|
||||
}
|
||||
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for Expr {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
use Expr::*;
|
||||
match self {
|
||||
Apply(apply) => apply.downgrade(ctx),
|
||||
Assert(assert) => assert.downgrade(ctx),
|
||||
Error(error) => Err(self::Error::downgrade_error(error.to_string())),
|
||||
IfElse(ifelse) => ifelse.downgrade(ctx),
|
||||
Select(select) => select.downgrade(ctx),
|
||||
Str(str) => str.downgrade(ctx),
|
||||
Path(path) => path.downgrade(ctx),
|
||||
Literal(lit) => lit.downgrade(ctx),
|
||||
Lambda(lambda) => lambda.downgrade(ctx),
|
||||
LegacyLet(let_) => let_.downgrade(ctx),
|
||||
LetIn(letin) => letin.downgrade(ctx),
|
||||
List(list) => list.downgrade(ctx),
|
||||
BinOp(op) => op.downgrade(ctx),
|
||||
AttrSet(attrs) => attrs.downgrade(ctx),
|
||||
UnaryOp(op) => op.downgrade(ctx),
|
||||
Ident(ident) => ident.downgrade(ctx),
|
||||
With(with) => with.downgrade(ctx),
|
||||
HasAttr(has) => has.downgrade(ctx),
|
||||
Paren(paren) => paren.expr().unwrap().downgrade(ctx),
|
||||
Root(root) => root.expr().unwrap().downgrade(ctx),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Assert {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let assertion = self.condition().unwrap().downgrade(ctx)?;
|
||||
let expr = self.body().unwrap().downgrade(ctx)?;
|
||||
Ok(ctx.new_expr(Assert { assertion, expr }.to_ir()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::IfElse {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let cond = self.condition().unwrap().downgrade(ctx)?;
|
||||
let consq = self.body().unwrap().downgrade(ctx)?;
|
||||
let alter = self.else_body().unwrap().downgrade(ctx)?;
|
||||
Ok(ctx.new_expr(If { cond, consq, alter }.to_ir()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Path {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let parts = self
|
||||
.parts()
|
||||
.map(|part| match part {
|
||||
ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(
|
||||
Str {
|
||||
val: lit.to_string(),
|
||||
}
|
||||
.to_ir(),
|
||||
)),
|
||||
ast::InterpolPart::Interpolation(interpol) => {
|
||||
interpol.expr().unwrap().downgrade(ctx)
|
||||
}
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
let expr = if parts.len() == 1 {
|
||||
parts.into_iter().next().unwrap()
|
||||
} else {
|
||||
ctx.new_expr(ConcatStrings { parts }.to_ir())
|
||||
};
|
||||
Ok(ctx.new_expr(Path { expr }.to_ir()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Str {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let parts = self
|
||||
.normalized_parts()
|
||||
.into_iter()
|
||||
.map(|part| match part {
|
||||
ast::InterpolPart::Literal(lit) => Ok(ctx.new_expr(Str { val: lit }.to_ir())),
|
||||
ast::InterpolPart::Interpolation(interpol) => {
|
||||
interpol.expr().unwrap().downgrade(ctx)
|
||||
}
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
Ok(if parts.len() == 1 {
|
||||
parts.into_iter().next().unwrap()
|
||||
} else {
|
||||
ctx.new_expr(ConcatStrings { parts }.to_ir())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Literal {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
Ok(ctx.new_expr(match self.kind() {
|
||||
ast::LiteralKind::Integer(int) => Const::from(int.value().unwrap()).to_ir(),
|
||||
ast::LiteralKind::Float(float) => Const::from(float.value().unwrap()).to_ir(),
|
||||
ast::LiteralKind::Uri(uri) => Str {
|
||||
val: uri.to_string(),
|
||||
}
|
||||
.to_ir(),
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Ident {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let sym = self.ident_token().unwrap().to_string();
|
||||
let sym = ctx.new_sym(sym);
|
||||
ctx.lookup(sym)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::AttrSet {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let rec = self.rec_token().is_some();
|
||||
|
||||
if !rec {
|
||||
let attrs = downgrade_attrs(self, ctx)?;
|
||||
return Ok(ctx.new_expr(attrs.to_ir()));
|
||||
}
|
||||
|
||||
// rec { a = 1; b = a; } => let a = 1; b = a; in { inherit a b; }
|
||||
|
||||
let entries: Vec<_> = self.entries().collect();
|
||||
|
||||
let (bindings, body) = downgrade_let_bindings(entries, ctx, |ctx, binding_keys| {
|
||||
// Create plain attrset as body with inherit
|
||||
let mut attrs = AttrSet {
|
||||
stcs: HashMap::new(),
|
||||
dyns: Vec::new(),
|
||||
};
|
||||
|
||||
for sym in binding_keys {
|
||||
let expr = ctx.lookup(*sym)?;
|
||||
attrs.stcs.insert(*sym, expr);
|
||||
}
|
||||
|
||||
Ok(ctx.new_expr(attrs.to_ir()))
|
||||
})?;
|
||||
|
||||
// Create Let expression
|
||||
Ok(ctx.new_expr(Let { bindings, body }.to_ir()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Downgrades a list.
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::List {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let items = self
|
||||
.items()
|
||||
.map(|item| maybe_thunk(item, ctx))
|
||||
.collect::<Result<_>>()?;
|
||||
Ok(ctx.new_expr(List { items }.to_ir()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Downgrades a binary operation.
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::BinOp {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let lhs = self.lhs().unwrap().downgrade(ctx)?;
|
||||
let rhs = self.rhs().unwrap().downgrade(ctx)?;
|
||||
let kind = self.operator().unwrap().into();
|
||||
Ok(ctx.new_expr(BinOp { lhs, rhs, kind }.to_ir()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Downgrades a "has attribute" (`?`) expression.
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::HasAttr {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let lhs = self.expr().unwrap().downgrade(ctx)?;
|
||||
let rhs = downgrade_attrpath(self.attrpath().unwrap(), ctx)?;
|
||||
Ok(ctx.new_expr(HasAttr { lhs, rhs }.to_ir()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Downgrades a unary operation.
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::UnaryOp {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let rhs = self.expr().unwrap().downgrade(ctx)?;
|
||||
let kind = self.operator().unwrap().into();
|
||||
Ok(ctx.new_expr(UnOp { rhs, kind }.to_ir()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Downgrades an attribute selection (`.`).
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Select {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let expr = self.expr().unwrap().downgrade(ctx)?;
|
||||
let attrpath = downgrade_attrpath(self.attrpath().unwrap(), ctx)?;
|
||||
let default = if let Some(default) = self.default_expr() {
|
||||
Some(default.downgrade(ctx)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Ok(ctx.new_expr(
|
||||
Select {
|
||||
expr,
|
||||
attrpath,
|
||||
default,
|
||||
}
|
||||
.to_ir(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// Downgrades a `legacy let`, which is essentially a recursive attribute set.
|
||||
/// The body of the `let` is accessed via `let.body`.
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LegacyLet {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let bindings = downgrade_static_attrs(self, ctx)?;
|
||||
let binding_keys: Vec<_> = bindings.keys().copied().collect();
|
||||
|
||||
let attrset_expr = ctx.with_let_scope(bindings, |ctx| {
|
||||
let mut attrs = AttrSet {
|
||||
stcs: HashMap::new(),
|
||||
dyns: Vec::new(),
|
||||
};
|
||||
|
||||
for sym in binding_keys {
|
||||
let expr = ctx.lookup(sym)?;
|
||||
attrs.stcs.insert(sym, expr);
|
||||
}
|
||||
|
||||
Ok(ctx.new_expr(attrs.to_ir()))
|
||||
})?;
|
||||
|
||||
let body_sym = ctx.new_sym("body".to_string());
|
||||
let select = Select {
|
||||
expr: attrset_expr,
|
||||
attrpath: vec![Attr::Str(body_sym)],
|
||||
default: None,
|
||||
};
|
||||
|
||||
Ok(ctx.new_expr(select.to_ir()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Downgrades a `let ... in ...` expression.
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::LetIn {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let entries: Vec<_> = self.entries().collect();
|
||||
let body_expr = self.body().unwrap();
|
||||
|
||||
let (bindings, body) = downgrade_let_bindings(entries, ctx, |ctx, _binding_keys| {
|
||||
body_expr.downgrade(ctx)
|
||||
})?;
|
||||
|
||||
Ok(ctx.new_expr(Let { bindings, body }.to_ir()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Downgrades a `with` expression.
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::With {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
// with namespace; expr
|
||||
let namespace = self.namespace().unwrap().downgrade(ctx)?;
|
||||
|
||||
// Downgrade body in With scope
|
||||
let expr = ctx.with_with_scope(namespace, |ctx| self.body().unwrap().downgrade(ctx))?;
|
||||
|
||||
Ok(expr)
|
||||
}
|
||||
}
|
||||
|
||||
/// Downgrades a lambda (function) expression.
|
||||
/// This involves desugaring pattern-matching arguments into `let` bindings.
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Lambda {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let arg = ctx.new_arg();
|
||||
|
||||
let ident;
|
||||
let required;
|
||||
let allowed;
|
||||
let body;
|
||||
|
||||
match self.param().unwrap() {
|
||||
ast::Param::IdentParam(id) => {
|
||||
// Simple case: `x: body`
|
||||
let param_sym = ctx.new_sym(id.to_string());
|
||||
ident = Some(param_sym);
|
||||
required = None;
|
||||
allowed = None;
|
||||
|
||||
// Downgrade body in Param scope
|
||||
body = ctx
|
||||
.with_param_scope(param_sym, arg, |ctx| self.body().unwrap().downgrade(ctx))?;
|
||||
}
|
||||
ast::Param::Pattern(pattern) => {
|
||||
// Complex case: `{ a, b ? 2, ... }@args: body`
|
||||
let alias = pattern
|
||||
.pat_bind()
|
||||
.map(|alias| ctx.new_sym(alias.ident().unwrap().to_string()));
|
||||
ident = alias;
|
||||
|
||||
let entries = pattern
|
||||
.pat_entries()
|
||||
.map(|entry| {
|
||||
let ident = ctx.new_sym(entry.ident().unwrap().to_string());
|
||||
if entry.default().is_none() {
|
||||
Ok((ident, None))
|
||||
} else {
|
||||
entry
|
||||
.default()
|
||||
.unwrap()
|
||||
.downgrade(ctx)
|
||||
.map(|ok| (ident, Some(ok)))
|
||||
}
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
|
||||
required = Some(
|
||||
entries
|
||||
.iter()
|
||||
.filter_map(|(k, d)| if d.is_none() { Some(*k) } else { None })
|
||||
.collect(),
|
||||
);
|
||||
allowed = if pattern.ellipsis_token().is_some() {
|
||||
None // `...` means any attribute is allowed.
|
||||
} else {
|
||||
Some(entries.iter().map(|(k, _)| *k).collect())
|
||||
};
|
||||
|
||||
// Desugar pattern matching in function arguments into a `let` expression.
|
||||
// For example, `{ a, b ? 2 }: a + b` is desugared into:
|
||||
// `arg: let a = arg.a; b = arg.b or 2; in a + b`
|
||||
let mut bindings: HashMap<_, _> = entries
|
||||
.into_iter()
|
||||
.map(|(k, default)| {
|
||||
// For each formal parameter, create a `Select` expression to extract it from the argument set.
|
||||
(
|
||||
k,
|
||||
ctx.new_expr(
|
||||
Select {
|
||||
expr: arg,
|
||||
attrpath: vec![Attr::Str(k)],
|
||||
default,
|
||||
}
|
||||
.to_ir(),
|
||||
),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
// If there's an alias (`... }@alias`), bind the alias name to the raw argument set.
|
||||
if let Some(alias) = alias {
|
||||
bindings.insert(alias, arg);
|
||||
}
|
||||
|
||||
// Downgrade body in Let scope and create Let expression
|
||||
let bindings_vec: Vec<ExprId> = bindings.values().copied().collect();
|
||||
let inner_body = ctx.with_let_scope(bindings, |ctx| self.body().unwrap().downgrade(ctx))?;
|
||||
|
||||
// Create Let expression to wrap the bindings
|
||||
body = ctx.new_expr(Let {
|
||||
bindings: bindings_vec,
|
||||
body: inner_body,
|
||||
}.to_ir());
|
||||
}
|
||||
}
|
||||
|
||||
let param = Param {
|
||||
ident,
|
||||
required,
|
||||
allowed,
|
||||
};
|
||||
// The function's body and parameters are now stored directly in the `Func` node.
|
||||
Ok(ctx.new_expr(Func { body, param, arg }.to_ir()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Downgrades a function application.
|
||||
/// In Nix, function application is left-associative, so `f a b` should be parsed as `((f a) b)`.
|
||||
/// Each Apply node represents a single function call with one argument.
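/// For example, `f a b` is parsed as `Apply(Apply(f, a), b)` and downgrades into nested `Call` nodes.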
|
||||
impl<Ctx: DowngradeContext> Downgrade<Ctx> for ast::Apply {
|
||||
fn downgrade(self, ctx: &mut Ctx) -> Result<ExprId> {
|
||||
let func = self.lambda().unwrap().downgrade(ctx)?;
|
||||
let arg = maybe_thunk(self.argument().unwrap(), ctx)?;
|
||||
Ok(ctx.new_expr(Call { func, arg }.to_ir()))
|
||||
}
|
||||
}
|
||||
303
nix-js/src/ir/utils.rs
Normal file
@@ -0,0 +1,303 @@
|
||||
use hashbrown::{HashMap, HashSet};
|
||||
use hashbrown::hash_map::Entry;
|
||||
use rnix::ast;
|
||||
|
||||
use crate::error::{Error, Result};
|
||||
use crate::ir::{Attr, AttrSet, ConcatStrings, ExprId, Ir, Select, Str, SymId};
|
||||
use crate::value::format_symbol;
|
||||
|
||||
use super::*;
|
||||
|
||||
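/// Downgrades an expression, wrapping the result in `Ir::Thunk` unless the expression is
/// already cheap to evaluate. Identifiers, literals, strings, and paths are downgraded
/// directly; everything else is deferred so it is only evaluated when forced.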
pub fn maybe_thunk(mut expr: ast::Expr, ctx: &mut impl DowngradeContext) -> Result<ExprId> {
|
||||
use ast::Expr::*;
|
||||
let expr = loop {
|
||||
expr = match expr {
|
||||
Paren(paren) => paren.expr().unwrap(),
|
||||
Root(root) => root.expr().unwrap(),
|
||||
expr => break expr,
|
||||
}
|
||||
};
|
||||
match expr {
|
||||
Error(error) => return Err(self::Error::downgrade_error(error.to_string())),
|
||||
Ident(ident) => return ident.downgrade(ctx),
|
||||
Literal(lit) => return lit.downgrade(ctx),
|
||||
Str(str) => return str.downgrade(ctx),
|
||||
Path(path) => return path.downgrade(ctx),
|
||||
|
||||
_ => (),
|
||||
}
|
||||
let id = match expr {
|
||||
Apply(apply) => apply.downgrade(ctx),
|
||||
Assert(assert) => assert.downgrade(ctx),
|
||||
IfElse(ifelse) => ifelse.downgrade(ctx),
|
||||
Select(select) => select.downgrade(ctx),
|
||||
Lambda(lambda) => lambda.downgrade(ctx),
|
||||
LegacyLet(let_) => let_.downgrade(ctx),
|
||||
LetIn(letin) => letin.downgrade(ctx),
|
||||
List(list) => list.downgrade(ctx),
|
||||
BinOp(op) => op.downgrade(ctx),
|
||||
AttrSet(attrs) => attrs.downgrade(ctx),
|
||||
UnaryOp(op) => op.downgrade(ctx),
|
||||
With(with) => with.downgrade(ctx),
|
||||
HasAttr(has) => has.downgrade(ctx),
|
||||
|
||||
_ => unreachable!(),
|
||||
}?;
|
||||
Ok(ctx.new_expr(Ir::Thunk(id)))
|
||||
}
|
||||
|
||||
/// Downgrades the entries of an attribute set.
|
||||
/// This handles `inherit` and `attrpath = value;` entries.
|
||||
pub fn downgrade_attrs(
|
||||
attrs: impl ast::HasEntry,
|
||||
ctx: &mut impl DowngradeContext,
|
||||
) -> Result<AttrSet> {
|
||||
let entries = attrs.entries();
|
||||
let mut attrs = AttrSet {
|
||||
stcs: HashMap::new(),
|
||||
dyns: Vec::new(),
|
||||
};
|
||||
|
||||
for entry in entries {
|
||||
match entry {
|
||||
ast::Entry::Inherit(inherit) => downgrade_inherit(inherit, &mut attrs.stcs, ctx)?,
|
||||
ast::Entry::AttrpathValue(value) => downgrade_attrpathvalue(value, &mut attrs, ctx)?,
|
||||
}
|
||||
}
|
||||
|
||||
Ok(attrs)
|
||||
}
|
||||
|
||||
/// Downgrades attribute set entries for a `let...in` expression.
|
||||
/// This is a stricter version of `downgrade_attrs` that disallows dynamic attributes,
|
||||
/// as `let` bindings must be statically known.
|
||||
pub fn downgrade_static_attrs(
|
||||
attrs: impl ast::HasEntry,
|
||||
ctx: &mut impl DowngradeContext,
|
||||
) -> Result<HashMap<SymId, ExprId>> {
|
||||
let entries = attrs.entries();
|
||||
let mut attrs = AttrSet {
|
||||
stcs: HashMap::new(),
|
||||
dyns: Vec::new(),
|
||||
};
|
||||
|
||||
for entry in entries {
|
||||
match entry {
|
||||
ast::Entry::Inherit(inherit) => downgrade_inherit(inherit, &mut attrs.stcs, ctx)?,
|
||||
ast::Entry::AttrpathValue(value) => {
|
||||
downgrade_static_attrpathvalue(value, &mut attrs, ctx)?
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(attrs.stcs)
|
||||
}
|
||||
|
||||
/// Downgrades an `inherit` statement.
|
||||
/// `inherit (from) a b;` is translated into `a = from.a; b = from.b;`.
|
||||
/// `inherit a b;` is translated into `a = a; b = b;` (i.e., bringing variables into scope).
|
||||
pub fn downgrade_inherit(
|
||||
inherit: ast::Inherit,
|
||||
stcs: &mut HashMap<SymId, ExprId>,
|
||||
ctx: &mut impl DowngradeContext,
|
||||
) -> Result<()> {
|
||||
// Downgrade the `from` expression if it exists.
|
||||
let from = if let Some(from) = inherit.from() {
|
||||
Some(from.expr().unwrap().downgrade(ctx)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
for attr in inherit.attrs() {
|
||||
let ident = match downgrade_attr(attr, ctx)? {
|
||||
Attr::Str(ident) => ident,
|
||||
_ => {
|
||||
// `inherit` does not allow dynamic attributes.
|
||||
return Err(Error::downgrade_error(
|
||||
"dynamic attributes not allowed in inherit".to_string(),
|
||||
));
|
||||
}
|
||||
};
|
||||
let expr = if let Some(expr) = from {
|
||||
ctx.new_expr(
|
||||
Select {
|
||||
expr,
|
||||
attrpath: vec![Attr::Str(ident)],
|
||||
default: None,
|
||||
}
|
||||
.to_ir(),
|
||||
)
|
||||
} else {
|
||||
ctx.lookup(ident)?
|
||||
};
|
||||
match stcs.entry(ident) {
|
||||
Entry::Occupied(occupied) => {
|
||||
return Err(Error::eval_error(format!(
|
||||
"attribute '{}' already defined",
|
||||
format_symbol(ctx.get_sym(*occupied.key()))
|
||||
)));
|
||||
}
|
||||
Entry::Vacant(vacant) => vacant.insert(expr),
|
||||
};
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Downgrades a single attribute key (part of an attribute path).
|
||||
/// An attribute can be a static identifier, an interpolated string, or a dynamic expression.
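/// For example, in `a."${b}".c`, `a` and `c` downgrade to `Attr::Str` and `"${b}"` to `Attr::Dynamic`.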
|
||||
pub fn downgrade_attr(attr: ast::Attr, ctx: &mut impl DowngradeContext) -> Result<Attr> {
|
||||
use ast::Attr::*;
|
||||
use ast::InterpolPart::*;
|
||||
match attr {
|
||||
Ident(ident) => Ok(Attr::Str(ctx.new_sym(ident.to_string()))),
|
||||
Str(string) => {
|
||||
let parts = string.normalized_parts();
|
||||
if parts.is_empty() {
|
||||
Ok(Attr::Str(ctx.new_sym("".to_string())))
|
||||
} else if parts.len() == 1 {
|
||||
// If the string has only one part, it's either a literal or a single interpolation.
|
||||
match parts.into_iter().next().unwrap() {
|
||||
Literal(ident) => Ok(Attr::Str(ctx.new_sym(ident))),
|
||||
Interpolation(interpol) => {
|
||||
Ok(Attr::Dynamic(interpol.expr().unwrap().downgrade(ctx)?))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// If the string has multiple parts, it's an interpolated string that must be concatenated.
|
||||
let parts = parts
|
||||
.into_iter()
|
||||
.map(|part| match part {
|
||||
Literal(lit) => Ok(ctx.new_expr(self::Str { val: lit }.to_ir())),
|
||||
Interpolation(interpol) => interpol.expr().unwrap().downgrade(ctx),
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
Ok(Attr::Dynamic(ctx.new_expr(ConcatStrings { parts }.to_ir())))
|
||||
}
|
||||
}
|
||||
Dynamic(dynamic) => Ok(Attr::Dynamic(dynamic.expr().unwrap().downgrade(ctx)?)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Downgrades an attribute path (e.g., `a.b."${c}".d`) into a `Vec<Attr>`.
|
||||
pub fn downgrade_attrpath(
|
||||
attrpath: ast::Attrpath,
|
||||
ctx: &mut impl DowngradeContext,
|
||||
) -> Result<Vec<Attr>> {
|
||||
attrpath
|
||||
.attrs()
|
||||
.map(|attr| downgrade_attr(attr, ctx))
|
||||
.collect::<Result<Vec<_>>>()
|
||||
}
|
||||
|
||||
/// Downgrades an `attrpath = value;` expression and inserts it into an `AttrSet`.
|
||||
pub fn downgrade_attrpathvalue(
|
||||
value: ast::AttrpathValue,
|
||||
attrs: &mut AttrSet,
|
||||
ctx: &mut impl DowngradeContext,
|
||||
) -> Result<()> {
|
||||
let path = downgrade_attrpath(value.attrpath().unwrap(), ctx)?;
|
||||
let value = maybe_thunk(value.value().unwrap(), ctx)?;
|
||||
attrs.insert(path, value, ctx)
|
||||
}
|
||||
|
||||
/// A stricter version of `downgrade_attrpathvalue` for `let...in` bindings.
|
||||
/// It ensures that the attribute path contains no dynamic parts.
|
||||
pub fn downgrade_static_attrpathvalue(
|
||||
value: ast::AttrpathValue,
|
||||
attrs: &mut AttrSet,
|
||||
ctx: &mut impl DowngradeContext,
|
||||
) -> Result<()> {
|
||||
let path = downgrade_attrpath(value.attrpath().unwrap(), ctx)?;
|
||||
if path.iter().any(|attr| matches!(attr, Attr::Dynamic(_))) {
|
||||
return Err(Error::downgrade_error(
|
||||
"dynamic attributes not allowed in let bindings".to_string(),
|
||||
));
|
||||
}
|
||||
let value = value.value().unwrap().downgrade(ctx)?;
|
||||
attrs.insert(path, value, ctx)
|
||||
}
|
||||
|
||||
/// Helper function to downgrade entries with `let`-binding semantics.
|
||||
/// This extracts common logic for both `rec` attribute sets and `let...in` expressions.
|
||||
///
|
||||
/// Returns a tuple of (binding slots, body result) where:
|
||||
/// - binding slots: pre-allocated expression slots for the bindings
|
||||
/// - body result: the result of calling `body_fn` in the let scope
|
||||
pub fn downgrade_let_bindings<Ctx, F, R>(
|
||||
entries: Vec<ast::Entry>,
|
||||
ctx: &mut Ctx,
|
||||
body_fn: F,
|
||||
) -> Result<(Vec<ExprId>, R)>
|
||||
where
|
||||
Ctx: DowngradeContext,
|
||||
F: FnOnce(&mut Ctx, &[SymId]) -> Result<R>,
|
||||
{
|
||||
// 1. Collect all top-level binding keys
|
||||
let mut binding_syms = HashSet::new();
|
||||
|
||||
for entry in &entries {
|
||||
match entry {
|
||||
ast::Entry::Inherit(inherit) => {
|
||||
for attr in inherit.attrs() {
|
||||
if let ast::Attr::Ident(ident) = attr {
|
||||
binding_syms.insert(ctx.new_sym(ident.to_string()));
|
||||
}
|
||||
}
|
||||
}
|
||||
ast::Entry::AttrpathValue(value) => {
|
||||
let attrpath = value.attrpath().unwrap();
|
||||
if let Some(first_attr) = attrpath.attrs().next()
|
||||
&& let ast::Attr::Ident(ident) = first_attr
|
||||
{
|
||||
binding_syms.insert(ctx.new_sym(ident.to_string()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let binding_keys: Vec<_> = binding_syms.into_iter().collect();
|
||||
|
||||
// 2. Reserve slots for bindings
|
||||
let slots_iter = ctx.reserve_slots(binding_keys.len());
|
||||
let slots_clone = slots_iter.clone();
|
||||
|
||||
// 3. Create let scope bindings
|
||||
let let_bindings: HashMap<_, _> = binding_keys.iter().copied().zip(slots_iter).collect();
|
||||
|
||||
// 4. Process entries in let scope
|
||||
let body = ctx.with_let_scope(let_bindings, |ctx| {
|
||||
// Collect all bindings in a temporary AttrSet
|
||||
let mut temp_attrs = AttrSet {
|
||||
stcs: HashMap::new(),
|
||||
dyns: Vec::new(),
|
||||
};
|
||||
|
||||
for entry in entries {
|
||||
match entry {
|
||||
ast::Entry::Inherit(inherit) => {
|
||||
downgrade_inherit(inherit, &mut temp_attrs.stcs, ctx)?;
|
||||
}
|
||||
ast::Entry::AttrpathValue(value) => {
|
||||
downgrade_static_attrpathvalue(value, &mut temp_attrs, ctx)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fill pre-allocated slots with top-level bindings
|
||||
for (sym, slot) in binding_keys.iter().copied().zip(slots_clone.clone()) {
|
||||
if let Some(&expr) = temp_attrs.stcs.get(&sym) {
|
||||
ctx.replace_expr(slot, Ir::Thunk(expr));
|
||||
} else {
|
||||
return Err(Error::downgrade_error(format!(
|
||||
"binding '{}' not found",
|
||||
format_symbol(ctx.get_sym(sym))
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
// Call the body function with the binding keys
|
||||
body_fn(ctx, &binding_keys)
|
||||
})?;
|
||||
|
||||
// 5. Return the slots and body
|
||||
Ok((slots_clone.collect(), body))
|
||||
}
|
||||
9
nix-js/src/lib.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
pub mod codegen;
|
||||
pub mod context;
|
||||
pub mod error;
|
||||
pub mod ir;
|
||||
pub mod runtime;
|
||||
pub mod value;
|
||||
|
||||
#[global_allocator]
|
||||
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
||||
149
nix-js/src/runtime.rs
Normal file
@@ -0,0 +1,149 @@
|
||||
use std::cell::RefCell;
|
||||
use std::sync::Once;
|
||||
|
||||
use crate::error::{Error, Result};
|
||||
use crate::value::{AttrSet, Const, List, Symbol, Value};
|
||||
|
||||
static INIT: Once = Once::new();
|
||||
|
||||
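// A V8 isolate can only be used by one thread at a time, so each thread keeps its own;
// the platform itself is initialized exactly once per process via `INIT`.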
thread_local! {
|
||||
static ISOLATE: RefCell<v8::OwnedIsolate> =
|
||||
RefCell::new(v8::Isolate::new(Default::default()));
|
||||
}
|
||||
|
||||
pub fn run(script: &str) -> Result<Value> {
|
||||
INIT.call_once(|| {
|
||||
v8::V8::initialize_platform(v8::new_default_platform(0, false).make_shared());
|
||||
v8::V8::initialize();
|
||||
});
|
||||
|
||||
ISOLATE.with_borrow_mut(|isolate| run_impl(script, isolate))
|
||||
}
|
||||
|
||||
fn run_impl(script: &str, isolate: &mut v8::Isolate) -> Result<Value> {
|
||||
let handle_scope = std::pin::pin!(v8::HandleScope::new(isolate));
|
||||
let handle_scope = &mut handle_scope.init();
|
||||
let context = v8::Context::new(handle_scope, v8::ContextOptions::default());
|
||||
let scope = &mut v8::ContextScope::new(handle_scope, context);
|
||||
|
||||
let runtime_code = include_str!("./runtime/runtime.js");
|
||||
let runtime_source = v8::String::new(scope, runtime_code).unwrap();
|
||||
let runtime_script = v8::Script::compile(scope, runtime_source, None).unwrap();
|
||||
|
||||
if runtime_script.run(scope).is_none() {
|
||||
return Err(Error::eval_error("Failed to initialize runtime".to_string()));
|
||||
}
|
||||
|
||||
let source = v8::String::new(scope, script).unwrap();
|
||||
|
||||
// Use TryCatch to capture JavaScript exceptions
|
||||
let try_catch = std::pin::pin!(v8::TryCatch::new(scope));
|
||||
let try_catch = &mut try_catch.init();
|
||||
let script = match v8::Script::compile(try_catch, source, None) {
|
||||
Some(script) => script,
|
||||
None => {
|
||||
if let Some(exception) = try_catch.exception() {
|
||||
let exception_string = exception
|
||||
.to_string(try_catch)
|
||||
.unwrap()
|
||||
.to_rust_string_lossy(try_catch);
|
||||
return Err(Error::eval_error(format!("Compilation error: {}", exception_string)));
|
||||
} else {
|
||||
return Err(Error::eval_error("Unknown compilation error".to_string()));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
match script.run(try_catch) {
|
||||
Some(result) => Ok(to_value(result, try_catch)),
|
||||
None => {
|
||||
if let Some(exception) = try_catch.exception() {
|
||||
let exception_string = exception
|
||||
.to_string(try_catch)
|
||||
.unwrap()
|
||||
.to_rust_string_lossy(try_catch);
|
||||
Err(Error::eval_error(format!("Runtime error: {}", exception_string)))
|
||||
} else {
|
||||
Err(Error::eval_error("Unknown runtime error".to_string()))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn to_value<'a>(
|
||||
val: v8::Local<'a, v8::Value>,
|
||||
scope: &v8::PinnedRef<'a, v8::HandleScope>,
|
||||
) -> Value {
|
||||
match () {
|
||||
_ if val.is_int32() => {
|
||||
let val = val.to_int32(scope).unwrap().value();
|
||||
Value::Const(Const::Int(val as i64))
|
||||
}
|
||||
_ if val.is_big_int() => {
|
||||
let (val, true) = val.to_big_int(scope).unwrap().i64_value() else {
|
||||
todo!()
|
||||
};
|
||||
Value::Const(Const::Int(val))
|
||||
}
|
||||
_ if val.is_number() => {
|
||||
let val = val.to_number(scope).unwrap().value();
|
||||
Value::Const(Const::Float(val))
|
||||
}
|
||||
_ if val.is_true() => Value::Const(Const::Bool(true)),
|
||||
_ if val.is_false() => Value::Const(Const::Bool(false)),
|
||||
_ if val.is_null() => Value::Const(Const::Null),
|
||||
_ if val.is_string() => {
|
||||
let val = val.to_string(scope).unwrap();
|
||||
Value::String(val.to_rust_string_lossy(scope))
|
||||
}
|
||||
_ if val.is_array() => {
|
||||
let val = val.try_cast::<v8::Array>().unwrap();
|
||||
let len = val.length();
|
||||
let list = (0..len)
|
||||
.map(|i| {
|
||||
let val = val.get_index(scope, i).unwrap();
|
||||
to_value(val, scope)
|
||||
})
|
||||
.collect();
|
||||
Value::List(List::new(list))
|
||||
}
|
||||
_ if val.is_object() => {
|
||||
let val = val.to_object(scope).unwrap();
|
||||
let keys = val
|
||||
.get_own_property_names(scope, v8::GetPropertyNamesArgsBuilder::new().build())
|
||||
.unwrap();
|
||||
let len = keys.length();
|
||||
let attrs = (0..len)
|
||||
.map(|i| {
|
||||
let key = keys.get_index(scope, i).unwrap();
|
||||
let val = val.get(scope, key).unwrap();
|
||||
let key = key.to_rust_string_lossy(scope);
|
||||
(Symbol::new(key), to_value(val, scope))
|
||||
})
|
||||
.collect();
|
||||
Value::AttrSet(AttrSet::new(attrs))
|
||||
}
|
||||
_ if val.is_function_template() => Value::PrimOp,
|
||||
_ if val.is_function() => Value::Func,
|
||||
_ => todo!("{}", val.type_repr()),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn to_value_working() {
|
||||
assert_eq!(
|
||||
run("({
|
||||
test: [1, 9223372036854775807n, true, false, 'hello world!']
|
||||
})").unwrap(),
|
||||
Value::AttrSet(AttrSet::new(std::collections::BTreeMap::from([(
|
||||
Symbol::from("test"),
|
||||
Value::List(List::new(vec![
|
||||
Value::Const(Const::Int(1)),
|
||||
Value::Const(Const::Int(9223372036854775807)),
|
||||
Value::Const(Const::Bool(true)),
|
||||
Value::Const(Const::Bool(false)),
|
||||
Value::String("hello world!".to_string())
|
||||
]))
|
||||
)])))
|
||||
);
|
||||
}
|
||||
145
nix-js/src/runtime/runtime.js
Normal file
@@ -0,0 +1,145 @@
|
||||
const NixRuntime = (() => {
|
||||
const IS_THUNK = Symbol("is_thunk");
|
||||
|
||||
class NixThunk {
|
||||
constructor(func) {
|
||||
this[IS_THUNK] = true;
|
||||
this.func = func;
|
||||
this.is_forced = false;
|
||||
this.result = null;
|
||||
}
|
||||
}
|
||||
|
||||
const is_thunk = (value) => {
|
||||
return value !== null && typeof value === "object" && value[IS_THUNK] === true;
|
||||
};
|
||||
|
||||
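// Forces a thunk to its final value, memoizing the result and dropping the closure
// so that captured environments can be garbage-collected.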
const force = (value) => {
|
||||
if (!is_thunk(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (value.is_forced) {
|
||||
return value.result;
|
||||
}
|
||||
|
||||
const result = force(value.func());
|
||||
value.result = result;
|
||||
value.is_forced = true;
|
||||
|
||||
value.func = null;
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
const create_thunk = (func) => new NixThunk(func);
|
||||
|
||||
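// Builds a Proxy-backed attribute set whose entries are computed on first access and
// then memoized in `cache`.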
const create_lazy_set = (definitions) => {
|
||||
const cache = new Map();
|
||||
return new Proxy({}, {
|
||||
get: (_target, key) => {
|
||||
if (cache.has(key)) {
|
||||
return cache.get(key);
|
||||
}
|
||||
|
||||
if (key in definitions) {
|
||||
const value = definitions[key]();
|
||||
cache.set(key, value);
|
||||
return value;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const trace = (msg, value) => {
|
||||
console.log(`[TRACE] ${msg}`);
|
||||
return force(value);
|
||||
};
|
||||
|
||||
const select = (obj, key) => {
|
||||
const forced_obj = force(obj);
|
||||
const forced_key = force(key);
|
||||
|
||||
if (forced_obj === null || forced_obj === undefined) {
|
||||
throw new Error(`Cannot select '${forced_key}' from null/undefined`);
|
||||
}
|
||||
|
||||
if (!(forced_key in forced_obj)) {
|
||||
throw new Error(`Attribute '${forced_key}' not found`);
|
||||
}
|
||||
|
||||
return forced_obj[forced_key];
|
||||
};
|
||||
|
||||
const select_with_default = (obj, key, default_val) => {
|
||||
const forced_obj = force(obj);
|
||||
const forced_key = force(key);
|
||||
|
||||
if (forced_obj === null || forced_obj === undefined) {
|
||||
return force(default_val);
|
||||
}
|
||||
|
||||
if (!(forced_key in forced_obj)) {
|
||||
return force(default_val);
|
||||
}
|
||||
|
||||
return forced_obj[forced_key];
|
||||
};
|
||||
|
||||
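// Checks a call against a formal-parameter pattern: every `required` key must be present,
// and, unless the pattern had `...` (in which case `allowed` is null), no extra keys may appear.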
const validate_params = (arg, required, allowed) => {
|
||||
const forced_arg = force(arg);
|
||||
|
||||
// Check required parameters
|
||||
if (required) {
|
||||
for (const key of required) {
|
||||
if (!Object.prototype.hasOwnProperty.call(forced_arg, key)) {
|
||||
throw new Error(`Function called without required argument '${key}'`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check allowed parameters (if not using ellipsis)
|
||||
if (allowed) {
|
||||
const allowed_set = new Set(allowed);
|
||||
for (const key in forced_arg) {
|
||||
if (!allowed_set.has(key)) {
|
||||
throw new Error(`Function called with unexpected argument '${key}'`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return forced_arg;
|
||||
};
|
||||
|
||||
const op = {
|
||||
add: (a, b) => force(a) + force(b),
|
||||
sub: (a, b) => force(a) - force(b),
|
||||
mul: (a, b) => force(a) * force(b),
|
||||
div: (a, b) => force(a) / force(b),
|
||||
|
||||
eq: (a, b) => force(a) === force(b),
|
||||
neq: (a, b) => force(a) !== force(b),
|
||||
lt: (a, b) => force(a) < force(b),
|
||||
lte: (a, b) => force(a) <= force(b),
|
||||
gt: (a, b) => force(a) > force(b),
|
||||
gte: (a, b) => force(a) >= force(b),
|
||||
|
||||
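// `&&` / `||` rely on JS short-circuiting, so the right-hand thunk is only forced when needed.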
band: (a, b) => force(a) && force(b),
|
||||
bor: (a, b) => force(a) || force(b),
|
||||
bnot: (a) => !force(a)
|
||||
};
|
||||
|
||||
return {
|
||||
create_thunk,
|
||||
force,
|
||||
is_thunk,
|
||||
create_lazy_set,
|
||||
trace,
|
||||
select,
|
||||
select_with_default,
|
||||
validate_params,
|
||||
op
|
||||
};
|
||||
})();
|
||||
211
nix-js/src/value.rs
Normal file
@@ -0,0 +1,211 @@
|
||||
use core::fmt::{Debug, Display, Formatter, Result as FmtResult};
|
||||
use core::hash::Hash;
|
||||
use core::ops::Deref;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::collections::BTreeMap;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use derive_more::{Constructor, IsVariant, Unwrap};
|
||||
use regex::Regex;
|
||||
|
||||
/// Represents a constant, primitive value in Nix.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, IsVariant, Unwrap)]
|
||||
pub enum Const {
|
||||
/// A boolean value (`true` or `false`).
|
||||
Bool(bool),
|
||||
/// A 64-bit signed integer.
|
||||
Int(i64),
|
||||
/// A 64-bit floating-point number.
|
||||
Float(f64),
|
||||
/// The `null` value.
|
||||
Null,
|
||||
}
|
||||
|
||||
impl Display for Const {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
|
||||
use Const::*;
|
||||
match self {
|
||||
Int(x) => write!(f, "{x}"),
|
||||
Float(x) => write!(f, "{x}"),
|
||||
Bool(x) => write!(f, "{x}"),
|
||||
Null => write!(f, "null"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<bool> for Const {
|
||||
fn from(value: bool) -> Self {
|
||||
Const::Bool(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<i64> for Const {
|
||||
fn from(value: i64) -> Self {
|
||||
Const::Int(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<f64> for Const {
|
||||
fn from(value: f64) -> Self {
|
||||
Const::Float(value)
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a Nix symbol, which is used as a key in attribute sets.
|
||||
#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Constructor)]
|
||||
pub struct Symbol(String);
|
||||
|
||||
impl<T: Into<String>> From<T> for Symbol {
|
||||
fn from(value: T) -> Self {
|
||||
Symbol(value.into())
|
||||
}
|
||||
}
|
||||
|
||||
/// Formats a string slice as a Nix symbol, quoting it if necessary.
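/// For example, `foo-bar` stays bare, while `foo.bar` is rendered as `"foo.bar"`.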
|
||||
pub fn format_symbol<'a>(sym: impl Into<Cow<'a, str>>) -> Cow<'a, str> {
|
||||
let sym = sym.into();
|
||||
if REGEX.is_match(&sym) {
|
||||
sym
|
||||
} else {
|
||||
Cow::Owned(format!(r#""{sym}""#))
|
||||
}
|
||||
}
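// A minimal, hypothetical check of the quoting rule above; it is not part of this
// commit and only assumes the `format_symbol` and `REGEX` definitions in this module.
#[test]
fn format_symbol_quotes_non_identifiers() {
    // Matches `^[a-zA-Z_][a-zA-Z0-9_'-]*$`, so it is returned unquoted.
    assert_eq!(format_symbol("foo-bar'"), "foo-bar'");
    // A dot is not allowed in a bare identifier, so the symbol gets quoted.
    assert_eq!(format_symbol("foo.bar"), r#""foo.bar""#);
    // The empty string never matches and is quoted as well.
    assert_eq!(format_symbol(""), r#""""#);
}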
|
||||
|
||||
impl Display for Symbol {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
|
||||
if self.normal() {
|
||||
write!(f, "{}", self.0)
|
||||
} else {
|
||||
write!(f, r#""{}""#, self.0)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static REGEX: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r"^[a-zA-Z_][a-zA-Z0-9_'-]*$").unwrap());
|
||||
impl Symbol {
|
||||
/// Checks if the symbol is a "normal" identifier that doesn't require quotes.
|
||||
fn normal(&self) -> bool {
|
||||
REGEX.is_match(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Symbol {
|
||||
type Target = str;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl Symbol {
|
||||
/// Consumes the `Symbol`, returning its inner `String`.
|
||||
pub fn into_inner(self) -> String {
|
||||
self.0
|
||||
}
|
||||
|
||||
/// Returns a reference to the inner `String`.
|
||||
pub fn as_inner(&self) -> &String {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a Nix attribute set, which is a map from symbols to values.
|
||||
#[derive(Constructor, Clone, PartialEq)]
|
||||
pub struct AttrSet {
|
||||
data: BTreeMap<Symbol, Value>,
|
||||
}
|
||||
|
||||
impl Debug for AttrSet {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
|
||||
use Value::*;
|
||||
write!(f, "{{")?;
|
||||
for (k, v) in self.data.iter() {
|
||||
write!(f, " {k:?} = ")?;
|
||||
match v {
|
||||
List(_) => write!(f, "[ ... ];")?,
|
||||
AttrSet(_) => write!(f, "{{ ... }};")?,
|
||||
v => write!(f, "{v:?};")?,
|
||||
}
|
||||
}
|
||||
write!(f, " }}")
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for AttrSet {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
|
||||
use Value::*;
|
||||
write!(f, "{{ ")?;
|
||||
let mut first = true;
|
||||
for (k, v) in self.data.iter() {
|
||||
if !first {
|
||||
write!(f, "; ")?;
|
||||
}
|
||||
write!(f, "{k} = ")?;
|
||||
match v {
|
||||
AttrSet(_) => write!(f, "{{ ... }}"),
|
||||
List(_) => write!(f, "[ ... ]"),
|
||||
v => write!(f, "{v}"),
|
||||
}?;
|
||||
first = false;
|
||||
}
|
||||
write!(f, " }}")
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a Nix list, which is a vector of values.
|
||||
#[derive(Constructor, Clone, Debug, PartialEq)]
|
||||
pub struct List {
|
||||
data: Vec<Value>,
|
||||
}
|
||||
|
||||
impl Display for List {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
|
||||
write!(f, "[ ")?;
|
||||
for v in self.data.iter() {
|
||||
write!(f, "{v} ")?;
|
||||
}
|
||||
write!(f, "]")
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents any possible Nix value that can be returned from an evaluation.
|
||||
#[derive(IsVariant, Unwrap, Clone, Debug, PartialEq)]
|
||||
pub enum Value {
|
||||
/// A constant value (int, float, bool, null).
|
||||
Const(Const),
|
||||
/// A string value.
|
||||
String(String),
|
||||
/// An attribute set.
|
||||
AttrSet(AttrSet),
|
||||
/// A list.
|
||||
List(List),
|
||||
/// A thunk, representing a delayed computation.
|
||||
Thunk,
|
||||
/// A function (lambda).
|
||||
Func,
|
||||
/// A primitive (built-in) operation.
|
||||
PrimOp,
|
||||
/// A partially applied primitive operation.
|
||||
PrimOpApp,
|
||||
/// A marker for a value that has been seen before during serialization, to break cycles.
|
||||
/// This is used to prevent infinite recursion when printing or serializing cyclic data structures.
|
||||
Repeated,
|
||||
}
|
||||
|
||||
impl Display for Value {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
|
||||
use Value::*;
|
||||
match self {
|
||||
Const(x) => write!(f, "{x}"),
|
||||
String(x) => write!(f, r#""{x}""#),
|
||||
AttrSet(x) => write!(f, "{x}"),
|
||||
List(x) => write!(f, "{x}"),
|
||||
Thunk => write!(f, "<CODE>"),
|
||||
Func => write!(f, "<LAMBDA>"),
|
||||
PrimOp => write!(f, "<PRIMOP>"),
|
||||
PrimOpApp => write!(f, "<PRIMOP-APP>"),
|
||||
Repeated => write!(f, "<REPEATED>"),
|
||||
}
|
||||
}
|
||||
}
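// A small, hypothetical illustration of the `Display` output above; it is not part of
// this commit and only assumes the definitions in this module.
#[test]
fn display_formats_nested_values_shallowly() {
    let v = Value::AttrSet(AttrSet::new(BTreeMap::from([
        (Symbol::from("a"), Value::Const(Const::Int(1))),
        (
            Symbol::from("xs"),
            Value::List(List::new(vec![Value::Const(Const::Bool(true))])),
        ),
    ])));
    // Nested lists and attribute sets are elided rather than printed in full.
    assert_eq!(v.to_string(), "{ a = 1; xs = [ ... ] }");
}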
|
||||