refactor: handle derivation generation on Rust side

This commit is contained in:
2026-02-15 18:16:50 +08:00
parent e357678d70
commit 7836f8c869
16 changed files with 745 additions and 607 deletions

19
Cargo.lock generated
View File

@@ -1781,6 +1781,12 @@ dependencies = [
"regex-automata",
]
[[package]]
name = "md5"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
[[package]]
name = "memchr"
version = "2.8.0"
@@ -1924,6 +1930,7 @@ dependencies = [
"hashbrown 0.16.1",
"hex",
"itertools 0.14.0",
"md5",
"miette",
"mimalloc",
"nix-compat",
@@ -1939,6 +1946,7 @@ dependencies = [
"rustyline",
"serde",
"serde_json",
"sha1",
"sha2",
"string-interner",
"tap",
@@ -2895,6 +2903,17 @@ dependencies = [
"v8",
]
[[package]]
name = "sha1"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]]
name = "sha2"
version = "0.10.9"

View File

@@ -34,6 +34,7 @@
just
samply
jq
tokei
nodejs
nodePackages.npm

View File

@@ -36,6 +36,8 @@ deno_error = "0.7"
nix-nar = "0.3"
sha2 = "0.10"
sha1 = "0.10"
md5 = "0.7"
hex = "0.4"
base64 = "0.22"

View File

@@ -1,6 +1,7 @@
import { mkPos } from "../helpers";
import { createThunk } from "../thunk";
import { forceAttrs, forceFunction, forceList, forceStringValue } from "../type-assert";
import type { NixAttrs, NixList, NixValue } from "../types";
import { ATTR_POSITIONS, type NixAttrs, type NixList, type NixValue } from "../types";
export const attrNames = (set: NixValue): string[] => Object.keys(forceAttrs(set)).sort();
@@ -139,7 +140,7 @@ export const unsafeGetAttrPos =
return null;
}
const positions = (attrs as NixAttrs & Record<symbol, unknown>)[Nix.ATTR_POSITIONS] as
const positions = (attrs as NixAttrs & Record<symbol, unknown>)[ATTR_POSITIONS] as
| Record<string, string>
| undefined;
if (!positions || !(name in positions)) {
@@ -147,5 +148,5 @@ export const unsafeGetAttrPos =
}
const span = positions[name];
return Nix.mkPos(span);
return mkPos(span);
};

View File

@@ -1,7 +1,6 @@
import {
addBuiltContext,
addDrvDeepContext,
extractInputDrvsAndSrcs,
mkStringWithContext,
type NixStringContext,
} from "../string-context";
@@ -10,8 +9,6 @@ import { forceAttrs, forceList, forceStringNoCtx, forceStringValue } from "../ty
import type { NixAttrs, NixValue } from "../types";
import { coerceToString, nixValueToJson, StringCoercionMode } from "./conversion";
const drvHashCache = new Map<string, string>();
export interface OutputInfo {
path: string;
hashAlgo: string;
@@ -190,13 +187,6 @@ const extractArgs = (attrs: NixAttrs, outContext: NixStringContext): string[] =>
return argsList.map((a) => coerceToString(a, StringCoercionMode.ToString, true, outContext));
};
/**
 * Derives the store-path name for one output of a derivation.
 * The default "out" output uses the bare derivation name; every other
 * output name is appended with a dash (e.g. "hello-1.0-dev").
 */
const outputPathName = (drvName: string, output: string): string =>
  output === "out" ? drvName : `${drvName}-${output}`;
const structuredAttrsExcludedKeys = new Set([
"__structuredAttrs",
"__ignoreNulls",
@@ -369,134 +359,33 @@ export const derivationStrict = (args: NixValue): NixAttrs => {
const drvArgs = extractArgs(attrs, collectedContext);
const env = extractEnv(attrs, structuredAttrs, ignoreNulls, collectedContext, drvName);
const { inputDrvs, inputSrcs } = extractInputDrvsAndSrcs(collectedContext);
const envEntries: [string, string][] = Array.from(env.entries());
const contextArray: string[] = Array.from(collectedContext);
const collectDrvReferences = (): string[] => {
const refs = new Set<string>();
for (const src of inputSrcs) {
refs.add(src);
}
for (const drvPath of inputDrvs.keys()) {
refs.add(drvPath);
}
return Array.from(refs).sort();
};
let outputInfos: Map<string, OutputInfo>;
let drvPath: string;
if (fixedOutputInfo) {
const pathName = outputPathName(drvName, "out");
const outPath = Deno.core.ops.op_make_fixed_output_path(
fixedOutputInfo.hashAlgo,
fixedOutputInfo.hash,
fixedOutputInfo.hashMode,
pathName,
);
const hashAlgoPrefix = fixedOutputInfo.hashMode === "recursive" ? "r:" : "";
outputInfos = new Map([
[
"out",
{
path: outPath,
hashAlgo: hashAlgoPrefix + fixedOutputInfo.hashAlgo,
hash: fixedOutputInfo.hash,
},
],
]);
env.set("out", outPath);
const finalDrv: DerivationData = {
const rustResult: {
drvPath: string;
outputs: [string, string][];
} = Deno.core.ops.op_finalize_derivation({
name: drvName,
outputs: outputInfos,
inputDrvs,
inputSrcs,
platform,
builder,
args: drvArgs,
env,
};
const finalAterm = generateAterm(finalDrv);
drvPath = Deno.core.ops.op_write_derivation(drvName, finalAterm, collectDrvReferences());
const fixedHashFingerprint = `fixed:out:${hashAlgoPrefix}${fixedOutputInfo.hashAlgo}:${fixedOutputInfo.hash}:${outPath}`;
const fixedModuloHash = Deno.core.ops.op_sha256_hex(fixedHashFingerprint);
drvHashCache.set(drvPath, fixedModuloHash);
} else {
const maskedOutputs = new Map<string, OutputInfo>(
outputs.map((o) => [
o,
{
path: "",
hashAlgo: "",
hash: "",
},
]),
);
const maskedEnv = new Map(env);
for (const output of outputs) {
maskedEnv.set(output, "");
}
const maskedDrv: DerivationData = {
name: drvName,
outputs: maskedOutputs,
inputDrvs,
inputSrcs,
platform,
builder,
outputs,
args: drvArgs,
env: maskedEnv,
};
const inputDrvHashes = new Map<string, string>();
for (const [drvPath, outputNames] of inputDrvs) {
const cachedHash = drvHashCache.get(drvPath);
if (!cachedHash) {
throw new Error(`Missing modulo hash for input derivation: ${drvPath}`);
}
inputDrvHashes.set(cachedHash, Array.from(outputNames).join(","));
}
const maskedAterm = generateAtermModulo(maskedDrv, inputDrvHashes);
const drvModuloHash = Deno.core.ops.op_sha256_hex(maskedAterm);
outputInfos = new Map<string, OutputInfo>();
for (const outputName of outputs) {
const pathName = outputPathName(drvName, outputName);
const outPath = Deno.core.ops.op_make_store_path(`output:${outputName}`, drvModuloHash, pathName);
outputInfos.set(outputName, {
path: outPath,
hashAlgo: "",
hash: "",
env: envEntries,
context: contextArray,
fixedOutput: fixedOutputInfo,
});
env.set(outputName, outPath);
}
const finalDrv: DerivationData = {
...maskedDrv,
outputs: outputInfos,
env,
};
const finalAterm = generateAterm(finalDrv);
drvPath = Deno.core.ops.op_write_derivation(drvName, finalAterm, collectDrvReferences());
const finalAtermModulo = generateAtermModulo(finalDrv, inputDrvHashes);
const cachedModuloHash = Deno.core.ops.op_sha256_hex(finalAtermModulo);
drvHashCache.set(drvPath, cachedModuloHash);
}
const result: NixAttrs = {};
const drvPathContext = new Set<string>();
addDrvDeepContext(drvPathContext, drvPath);
result.drvPath = mkStringWithContext(drvPath, drvPathContext);
addDrvDeepContext(drvPathContext, rustResult.drvPath);
result.drvPath = mkStringWithContext(rustResult.drvPath, drvPathContext);
for (const [outputName, outputInfo] of outputInfos.entries()) {
for (const [outputName, outputPath] of rustResult.outputs) {
const outputContext = new Set<string>();
addBuiltContext(outputContext, drvPath, outputName);
result[outputName] = mkStringWithContext(outputInfo.path, outputContext);
addBuiltContext(outputContext, rustResult.drvPath, outputName);
result[outputName] = mkStringWithContext(outputPath, outputContext);
}
return result;

View File

@@ -209,7 +209,7 @@ export const genericClosure = (args: NixValue): NixValue => {
export const outputOf =
(_drv: NixValue) =>
(_out: NixValue): never => {
throw new Error("Not implemented: outputOf");
throw new Error("Not implemented: outputOf (part of dynamic-derivation)");
};
export const parseDrvName = (s: NixValue): NixAttrs => {
@@ -320,8 +320,9 @@ export const splitVersion = (s: NixValue): NixValue => {
return components;
};
export const traceVerbose = (_e1: NixValue, _e2: NixValue): never => {
throw new Error("Not implemented: traceVerbose");
/**
 * builtins.traceVerbose: upstream Nix logs e1 when --trace-verbose is set,
 * then returns e2. The logging half is not implemented here; we only force
 * and return the second argument, which matches the no-flag behaviour.
 */
export const traceVerbose = (_e1: NixValue, e2: NixValue): NixStrictValue => {
// TODO: implement traceVerbose
return force(e2)
};
export const tryEval = (e: NixValue): { success: NixBool; value: NixStrictValue } => {

View File

@@ -146,52 +146,3 @@ export const parseContextToInfoMap = (context: NixStringContext): Map<string, Pa
return result;
};
/**
 * Splits a string context into derivation inputs.
 *
 * Each encoded context element is decoded and routed:
 * - "opaque"  -> a plain store path, recorded as an input source;
 * - "drvDeep" -> the full closure of a .drv is computed on the Rust side
 *                (op_compute_fs_closure) and merged into both maps;
 * - "built"   -> a single (drvPath, output) pair is added to inputDrvs.
 *
 * Returns { inputDrvs: drvPath -> set of output names, inputSrcs: store paths }.
 */
export const extractInputDrvsAndSrcs = (
context: NixStringContext,
): { inputDrvs: Map<string, Set<string>>; inputSrcs: Set<string> } => {
const inputDrvs = new Map<string, Set<string>>();
const inputSrcs = new Set<string>();
for (const encoded of context) {
const elem = decodeContextElem(encoded);
switch (elem.type) {
case "opaque":
// A source path referenced verbatim by the string.
inputSrcs.add(elem.path);
break;
case "drvDeep": {
// "=drvPath" context: depend on the whole closure of the derivation.
// The closure walk (reading/parsing .drv files) happens in Rust.
const closure: {
input_drvs: [string, string[]][];
input_srcs: string[];
} = Deno.core.ops.op_compute_fs_closure(elem.drvPath);
for (const src of closure.input_srcs) {
inputSrcs.add(src);
}
for (const [drvPath, outputs] of closure.input_drvs) {
// Merge outputs into any set already collected for this drv.
let existingOutputs = inputDrvs.get(drvPath);
if (!existingOutputs) {
existingOutputs = new Set<string>();
inputDrvs.set(drvPath, existingOutputs);
}
for (const output of outputs) {
existingOutputs.add(output);
}
}
break;
}
case "built": {
// "!output!drvPath" context: depend on one named output of a drv.
let outputs = inputDrvs.get(elem.drvPath);
if (!outputs) {
outputs = new Set<string>();
inputDrvs.set(elem.drvPath, outputs);
}
outputs.add(elem.output);
break;
}
}
}
return { inputDrvs, inputSrcs };
};

View File

@@ -1,33 +1,70 @@
import type { NixRuntime } from "..";
import type { FetchTarballResult, FetchUrlResult, FetchGitResult } from "../builtins/io";
import type { FetchGitResult, FetchTarballResult, FetchUrlResult } from "../builtins/io";
declare global {
var Nix: NixRuntime;
namespace Deno {
namespace core {
namespace ops {
function op_resolve_path(currentDir: string, path: string): string;
function op_import(path: string): string;
function op_scoped_import(path: string, scopeKeys: string[]): string;
function op_resolve_path(currentDir: string, path: string): string;
function op_read_file(path: string): string;
function op_read_file_type(path: string): string;
function op_read_dir(path: string): Record<string, string>;
function op_path_exists(path: string): boolean;
function op_sha256_hex(data: string): string;
function op_walk_dir(path: string): [string, string][];
function op_make_placeholder(output: string): string;
function op_store_path(path: string): string;
function op_parse_hash(hashStr: string, algo: string | null): { hex: string; algo: string };
function op_add_path(
path: string,
name: string | null,
recursive: boolean,
sha256: string | null,
): string;
function op_add_filtered_path(
path: string,
name: string | null,
recursive: boolean,
sha256: string | null,
includePaths: string[],
): string;
function op_decode_span(span: string): {
file: string | null;
line: number | null;
column: number | null;
};
function op_make_store_path(ty: string, hashHex: string, name: string): string;
function op_parse_hash(hashStr: string, algo: string | null): { hex: string; algo: string };
function op_make_fixed_output_path(
hashAlgo: string,
hash: string,
hashMode: string,
name: string,
): string;
function op_to_file(name: string, contents: string, references: string[]): string;
function op_copy_path_to_store(path: string): string;
function op_get_env(key: string): string;
function op_match(regex: string, text: string): (string | null)[] | null;
function op_split(regex: string, text: string): (string | (string | null)[])[];
function op_from_json(json: string): unknown;
function op_from_toml(toml: string): unknown;
function op_finalize_derivation(input: {
name: string;
builder: string;
platform: string;
outputs: string[];
args: string[];
env: [string, string][];
context: string[];
fixedOutput: { hashAlgo: string; hash: string; hashMode: string } | null;
}): { drvPath: string; outputs: [string, string][] };
function op_fetch_url(
url: string,
expectedHash: string | null,
@@ -48,34 +85,6 @@ declare global {
allRefs: boolean,
name: string | null,
): FetchGitResult;
function op_add_path(
path: string,
name: string | null,
recursive: boolean,
sha256: string | null,
): string;
function op_store_path(path: string): string;
function op_to_file(name: string, contents: string, references: string[]): string;
function op_write_derivation(drvName: string, aterm: string, references: string[]): string;
function op_read_derivation_outputs(drvPath: string): string[];
function op_compute_fs_closure(drvPath: string): {
input_drvs: [string, string[]][];
input_srcs: string[];
};
function op_copy_path_to_store(path: string): string;
function op_get_env(key: string): string;
function op_walk_dir(path: string): [string, string][];
function op_add_filtered_path(
path: string,
name: string | null,
recursive: boolean,
sha256: string | null,
includePaths: string[],
): string;
function op_match(regex: string, text: string): (string | null)[] | null;
function op_split(regex: string, text: string): (string | (string | null)[])[];
function op_from_json(json: string): unknown;
function op_from_toml(toml: string): unknown;
}
}
}

145
nix-js/src/derivation.rs Normal file
View File

@@ -0,0 +1,145 @@
use std::collections::{BTreeMap, BTreeSet};

/// Per-output metadata as it appears in a derivation ATerm: the store
/// path plus, for fixed-output derivations, the hash algorithm and value
/// (both empty strings otherwise).
pub struct OutputInfo {
    pub path: String,
    pub hash_algo: String,
    pub hash: String,
}

/// All fields of a Nix derivation, used to render the `.drv` ATerm.
/// BTree collections keep every section deterministically sorted, which
/// the ATerm format requires for stable hashing.
pub struct DerivationData {
    pub name: String,
    pub outputs: BTreeMap<String, OutputInfo>,
    pub input_drvs: BTreeMap<String, BTreeSet<String>>,
    pub input_srcs: BTreeSet<String>,
    pub platform: String,
    pub builder: String,
    pub args: Vec<String>,
    pub env: BTreeMap<String, String>,
}

/// Quote `s` for the ATerm format, escaping `"`, `\` and the control
/// characters (\n, \r, \t) that Nix escapes.
fn escape_string(s: &str) -> String {
    let mut result = String::with_capacity(s.len() + 2);
    result.push('"');
    for c in s.chars() {
        match c {
            '"' => result.push_str("\\\""),
            '\\' => result.push_str("\\\\"),
            '\n' => result.push_str("\\n"),
            '\r' => result.push_str("\\r"),
            '\t' => result.push_str("\\t"),
            _ => result.push(c),
        }
    }
    result.push('"');
    result
}

/// Wrap `s` in double quotes without escaping. Only used for values that
/// cannot contain quotes or backslashes (store paths, output names,
/// hashes, platform strings).
fn quote_string(s: &str) -> String {
    format!("\"{}\"", s)
}

impl DerivationData {
    /// Render the `("name","path","hashAlgo","hash")` outputs section.
    fn render_outputs(&self) -> String {
        self.outputs
            .iter()
            .map(|(name, info)| {
                format!(
                    "({},{},{},{})",
                    quote_string(name),
                    quote_string(&info.path),
                    quote_string(&info.hash_algo),
                    quote_string(&info.hash),
                )
            })
            .collect::<Vec<_>>()
            .join(",")
    }

    /// Assemble the full `Derive(...)` term around a pre-rendered
    /// input-derivations section. Everything except that section is
    /// identical between the plain and the "modulo" ATerm, so both
    /// public generators funnel through here.
    fn render_aterm(&self, input_drvs: &str) -> String {
        let input_srcs: Vec<String> = self.input_srcs.iter().map(|s| quote_string(s)).collect();
        let args: Vec<String> = self.args.iter().map(|s| escape_string(s)).collect();
        let env_entries: Vec<String> = self
            .env
            .iter()
            .map(|(k, v)| format!("({},{})", escape_string(k), escape_string(v)))
            .collect();
        format!(
            "Derive([{}],[{}],[{}],{},{},[{}],[{}])",
            self.render_outputs(),
            input_drvs,
            input_srcs.join(","),
            quote_string(&self.platform),
            escape_string(&self.builder),
            args.join(","),
            env_entries.join(","),
        )
    }

    /// ATerm with the real input-derivation store paths — the exact text
    /// written to the store as `<name>.drv`.
    pub fn generate_aterm(&self) -> String {
        let mut entries = Vec::new();
        for (drv_path, output_names) in &self.input_drvs {
            let outs: Vec<String> = output_names.iter().map(|s| quote_string(s)).collect();
            entries.push(format!("({},[{}])", quote_string(drv_path), outs.join(",")));
        }
        self.render_aterm(&entries.join(","))
    }

    /// ATerm "modulo fixed-output derivations": input derivations are
    /// keyed by their modulo hash instead of their store path. Hashing
    /// this string yields the value used to compute output paths.
    /// `input_drv_hashes` maps modulo-hash -> comma-separated output
    /// names (sorted here before rendering).
    pub fn generate_aterm_modulo(
        &self,
        input_drv_hashes: &BTreeMap<String, String>,
    ) -> String {
        let mut entries = Vec::new();
        for (drv_hash, outputs_csv) in input_drv_hashes {
            let mut sorted_outs: Vec<&str> = outputs_csv.split(',').collect();
            sorted_outs.sort();
            let outs: Vec<String> = sorted_outs.iter().map(|s| quote_string(s)).collect();
            entries.push(format!("({},[{}])", quote_string(drv_hash), outs.join(",")));
        }
        self.render_aterm(&entries.join(","))
    }

    /// Store references of the `.drv` file: every input source plus every
    /// input derivation path, deduplicated and sorted.
    pub fn collect_references(&self) -> Vec<String> {
        let mut refs = BTreeSet::new();
        for src in &self.input_srcs {
            refs.insert(src.clone());
        }
        for drv_path in self.input_drvs.keys() {
            refs.insert(drv_path.clone());
        }
        refs.into_iter().collect()
    }
}

View File

@@ -6,6 +6,7 @@ pub mod logging;
pub mod value;
mod codegen;
mod derivation;
mod downgrade;
mod fetcher;
mod ir;
@@ -13,6 +14,7 @@ mod nar;
mod nix_utils;
mod runtime;
mod store;
mod string_context;
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

View File

@@ -46,31 +46,38 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
let mut ops = vec![
op_import::<Ctx>(),
op_scoped_import::<Ctx>(),
op_resolve_path(),
op_read_file(),
op_read_file_type(),
op_read_dir(),
op_path_exists(),
op_resolve_path(),
op_sha256_hex(),
op_make_placeholder(),
op_decode_span::<Ctx>(),
op_make_store_path::<Ctx>(),
op_parse_hash(),
op_make_fixed_output_path::<Ctx>(),
op_add_path::<Ctx>(),
op_store_path::<Ctx>(),
op_to_file::<Ctx>(),
op_write_derivation::<Ctx>(),
op_read_derivation_outputs(),
op_compute_fs_closure(),
op_copy_path_to_store::<Ctx>(),
op_get_env(),
op_walk_dir(),
op_make_placeholder(),
op_store_path::<Ctx>(),
op_parse_hash(),
op_add_path::<Ctx>(),
op_add_filtered_path::<Ctx>(),
op_decode_span::<Ctx>(),
op_to_file::<Ctx>(),
op_copy_path_to_store::<Ctx>(),
op_get_env(),
op_match(),
op_split(),
op_from_json(),
op_from_toml(),
op_finalize_derivation::<Ctx>(),
];
ops.extend(crate::fetcher::register_ops::<Ctx>());
@@ -86,7 +93,6 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
mod private {
use deno_error::js_error_wrapper;
#[allow(dead_code)]
#[derive(Debug)]
pub struct SimpleErrorWrapper(pub(crate) String);
impl std::fmt::Display for SimpleErrorWrapper {
@@ -142,6 +148,7 @@ impl<Ctx: RuntimeContext> Runtime<Ctx> {
});
js_runtime.op_state().borrow_mut().put(RegexCache::new());
js_runtime.op_state().borrow_mut().put(DrvHashCache::new());
let (
is_thunk_symbol,

View File

@@ -244,12 +244,6 @@ pub(super) fn op_resolve_path(
Ok(normalized.to_string_lossy().to_string())
}
#[deno_core::op2]
#[string]
pub(super) fn op_sha256_hex(#[string] data: String) -> String {
crate::nix_utils::sha256_hex(data.as_bytes())
}
#[deno_core::op2]
#[string]
pub(super) fn op_make_placeholder(#[string] output: String) -> String {
@@ -312,20 +306,6 @@ fn byte_offset_to_line_col(content: &str, offset: usize) -> (u32, u32) {
(line, col)
}
#[deno_core::op2]
#[string]
pub(super) fn op_make_store_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] ty: String,
#[string] hash_hex: String,
#[string] name: String,
) -> String {
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_dir = store.get_store_dir();
crate::nix_utils::make_store_path(store_dir, &ty, &hash_hex, &name)
}
#[derive(serde::Serialize)]
pub(super) struct ParsedHash {
hex: String,
@@ -359,34 +339,6 @@ pub(super) fn op_parse_hash(
})
}
#[deno_core::op2]
#[string]
pub(super) fn op_make_fixed_output_path<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] hash_algo: String,
#[string] hash: String,
#[string] hash_mode: String,
#[string] name: String,
) -> String {
use sha2::{Digest, Sha256};
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_dir = store.get_store_dir();
if hash_algo == "sha256" && hash_mode == "recursive" {
crate::nix_utils::make_store_path(store_dir, "source", &hash, &name)
} else {
let prefix = if hash_mode == "recursive" { "r:" } else { "" };
let inner_input = format!("fixed:out:{}{}:{}:", prefix, hash_algo, hash);
let mut hasher = Sha256::new();
hasher.update(inner_input.as_bytes());
let inner_hash = hex::encode(hasher.finalize());
crate::nix_utils::make_store_path(store_dir, "output:out", &inner_hash, &name)
}
}
#[deno_core::op2]
#[string]
pub(super) fn op_add_path<Ctx: RuntimeContext>(
@@ -504,269 +456,6 @@ pub(super) fn op_to_file<Ctx: RuntimeContext>(
Ok(store_path)
}
#[deno_core::op2]
#[string]
pub(super) fn op_write_derivation<Ctx: RuntimeContext>(
state: &mut OpState,
#[string] drv_name: String,
#[string] aterm: String,
#[serde] references: Vec<String>,
) -> std::result::Result<String, NixRuntimeError> {
tracing::debug!(
"op_write_derivation: name={}.drv, references={:?}",
drv_name,
references
);
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_path = store
.add_text_to_store(&format!("{}.drv", drv_name), &aterm, references)
.map_err(|e| NixRuntimeError::from(format!("failed to write derivation: {}", e)))?;
Ok(store_path)
}
#[deno_core::op2]
#[serde]
pub(super) fn op_read_derivation_outputs(
#[string] drv_path: String,
) -> std::result::Result<Vec<String>, NixRuntimeError> {
let content = std::fs::read_to_string(&drv_path).map_err(|e| {
NixRuntimeError::from(format!("failed to read derivation {}: {}", drv_path, e))
})?;
let outputs = parse_derivation_outputs(&content)
.ok_or_else(|| NixRuntimeError::from(format!("failed to parse derivation {}", drv_path)))?;
Ok(outputs)
}
/// Extracts the output names from a derivation ATerm by scanning the
/// first `[...]` section of `Derive([...],...)`.
///
/// Walks the bytes of the outputs section looking for `("name",...)`
/// tuples and collects the first quoted string of each. Returns `None`
/// only when the `Derive([` prefix or the section terminator `],[` is
/// missing, or a name is not valid UTF-8; malformed tuples simply end
/// the scan early.
///
/// NOTE(review): assumes output names contain no escaped quotes — true
/// for names Nix accepts, but this is not a general ATerm parser.
fn parse_derivation_outputs(aterm: &str) -> Option<Vec<String>> {
let aterm = aterm.strip_prefix("Derive([")?;
let outputs_end = aterm.find("],[")?;
let outputs_section = &aterm[..outputs_end];
let mut outputs = Vec::new();
let mut pos = 0;
let bytes = outputs_section.as_bytes();
while pos < bytes.len() {
// Seek to the start of the next `(`-delimited tuple.
while pos < bytes.len() && bytes[pos] != b'(' {
pos += 1;
}
if pos >= bytes.len() {
break;
}
pos += 1;
// The first tuple element must be a quoted output name.
if pos >= bytes.len() || bytes[pos] != b'"' {
break;
}
pos += 1;
let name_start = pos;
while pos < bytes.len() && bytes[pos] != b'"' {
pos += 1;
}
let name = std::str::from_utf8(&bytes[name_start..pos]).ok()?;
outputs.push(name.to_string());
// Skip the rest of the tuple (path, hashAlgo, hash).
while pos < bytes.len() && bytes[pos] != b')' {
pos += 1;
}
pos += 1;
}
Some(outputs)
}
/// Input sections of a parsed `.drv` ATerm, serialized back to JS as
/// `{ input_drvs: [path, outputs[]][], input_srcs: string[] }`.
#[derive(serde::Serialize)]
pub(super) struct DerivationInputs {
input_drvs: Vec<(String, Vec<String>)>,
input_srcs: Vec<String>,
}
/// Parses the second (`inputDrvs`) and third (`inputSrcs`) sections of a
/// `Derive([outputs],[inputDrvs],[inputSrcs],...)` ATerm by bracket
/// counting, without building a full AST. Returns `None` on any
/// structural mismatch (missing prefix, unbalanced brackets, missing
/// section separators).
fn parse_derivation_inputs(aterm: &str) -> Option<DerivationInputs> {
let aterm = aterm.strip_prefix("Derive([")?;
// Skip the outputs section: consume until its opening `[` is balanced.
let mut bracket_count = 1;
let mut pos = 0;
let bytes = aterm.as_bytes();
while pos < bytes.len() && bracket_count > 0 {
match bytes[pos] {
b'[' => bracket_count += 1,
b']' => bracket_count -= 1,
_ => {}
}
pos += 1;
}
if bracket_count != 0 {
return None;
}
let rest = &aterm[pos..];
let rest = rest.strip_prefix(",[")?;
// inputDrvs section: top-level `(...)` tuples of ("drvPath",[outs]).
let mut input_drvs = Vec::new();
let mut bracket_count = 1;
let mut start = 0;
pos = 0;
let bytes = rest.as_bytes();
while pos < bytes.len() && bracket_count > 0 {
match bytes[pos] {
b'[' => bracket_count += 1,
b']' => bracket_count -= 1,
b'(' if bracket_count == 1 => {
start = pos;
}
b')' if bracket_count == 1 => {
// Entry text without the surrounding parens; malformed
// entries are silently skipped rather than failing the parse.
let entry = &rest[start + 1..pos];
if let Some((drv_path, outputs)) = parse_input_drv_entry(entry) {
input_drvs.push((drv_path, outputs));
}
}
_ => {}
}
pos += 1;
}
let rest = &rest[pos..];
let rest = rest.strip_prefix(",[")?;
// inputSrcs section: quoted store paths, honouring backslash escapes
// while scanning. NOTE(review): escape sequences are left un-decoded
// in the collected string — store paths normally contain none.
let mut input_srcs = Vec::new();
bracket_count = 1;
pos = 0;
let bytes = rest.as_bytes();
while pos < bytes.len() && bracket_count > 0 {
match bytes[pos] {
b'[' => bracket_count += 1,
b']' => bracket_count -= 1,
b'"' if bracket_count == 1 => {
pos += 1;
let src_start = pos;
while pos < bytes.len() && bytes[pos] != b'"' {
if bytes[pos] == b'\\' && pos + 1 < bytes.len() {
pos += 2;
} else {
pos += 1;
}
}
let src = std::str::from_utf8(&bytes[src_start..pos]).ok()?;
input_srcs.push(src.to_string());
}
_ => {}
}
pos += 1;
}
Some(DerivationInputs {
input_drvs,
input_srcs,
})
}
/// Parses one input-derivation entry of the form
/// `"/nix/store/x.drv",["out","dev"]` (outer parens already stripped)
/// into the drv path and its list of output names. Unquoted or
/// malformed list items are skipped; structural errors yield `None`.
fn parse_input_drv_entry(entry: &str) -> Option<(String, Vec<String>)> {
    let body = entry.strip_prefix('"')?;
    let end = body.find('"')?;
    let drv_path = body[..end].to_string();
    let list = body[end + 1..].strip_prefix(",[")?.strip_suffix(']')?;
    let outputs: Vec<String> = list
        .split(',')
        .filter_map(|part| {
            part.trim()
                .strip_prefix('"')
                .and_then(|s| s.strip_suffix('"'))
                .map(str::to_string)
        })
        .collect();
    Some((drv_path, outputs))
}
/// Closure of a derivation on the filesystem, serialized back to JS.
#[derive(serde::Serialize)]
pub(super) struct FsClosureResult {
input_drvs: Vec<(String, Vec<String>)>,
input_srcs: Vec<String>,
}
/// Computes the transitive input closure of `drv_path` by BFS over `.drv`
/// files on disk. Every visited path (including the starting drv and all
/// dependency drvs) is recorded as an input source; per-drv output sets
/// are merged into `input_drvs`. Non-`.drv` paths are leaves. Errors if
/// any `.drv` file cannot be read or parsed.
#[deno_core::op2]
#[serde]
pub(super) fn op_compute_fs_closure(
#[string] drv_path: String,
) -> std::result::Result<FsClosureResult, NixRuntimeError> {
use std::collections::{BTreeMap, BTreeSet, VecDeque};
let mut all_input_srcs: BTreeSet<String> = BTreeSet::new();
let mut all_input_drvs: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
let mut queue: VecDeque<String> = VecDeque::new();
let mut visited: BTreeSet<String> = BTreeSet::new();
queue.push_back(drv_path);
while let Some(current_path) = queue.pop_front() {
if visited.contains(&current_path) {
continue;
}
visited.insert(current_path.clone());
// Every reachable path is part of the source closure.
all_input_srcs.insert(current_path.clone());
// Only derivation files are expanded further.
if !current_path.ends_with(".drv") {
continue;
}
let content = match std::fs::read_to_string(&current_path) {
Ok(c) => c,
Err(e) => {
return Err(NixRuntimeError::from(format!(
"failed to read derivation {}: {}",
current_path, e
)));
}
};
let inputs = parse_derivation_inputs(&content).ok_or_else(|| {
NixRuntimeError::from(format!("failed to parse derivation {}", current_path))
})?;
for src in inputs.input_srcs {
all_input_srcs.insert(src.clone());
if !visited.contains(&src) {
queue.push_back(src);
}
}
for (dep_drv, outputs) in inputs.input_drvs {
// Dependency drvs count both as sources and as input derivations.
all_input_srcs.insert(dep_drv.clone());
let entry = all_input_drvs.entry(dep_drv.clone()).or_default();
for output in outputs {
entry.insert(output);
}
if !visited.contains(&dep_drv) {
queue.push_back(dep_drv);
}
}
}
// BTree collections give deterministic, sorted result ordering.
let input_drvs: Vec<(String, Vec<String>)> = all_input_drvs
.into_iter()
.map(|(k, v)| (k, v.into_iter().collect()))
.collect();
let input_srcs: Vec<String> = all_input_srcs.into_iter().collect();
Ok(FsClosureResult {
input_drvs,
input_srcs,
})
}
#[deno_core::op2]
#[string]
pub(super) fn op_copy_path_to_store<Ctx: RuntimeContext>(
@@ -1113,6 +802,19 @@ fn json_to_nix(value: serde_json::Value) -> NixJsonValue {
}
}
/// Per-runtime memo of derivation store path -> hash-modulo, consulted
/// when computing output paths of downstream derivations.
#[derive(Debug, Default)]
pub(super) struct DrvHashCache {
    cache: HashMap<String, String>,
}

impl DrvHashCache {
    /// Creates an empty cache; identical to `Default::default()`.
    pub(super) fn new() -> Self {
        Self::default()
    }
}
fn toml_to_nix(value: toml::Value) -> std::result::Result<NixJsonValue, NixRuntimeError> {
match value {
toml::Value::String(s) => Ok(NixJsonValue::Str(s)),
@@ -1153,3 +855,254 @@ pub(super) fn op_from_toml(
.map_err(|e| NixRuntimeError::from(format!("while parsing TOML: {e}")))?;
toml_to_nix(parsed)
}
/// Fixed-output parameters passed from JS (`fixedOutput` field of the
/// op_finalize_derivation input); field names mirror the JS camelCase.
#[derive(serde::Deserialize)]
pub(super) struct FixedOutputInput {
#[serde(rename = "hashAlgo")]
hash_algo: String,
hash: String,
#[serde(rename = "hashMode")]
hash_mode: String,
}
/// Everything JS collected about a derivation before handing off to Rust:
/// metadata, declared output names, builder invocation, env pairs, the
/// raw string context, and optional fixed-output info.
#[derive(serde::Deserialize)]
pub(super) struct FinalizeDerivationInput {
name: String,
builder: String,
platform: String,
outputs: Vec<String>,
args: Vec<String>,
env: Vec<(String, String)>,
context: Vec<String>,
#[serde(rename = "fixedOutput")]
fixed_output: Option<FixedOutputInput>,
}
/// Result returned to JS: the written `.drv` store path and the
/// (outputName, outputPath) pairs, as camelCase JSON.
#[derive(serde::Serialize)]
pub(super) struct FinalizeDerivationOutput {
#[serde(rename = "drvPath")]
drv_path: String,
outputs: Vec<(String, String)>,
}
/// Store-path name for one output: the bare derivation name for the
/// default "out" output, otherwise `<name>-<output>`.
fn output_path_name(drv_name: &str, output: &str) -> String {
    match output {
        "out" => drv_name.to_string(),
        other => format!("{}-{}", drv_name, other),
    }
}
/// Finalizes a derivation collected on the JS side: resolves input
/// drvs/srcs from the string context, computes output store paths
/// (fixed-output or hash-modulo), writes the `.drv` ATerm to the store,
/// and caches the drv's modulo hash for downstream derivations.
/// Returns the drv path plus (outputName, outputPath) pairs.
#[deno_core::op2]
#[serde]
pub(super) fn op_finalize_derivation<Ctx: RuntimeContext>(
state: &mut OpState,
#[serde] input: FinalizeDerivationInput,
) -> std::result::Result<FinalizeDerivationOutput, NixRuntimeError> {
use crate::derivation::{DerivationData, OutputInfo};
use crate::string_context::extract_input_drvs_and_srcs;
let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store();
let store_dir = store.get_store_dir().to_string();
// Turn the raw string-context entries into input drvs / input sources.
let (input_drvs, input_srcs) =
extract_input_drvs_and_srcs(&input.context).map_err(NixRuntimeError::from)?;
let env: std::collections::BTreeMap<String, String> =
input.env.into_iter().collect();
let drv_path;
let output_paths: Vec<(String, String)>;
if let Some(fixed) = &input.fixed_output {
// Fixed-output branch: exactly one output ("out") whose path is
// derived from the declared content hash, not from the drv itself.
let path_name = output_path_name(&input.name, "out");
let out_path = crate::runtime::ops::op_make_fixed_output_path_impl(
&store_dir,
&fixed.hash_algo,
&fixed.hash,
&fixed.hash_mode,
&path_name,
);
let hash_algo_prefix = if fixed.hash_mode == "recursive" {
"r:"
} else {
""
};
let mut final_outputs = std::collections::BTreeMap::new();
final_outputs.insert(
"out".to_string(),
OutputInfo {
path: out_path.clone(),
hash_algo: format!("{}{}", hash_algo_prefix, fixed.hash_algo),
hash: fixed.hash.clone(),
},
);
// The output path is also exposed to the builder via env.
let mut final_env = env;
final_env.insert("out".to_string(), out_path.clone());
let drv = DerivationData {
name: input.name.clone(),
outputs: final_outputs,
input_drvs: input_drvs.clone(),
input_srcs: input_srcs.clone(),
platform: input.platform,
builder: input.builder,
args: input.args,
env: final_env,
};
let final_aterm = drv.generate_aterm();
let references = drv.collect_references();
drv_path = store
.add_text_to_store(&format!("{}.drv", input.name), &final_aterm, references)
.map_err(|e| NixRuntimeError::from(format!("failed to write derivation: {}", e)))?;
// Fixed-output drvs hash a fingerprint of the content hash instead
// of the ATerm; cache it so dependents can compute their paths.
let fixed_hash_fingerprint = format!(
"fixed:out:{}{}:{}:{}",
hash_algo_prefix, fixed.hash_algo, fixed.hash, out_path,
);
let fixed_modulo_hash = crate::nix_utils::sha256_hex(fixed_hash_fingerprint.as_bytes());
let cache = state.borrow_mut::<DrvHashCache>();
cache.cache.insert(drv_path.clone(), fixed_modulo_hash);
output_paths = vec![("out".to_string(), out_path)];
} else {
// Regular branch: output paths depend on the drv's own hash, so we
// first hash a "masked" ATerm with empty output paths.
let masked_outputs: std::collections::BTreeMap<String, OutputInfo> = input
.outputs
.iter()
.map(|o| {
(
o.clone(),
OutputInfo {
path: String::new(),
hash_algo: String::new(),
hash: String::new(),
},
)
})
.collect();
let mut masked_env = env.clone();
for output in &input.outputs {
masked_env.insert(output.clone(), String::new());
}
let masked_drv = DerivationData {
name: input.name.clone(),
outputs: masked_outputs,
input_drvs: input_drvs.clone(),
input_srcs: input_srcs.clone(),
platform: input.platform.clone(),
builder: input.builder.clone(),
args: input.args.clone(),
env: masked_env,
};
// Replace each input drv path with its cached modulo hash; every
// input must have been finalized through this op beforehand.
let mut input_drv_hashes = std::collections::BTreeMap::new();
{
let cache = state.borrow::<DrvHashCache>();
for (dep_drv_path, output_names) in &input_drvs {
let cached_hash =
cache.cache.get(dep_drv_path).ok_or_else(|| {
NixRuntimeError::from(format!(
"Missing modulo hash for input derivation: {}",
dep_drv_path
))
})?;
let mut sorted_outs: Vec<&String> = output_names.iter().collect();
sorted_outs.sort();
let outputs_csv: Vec<&str> =
sorted_outs.iter().map(|s| s.as_str()).collect();
input_drv_hashes
.insert(cached_hash.clone(), outputs_csv.join(","));
}
}
let masked_aterm = masked_drv.generate_aterm_modulo(&input_drv_hashes);
let drv_modulo_hash = crate::nix_utils::sha256_hex(masked_aterm.as_bytes());
// Now derive each real output path from the modulo hash and fill in
// the final outputs/env.
let mut final_outputs = std::collections::BTreeMap::new();
let mut final_env = env;
let mut result_output_paths = Vec::new();
for output_name in &input.outputs {
let path_name = output_path_name(&input.name, output_name);
let out_path = crate::nix_utils::make_store_path(
&store_dir,
&format!("output:{}", output_name),
&drv_modulo_hash,
&path_name,
);
final_outputs.insert(
output_name.clone(),
OutputInfo {
path: out_path.clone(),
hash_algo: String::new(),
hash: String::new(),
},
);
final_env.insert(output_name.clone(), out_path.clone());
result_output_paths.push((output_name.clone(), out_path));
}
let final_drv = DerivationData {
name: input.name,
outputs: final_outputs,
input_drvs,
input_srcs,
platform: input.platform,
builder: input.builder,
args: input.args,
env: final_env,
};
let final_aterm = final_drv.generate_aterm();
let references = final_drv.collect_references();
drv_path = store
.add_text_to_store(&format!("{}.drv", final_drv.name), &final_aterm, references)
.map_err(|e| NixRuntimeError::from(format!("failed to write derivation: {}", e)))?;
// Cache this drv's modulo hash (computed over the *final* ATerm with
// hash-keyed inputs) for derivations that depend on it.
let final_aterm_modulo = final_drv.generate_aterm_modulo(&input_drv_hashes);
let cached_modulo_hash =
crate::nix_utils::sha256_hex(final_aterm_modulo.as_bytes());
let cache = state.borrow_mut::<DrvHashCache>();
cache.cache.insert(drv_path.clone(), cached_modulo_hash);
output_paths = result_output_paths;
}
Ok(FinalizeDerivationOutput {
drv_path,
outputs: output_paths,
})
}
/// Compute the store path for a fixed-output derivation output.
///
/// The special case `sha256` + `recursive` keys the path directly as a
/// "source" entry; every other algorithm/mode combination is first folded
/// into an inner `fixed:out:...` fingerprint (sha256 of that string),
/// which then keys an "output:out" store path.
fn op_make_fixed_output_path_impl(
    store_dir: &str,
    hash_algo: &str,
    hash: &str,
    hash_mode: &str,
    name: &str,
) -> String {
    use sha2::{Digest, Sha256};
    let recursive = hash_mode == "recursive";
    if recursive && hash_algo == "sha256" {
        return crate::nix_utils::make_store_path(store_dir, "source", hash, name);
    }
    // Non-canonical combinations go through an intermediate fingerprint.
    let mode_prefix = if recursive { "r:" } else { "" };
    let fingerprint = format!("fixed:out:{}{}:{}:", mode_prefix, hash_algo, hash);
    let digest = Sha256::digest(fingerprint.as_bytes());
    crate::nix_utils::make_store_path(store_dir, "output:out", &hex::encode(digest), name)
}

View File

@@ -1,5 +1,3 @@
#![allow(dead_code)]
use crate::error::Result;
mod config;
@@ -39,12 +37,4 @@ pub trait Store: Send + Sync {
content: &str,
references: Vec<String>,
) -> Result<String>;
fn make_fixed_output_path(
&self,
hash_algo: &str,
hash: &str,
hash_mode: &str,
name: &str,
) -> Result<String>;
}

View File

@@ -304,51 +304,6 @@ impl Store for DaemonStore {
Ok(store_path_str)
}
fn make_fixed_output_path(
&self,
hash_algo: &str,
hash: &str,
hash_mode: &str,
name: &str,
) -> Result<String> {
use nix_compat::nixhash::{CAHash, NixHash};
use nix_compat::store_path::build_ca_path;
let nix_hash = match hash_algo {
"sha256" => {
let hash_bytes = hex::decode(hash)
.map_err(|e| Error::internal(format!("Invalid hash hex: {}", e)))?;
if hash_bytes.len() != 32 {
return Err(Error::internal(format!(
"Invalid sha256 hash length: expected 32, got {}",
hash_bytes.len()
)));
}
let mut arr = [0u8; 32];
arr.copy_from_slice(&hash_bytes);
NixHash::Sha256(arr)
}
_ => {
return Err(Error::internal(format!(
"Unsupported hash algorithm: {}",
hash_algo
)));
}
};
let ca_hash = if hash_mode == "r" {
CAHash::Nar(nix_hash)
} else {
CAHash::Flat(nix_hash)
};
let store_path: nix_compat::store_path::StorePath<String> =
build_ca_path(name, &ca_hash, Vec::<String>::new(), false)
.map_err(|e| Error::internal(format!("Failed to build store path: {}", e)))?;
Ok(store_path.to_absolute_path())
}
}
const PROTOCOL_VERSION: ProtocolVersion = ProtocolVersion::from_parts(1, 37);

View File

@@ -1,3 +1,5 @@
#![allow(dead_code)]
use std::fmt;
#[derive(Debug)]

View File

@@ -0,0 +1,211 @@
use std::collections::{BTreeMap, BTreeSet, VecDeque};
/// One element of a Nix string context, decoded from its serialized form.
pub enum StringContextElem {
    /// A plain store path dependency (no marker prefix).
    Opaque { path: String },
    /// `=<drv>`: depend on a derivation and everything it references.
    DrvDeep { drv_path: String },
    /// `!<output>!<drv>`: depend on one named output of a derivation.
    Built { drv_path: String, output: String },
}

impl StringContextElem {
    /// Parse a serialized context element.
    ///
    /// `=` introduces a deep derivation reference and `!out!drv` a single
    /// built output; anything else — including a malformed `!...` with no
    /// second separator — falls back to an opaque path carrying the full
    /// original string.
    pub fn decode(encoded: &str) -> Self {
        if let Some(deep) = encoded.strip_prefix('=') {
            return StringContextElem::DrvDeep {
                drv_path: deep.to_string(),
            };
        }
        if let Some(tail) = encoded.strip_prefix('!') {
            if let Some((output, drv_path)) = tail.split_once('!') {
                return StringContextElem::Built {
                    drv_path: drv_path.to_string(),
                    output: output.to_string(),
                };
            }
        }
        StringContextElem::Opaque {
            path: encoded.to_string(),
        }
    }
}
/// Derivation inputs: drv store path -> set of output names required from it.
pub type InputDrvs = BTreeMap<String, BTreeSet<String>>;
/// Plain store-path sources referenced by a derivation.
pub type Srcs = BTreeSet<String>;
/// Split a list of encoded string-context elements into derivation inputs
/// and plain source paths.
///
/// Deep (`=`) references are expanded through `compute_fs_closure`, so the
/// result also covers the transitive inputs of the referenced derivation.
pub fn extract_input_drvs_and_srcs(
    context: &[String],
) -> Result<(InputDrvs, Srcs), String> {
    let mut drvs: InputDrvs = BTreeMap::new();
    let mut srcs: Srcs = BTreeSet::new();
    for encoded in context {
        match StringContextElem::decode(encoded) {
            StringContextElem::Opaque { path } => {
                srcs.insert(path);
            }
            StringContextElem::DrvDeep { drv_path } => {
                compute_fs_closure(&drv_path, &mut drvs, &mut srcs)?;
            }
            StringContextElem::Built { drv_path, output } => {
                drvs.entry(drv_path).or_default().insert(output);
            }
        }
    }
    Ok((drvs, srcs))
}
/// Walk the on-disk closure of `drv_path`, accumulating every reachable
/// derivation (with the output names demanded of it) and every source path.
///
/// Breadth-first over `.drv` files read straight from the store; non-drv
/// paths are recorded as sources but never expanded. Fails if a reachable
/// derivation cannot be read or parsed.
fn compute_fs_closure(
    drv_path: &str,
    input_drvs: &mut BTreeMap<String, BTreeSet<String>>,
    input_srcs: &mut BTreeSet<String>,
) -> Result<(), String> {
    let mut pending: VecDeque<String> = VecDeque::from([drv_path.to_string()]);
    let mut seen: BTreeSet<String> = BTreeSet::new();
    while let Some(path) = pending.pop_front() {
        // `insert` returning false means this path was already processed.
        if !seen.insert(path.clone()) {
            continue;
        }
        input_srcs.insert(path.clone());
        if !path.ends_with(".drv") {
            continue;
        }
        let aterm = std::fs::read_to_string(&path)
            .map_err(|e| format!("failed to read derivation {}: {}", path, e))?;
        let parsed = parse_derivation_inputs(&aterm)
            .ok_or_else(|| format!("failed to parse derivation {}", path))?;
        for src in parsed.input_srcs {
            input_srcs.insert(src.clone());
            if !seen.contains(&src) {
                pending.push_back(src);
            }
        }
        for (dep, outputs) in parsed.input_drvs {
            input_srcs.insert(dep.clone());
            input_drvs.entry(dep.clone()).or_default().extend(outputs);
            if !seen.contains(&dep) {
                pending.push_back(dep);
            }
        }
    }
    Ok(())
}
/// Inputs parsed out of a derivation ATerm by `parse_derivation_inputs`.
struct DerivationInputs {
    // (drv path, output names) pairs from the inputDrvs section.
    input_drvs: Vec<(String, Vec<String>)>,
    // Store paths from the inputSrcs section.
    input_srcs: Vec<String>,
}
/// Extract the `inputDrvs` and `inputSrcs` sections from a derivation ATerm.
///
/// A `Derive(...)` ATerm starts with three bracketed lists:
/// `Derive([outputs],[inputDrvs],[inputSrcs],...)`. This scanner skips the
/// outputs list, collects `("drv",[outs])` pairs from the second list, and
/// collects quoted paths from the third. Returns `None` on any structural
/// mismatch (missing prefix/separators or unbalanced brackets up front).
///
/// NOTE(review): the first two scans count brackets without tracking quoted
/// strings, so a literal `[`/`]`/`(`/`)` inside a quoted path would desync
/// them — presumably store paths never contain these; confirm against the
/// store-path character set.
fn parse_derivation_inputs(aterm: &str) -> Option<DerivationInputs> {
    // Phase 1: skip the outputs list — advance past its matching ']'.
    let aterm = aterm.strip_prefix("Derive([")?;
    let mut bracket_count: i32 = 1;
    let mut pos = 0;
    let bytes = aterm.as_bytes();
    while pos < bytes.len() && bracket_count > 0 {
        match bytes[pos] {
            b'[' => bracket_count += 1,
            b']' => bracket_count -= 1,
            _ => {}
        }
        pos += 1;
    }
    if bracket_count != 0 {
        return None;
    }
    // Phase 2: scan the inputDrvs list, one parenthesized entry at a time.
    // `rest`/`bytes`/`pos` are re-bound so each phase scans from offset 0.
    let rest = &aterm[pos..];
    let rest = rest.strip_prefix(",[")?;
    let mut input_drvs = Vec::new();
    let mut bracket_count: i32 = 1;
    let mut start = 0;
    pos = 0;
    let bytes = rest.as_bytes();
    while pos < bytes.len() && bracket_count > 0 {
        match bytes[pos] {
            b'[' => bracket_count += 1,
            b']' => bracket_count -= 1,
            // '(' / ')' only delimit entries at the top level of this list;
            // inside an entry the inner '[' has already raised the count.
            b'(' if bracket_count == 1 => {
                start = pos;
            }
            b')' if bracket_count == 1 => {
                let entry = &rest[start + 1..pos];
                // Malformed entries are skipped rather than failing the parse.
                if let Some((drv_path, outputs)) = parse_input_drv_entry(entry) {
                    input_drvs.push((drv_path, outputs));
                }
            }
            _ => {}
        }
        pos += 1;
    }
    // Phase 3: scan the inputSrcs list, collecting each quoted string.
    let rest = &rest[pos..];
    let rest = rest.strip_prefix(",[")?;
    let mut input_srcs = Vec::new();
    bracket_count = 1;
    pos = 0;
    let bytes = rest.as_bytes();
    while pos < bytes.len() && bracket_count > 0 {
        match bytes[pos] {
            b'[' => bracket_count += 1,
            b']' => bracket_count -= 1,
            b'"' if bracket_count == 1 => {
                pos += 1;
                let src_start = pos;
                // Walk to the closing quote; backslash escapes a following
                // byte, but the escape sequence itself is kept verbatim —
                // the stored string is not unescaped here.
                while pos < bytes.len() && bytes[pos] != b'"' {
                    if bytes[pos] == b'\\' && pos + 1 < bytes.len() {
                        pos += 2;
                    } else {
                        pos += 1;
                    }
                }
                let src = std::str::from_utf8(&bytes[src_start..pos]).ok()?;
                input_srcs.push(src.to_string());
            }
            _ => {}
        }
        pos += 1;
    }
    Some(DerivationInputs {
        input_drvs,
        input_srcs,
    })
}
/// Parse one `"drv-path",["out1","out2"]` pair from the inputDrvs list.
///
/// Returns `None` if the entry is not shaped as a quoted path followed by a
/// bracketed list; list items that are not fully quoted are silently skipped.
fn parse_input_drv_entry(entry: &str) -> Option<(String, Vec<String>)> {
    // Everything before the next quote is the drv path.
    let (drv_path, remainder) = entry.strip_prefix('"')?.split_once('"')?;
    let list = remainder.strip_prefix(",[")?.strip_suffix(']')?;
    let outputs: Vec<String> = list
        .split(',')
        .filter_map(|item| {
            item.trim()
                .strip_prefix('"')
                .and_then(|s| s.strip_suffix('"'))
        })
        .map(str::to_string)
        .collect();
    Some((drv_path.to_string(), outputs))
}