feat: logging; copy to store; fix daemon: write_slice -> write_all

This commit is contained in:
2026-01-18 14:50:43 +08:00
parent 2441e10607
commit dcb853ea0a
12 changed files with 169 additions and 51 deletions

View File

@@ -158,8 +158,14 @@ export const coerceToString = (
// Paths coerce to their string value // Paths coerce to their string value
if (isNixPath(v)) { if (isNixPath(v)) {
// TODO: Implement copyToStore when needed (copy path to Nix store) if (copyToStore) {
// For now, just return the raw path string const pathStr = v.value;
const storePath = Deno.core.ops.op_copy_path_to_store(pathStr);
if (outContext) {
outContext.add(storePath);
}
return storePath;
}
return v.value; return v.value;
} }

View File

@@ -69,14 +69,16 @@ export const concatStringsWithContext = (parts: NixValue[]): NixString | NixPath
} }
// String concatenation mode // String concatenation mode
// Note: firstIsPath is already false at this point, otherwise we would have
// returned in the path concatenation branch above
const context: NixStringContext = new Set(); const context: NixStringContext = new Set();
const strParts: string[] = []; const strParts: string[] = [];
for (const part of parts) { for (const part of parts) {
// Handle path coercion to string
const forced = force(part); const forced = force(part);
if (isNixPath(forced)) { if (isNixPath(forced)) {
strParts.push(forced.value); const str = coerceToString(forced, StringCoercionMode.Interpolation, true, context);
strParts.push(str);
} else { } else {
const str = coerceToString(forced, StringCoercionMode.Interpolation, false, context); const str = coerceToString(forced, StringCoercionMode.Interpolation, false, context);
strParts.push(str); strParts.push(str);

View File

@@ -78,6 +78,7 @@ declare global {
): string; ): string;
function op_store_path(path: string): string; function op_store_path(path: string): string;
function op_to_file(name: string, contents: string, references: string[]): string; function op_to_file(name: string, contents: string, references: string[]): string;
function op_copy_path_to_store(path: string): string;
} }
} }
} }

View File

@@ -1,14 +1,9 @@
use anyhow::Result; use anyhow::Result;
use nix_js::context::Context; use nix_js::context::Context;
use std::process::exit; use std::process::exit;
use tracing_subscriber::EnvFilter;
fn main() -> Result<()> { fn main() -> Result<()> {
let format = tracing_subscriber::fmt::format().without_time(); nix_js::logging::init_logging();
tracing_subscriber::fmt()
.with_env_filter(EnvFilter::from_default_env())
.event_format(format)
.init();
let mut args = std::env::args(); let mut args = std::env::args();
if args.len() != 2 { if args.len() != 2 {

View File

@@ -3,14 +3,9 @@ use nix_js::context::Context;
use regex::Regex; use regex::Regex;
use rustyline::DefaultEditor; use rustyline::DefaultEditor;
use rustyline::error::ReadlineError; use rustyline::error::ReadlineError;
use tracing_subscriber::EnvFilter;
fn main() -> Result<()> { fn main() -> Result<()> {
let format = tracing_subscriber::fmt::format().without_time(); nix_js::logging::init_logging();
tracing_subscriber::fmt()
.with_env_filter(EnvFilter::from_default_env())
.event_format(format)
.init();
let mut rl = DefaultEditor::new()?; let mut rl = DefaultEditor::new()?;
let mut context = Context::new()?; let mut context = Context::new()?;

View File

@@ -70,7 +70,9 @@ impl Context {
Ok(Self { ctx, runtime, store }) Ok(Self { ctx, runtime, store })
} }
#[tracing::instrument(skip(self), fields(expr_len = expr.len()))]
pub fn eval_code(&mut self, expr: &str) -> Result<Value> { pub fn eval_code(&mut self, expr: &str) -> Result<Value> {
tracing::info!("Starting evaluation");
self.ctx.current_file = Some( self.ctx.current_file = Some(
std::env::current_dir() std::env::current_dir()
.map_err(|err| { .map_err(|err| {
@@ -78,6 +80,8 @@ impl Context {
})? })?
.join("__eval__.nix"), .join("__eval__.nix"),
); );
tracing::debug!("Compiling code");
let code = self.compile_code(expr)?; let code = self.compile_code(expr)?;
self.runtime self.runtime
@@ -85,6 +89,7 @@ impl Context {
.borrow_mut() .borrow_mut()
.put(self.store.clone()); .put(self.store.clone());
tracing::debug!("Executing JavaScript");
self.runtime self.runtime
.eval(format!("Nix.force({code})"), CtxPtr::new(&mut self.ctx)) .eval(format!("Nix.force({code})"), CtxPtr::new(&mut self.ctx))
} }
@@ -191,19 +196,22 @@ impl Ctx {
.expect("current_file doesn't have a parent dir") .expect("current_file doesn't have a parent dir")
} }
#[tracing::instrument(skip(self), fields(expr_len = expr.len()))]
fn compile_code(&mut self, expr: &str) -> Result<String> { fn compile_code(&mut self, expr: &str) -> Result<String> {
tracing::debug!("Parsing Nix expression");
let root = rnix::Root::parse(expr); let root = rnix::Root::parse(expr);
if !root.errors().is_empty() { if !root.errors().is_empty() {
return Err(Error::parse_error(root.errors().iter().join("; "))); return Err(Error::parse_error(root.errors().iter().join("; ")));
} }
#[allow(clippy::unwrap_used)] #[allow(clippy::unwrap_used)]
// Always `Some` since there is no parse error
let root = self let root = self
.downgrade_ctx() .downgrade_ctx()
.downgrade(root.tree().expr().unwrap())?; .downgrade(root.tree().expr().unwrap())?;
tracing::debug!("Generating JavaScript code");
let code = compile(self.get_ir(root), self); let code = compile(self.get_ir(root), self);
tracing::debug!("generated code: {}", &code); tracing::debug!("Generated code: {}", &code);
Ok(code) Ok(code)
} }
} }

View File

@@ -1,7 +1,7 @@
use deno_core::op2; use deno_core::op2;
use deno_core::OpState; use deno_core::OpState;
use serde::Serialize; use serde::Serialize;
use tracing::debug; use tracing::{debug, info, warn};
mod archive; mod archive;
pub(crate) mod cache; pub(crate) mod cache;
@@ -63,7 +63,8 @@ pub fn op_fetch_url(
use crate::store::StoreBackend; use crate::store::StoreBackend;
use std::sync::Arc; use std::sync::Arc;
debug!("fetchurl: {}", url); let _span = tracing::info_span!("op_fetch_url", url = %url).entered();
info!("fetchurl started");
let file_name = let file_name =
name.unwrap_or_else(|| url.rsplit('/').next().unwrap_or("download").to_string()); name.unwrap_or_else(|| url.rsplit('/').next().unwrap_or("download").to_string());
@@ -91,16 +92,16 @@ pub fn op_fetch_url(
if let Some(ref expected) = expected_hash { if let Some(ref expected) = expected_hash {
let normalized_expected = normalize_hash(expected); let normalized_expected = normalize_hash(expected);
if cached_hash != normalized_expected { if cached_hash != normalized_expected {
debug!("Cached hash mismatch, re-fetching"); warn!("Cached hash mismatch, re-fetching");
} else { } else {
debug!("Cache hit for {}", url); info!("Cache hit");
return Ok(FetchUrlResult { return Ok(FetchUrlResult {
store_path: cached_entry.store_path.clone(), store_path: cached_entry.store_path.clone(),
hash: cached_hash.to_string(), hash: cached_hash.to_string(),
}); });
} }
} else { } else {
debug!("Cache hit for {} (no hash check)", url); info!("Cache hit (no hash check)");
return Ok(FetchUrlResult { return Ok(FetchUrlResult {
store_path: cached_entry.store_path.clone(), store_path: cached_entry.store_path.clone(),
hash: cached_hash.to_string(), hash: cached_hash.to_string(),
@@ -108,12 +109,14 @@ pub fn op_fetch_url(
} }
} }
debug!("Cache miss, downloading {}", url); info!("Cache miss, downloading");
let downloader = Downloader::new(); let downloader = Downloader::new();
let data = downloader let data = downloader
.download(&url) .download(&url)
.map_err(|e| NixError::from(e.to_string()))?; .map_err(|e| NixError::from(e.to_string()))?;
info!(bytes = data.len(), "Download complete");
let hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(&data)); let hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(&data));
if let Some(ref expected) = expected_hash { if let Some(ref expected) = expected_hash {
@@ -132,6 +135,8 @@ pub fn op_fetch_url(
.add_to_store(&file_name, &data, false, vec![]) .add_to_store(&file_name, &data, false, vec![])
.map_err(|e| NixError::from(e.to_string()))?; .map_err(|e| NixError::from(e.to_string()))?;
info!(store_path = %store_path, "Added to store");
#[cfg(unix)] #[cfg(unix)]
if executable { if executable {
use std::os::unix::fs::PermissionsExt; use std::os::unix::fs::PermissionsExt;
@@ -169,10 +174,8 @@ pub fn op_fetch_tarball(
use crate::store::StoreBackend; use crate::store::StoreBackend;
use std::sync::Arc; use std::sync::Arc;
debug!( let _span = tracing::info_span!("op_fetch_tarball", url = %url).entered();
"fetchTarball: url={}, expected_hash={:?}, expected_nar_hash={:?}", info!("fetchTarball started");
url, expected_hash, expected_nar_hash
);
let dir_name = name.unwrap_or_else(|| "source".to_string()); let dir_name = name.unwrap_or_else(|| "source".to_string());
let metadata_cache = let metadata_cache =
@@ -202,7 +205,7 @@ pub fn op_fetch_tarball(
if let Some(ref expected_nar) = expected_nar_hash { if let Some(ref expected_nar) = expected_nar_hash {
let normalized_expected = normalize_hash(expected_nar); let normalized_expected = normalize_hash(expected_nar);
if cached_nar_hash == normalized_expected { if cached_nar_hash == normalized_expected {
debug!("Cache hit for tarball {}", url); info!("Cache hit");
return Ok(FetchTarballResult { return Ok(FetchTarballResult {
store_path: cached_entry.store_path.clone(), store_path: cached_entry.store_path.clone(),
hash: cached_tarball_hash.to_string(), hash: cached_tarball_hash.to_string(),
@@ -210,7 +213,7 @@ pub fn op_fetch_tarball(
}); });
} }
} else if !cached_entry.is_expired(3600) { } else if !cached_entry.is_expired(3600) {
debug!("Cache hit for tarball {} (no hash check)", url); info!("Cache hit (no hash check)");
return Ok(FetchTarballResult { return Ok(FetchTarballResult {
store_path: cached_entry.store_path.clone(), store_path: cached_entry.store_path.clone(),
hash: cached_tarball_hash.to_string(), hash: cached_tarball_hash.to_string(),
@@ -219,12 +222,14 @@ pub fn op_fetch_tarball(
} }
} }
debug!("Cache miss, downloading tarball from {}", url); info!("Cache miss, downloading tarball");
let downloader = Downloader::new(); let downloader = Downloader::new();
let data = downloader let data = downloader
.download(&url) .download(&url)
.map_err(|e| NixError::from(e.to_string()))?; .map_err(|e| NixError::from(e.to_string()))?;
info!(bytes = data.len(), "Download complete");
let tarball_hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(&data)); let tarball_hash = crate::nix_hash::sha256_hex(&String::from_utf8_lossy(&data));
if let Some(ref expected) = expected_hash { if let Some(ref expected) = expected_hash {
@@ -237,17 +242,20 @@ pub fn op_fetch_tarball(
} }
} }
info!("Extracting tarball");
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?; let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let extracted_path = cache let (extracted_path, _temp_dir) = cache
.extract_tarball_to_temp(&data) .extract_tarball_to_temp(&data)
.map_err(|e| NixError::from(e.to_string()))?; .map_err(|e| NixError::from(e.to_string()))?;
info!("Computing NAR hash");
let nar_hash = let nar_hash =
nar::compute_nar_hash(&extracted_path).map_err(|e| NixError::from(e.to_string()))?; nar::compute_nar_hash(&extracted_path).map_err(|e| NixError::from(e.to_string()))?;
debug!( debug!(
"fetchTarball: computed tarball_hash={}, nar_hash={}", tarball_hash = %tarball_hash,
tarball_hash, nar_hash nar_hash = %nar_hash,
"Hash computation complete"
); );
if let Some(ref expected) = expected_nar_hash { if let Some(ref expected) = expected_nar_hash {
@@ -260,12 +268,15 @@ pub fn op_fetch_tarball(
} }
} }
info!("Adding to store");
let store = state.borrow::<Arc<StoreBackend>>(); let store = state.borrow::<Arc<StoreBackend>>();
let store_path = store let store_path = store
.as_store() .as_store()
.add_to_store_from_path(&dir_name, &extracted_path, vec![]) .add_to_store_from_path(&dir_name, &extracted_path, vec![])
.map_err(|e| NixError::from(e.to_string()))?; .map_err(|e| NixError::from(e.to_string()))?;
info!(store_path = %store_path, "Added to store");
let info = serde_json::json!({ let info = serde_json::json!({
"tarball_hash": tarball_hash, "tarball_hash": tarball_hash,
"nar_hash": nar_hash, "nar_hash": nar_hash,
@@ -299,7 +310,8 @@ pub fn op_fetch_git(
use crate::store::StoreBackend; use crate::store::StoreBackend;
use std::sync::Arc; use std::sync::Arc;
debug!("fetchGit: {} (ref: {:?}, rev: {:?})", url, git_ref, rev); let _span = tracing::info_span!("op_fetch_git", url = %url).entered();
info!("fetchGit started");
let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?; let cache = FetcherCache::new().map_err(|e| NixError::from(e.to_string()))?;
let dir_name = name.unwrap_or_else(|| "source".to_string()); let dir_name = name.unwrap_or_else(|| "source".to_string());

View File

@@ -78,10 +78,10 @@ impl FetcherCache {
self.hg_cache_dir().join(key) self.hg_cache_dir().join(key)
} }
pub fn extract_tarball_to_temp(&self, data: &[u8]) -> Result<PathBuf, CacheError> { pub fn extract_tarball_to_temp(&self, data: &[u8]) -> Result<(PathBuf, tempfile::TempDir), CacheError> {
let temp_dir = tempfile::tempdir()?; let temp_dir = tempfile::tempdir()?;
let extracted_path = super::archive::extract_archive(data, temp_dir.path())?; let extracted_path = super::archive::extract_archive(data, temp_dir.path())?;
Ok(extracted_path) Ok((extracted_path, temp_dir))
} }
} }

View File

@@ -2,6 +2,7 @@
pub mod context; pub mod context;
pub mod error; pub mod error;
pub mod logging;
pub mod value; pub mod value;
mod codegen; mod codegen;

63
nix-js/src/logging.rs Normal file
View File

@@ -0,0 +1,63 @@
use std::env;
use std::io::IsTerminal;
use tracing_subscriber::{fmt, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer};
/// Initialize the global tracing subscriber for the nix-js binaries.
///
/// Behavior:
/// - Log filtering is taken from the environment (`RUST_LOG`) via
///   `EnvFilter::from_default_env()`.
/// - Timestamps are hidden by default; set `NIX_JS_LOG_TIME=1` (or `true`,
///   case-insensitive) to prefix events with process uptime.
/// - ANSI colors are enabled only when stderr is a terminal.
///
/// Unlike a plain `.init()`, this uses `try_init()`, so calling it more than
/// once (e.g. from both a host binary and an embedding test, which would
/// otherwise panic on the second global-subscriber install) is a no-op.
pub fn init_logging() {
    // Colorize output only for interactive sessions.
    let is_terminal = std::io::stderr().is_terminal();

    // Opt-in uptime timestamps; "1" or any casing of "true" enables them.
    let show_time = env::var("NIX_JS_LOG_TIME")
        .map(|v| v == "1" || v.eq_ignore_ascii_case("true"))
        .unwrap_or(false);

    let filter = EnvFilter::from_default_env();

    let fmt_layer = fmt::layer()
        .with_target(true)
        .with_thread_ids(false)
        .with_thread_names(false)
        .with_file(false)
        .with_line_number(false)
        .with_ansi(is_terminal)
        .with_level(true);

    // `boxed()` erases the two distinct concrete layer types so both
    // branches of the conditional unify to one `Box<dyn Layer<_>>`.
    let fmt_layer = if show_time {
        fmt_layer.with_timer(fmt::time::uptime()).boxed()
    } else {
        fmt_layer.without_time().boxed()
    };

    // try_init: silently ignore "already initialized" instead of panicking,
    // making repeated initialization safe.
    let _ = tracing_subscriber::registry()
        .with(filter)
        .with(fmt_layer)
        .try_init();
}
/// Crate-level alias for [`tracing::trace_span!`].
///
/// Forwards its arguments verbatim to `tracing`; it adds no behavior of its
/// own. NOTE(review): presumably exists so crate call sites can use
/// `crate::trace_span!` uniformly with the sibling span macros — confirm
/// against call sites before removing.
#[macro_export]
macro_rules! trace_span {
    // Name-only form: `trace_span!("name")`.
    ($name:expr) => {
        tracing::trace_span!($name)
    };
    // Name plus structured fields: `trace_span!("name", key = %value, ...)`.
    ($name:expr, $($field:tt)*) => {
        tracing::trace_span!($name, $($field)*)
    };
}
/// Crate-level alias for [`tracing::debug_span!`].
///
/// Forwards its arguments verbatim to `tracing`; it adds no behavior of its
/// own. NOTE(review): presumably kept for uniform `crate::debug_span!`
/// usage alongside the other span macros — confirm against call sites.
#[macro_export]
macro_rules! debug_span {
    // Name-only form: `debug_span!("name")`.
    ($name:expr) => {
        tracing::debug_span!($name)
    };
    // Name plus structured fields: `debug_span!("name", key = %value, ...)`.
    ($name:expr, $($field:tt)*) => {
        tracing::debug_span!($name, $($field)*)
    };
}
/// Crate-level alias for [`tracing::info_span!`].
///
/// Forwards its arguments verbatim to `tracing`; it adds no behavior of its
/// own. NOTE(review): presumably kept for uniform `crate::info_span!`
/// usage alongside the other span macros — confirm against call sites.
#[macro_export]
macro_rules! info_span {
    // Name-only form: `info_span!("name")`.
    ($name:expr) => {
        tracing::info_span!($name)
    };
    // Name plus structured fields: `info_span!("name", key = %value, ...)`.
    ($name:expr, $($field:tt)*) => {
        tracing::info_span!($name, $($field)*)
    };
}

View File

@@ -34,6 +34,7 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
op_add_path(), op_add_path(),
op_store_path(), op_store_path(),
op_to_file(), op_to_file(),
op_copy_path_to_store(),
]; ];
ops.extend(crate::fetcher::register_ops()); ops.extend(crate::fetcher::register_ops());
@@ -80,6 +81,7 @@ fn op_import<Ctx: RuntimeContext>(
state: &mut OpState, state: &mut OpState,
#[string] path: String, #[string] path: String,
) -> std::result::Result<String, NixError> { ) -> std::result::Result<String, NixError> {
let _span = tracing::info_span!("op_import", path = %path).entered();
let ctx = state.borrow_mut::<Ctx>(); let ctx = state.borrow_mut::<Ctx>();
let current_dir = ctx.get_current_dir(); let current_dir = ctx.get_current_dir();
@@ -91,10 +93,12 @@ fn op_import<Ctx: RuntimeContext>(
absolute_path.push("default.nix") absolute_path.push("default.nix")
} }
tracing::info!("Importing file: {}", absolute_path.display());
let content = std::fs::read_to_string(&absolute_path) let content = std::fs::read_to_string(&absolute_path)
.map_err(|e| format!("Failed to read {}: {}", absolute_path.display(), e))?; .map_err(|e| format!("Failed to read {}: {}", absolute_path.display(), e))?;
tracing::debug!("compiling file: {}", absolute_path.display()); tracing::debug!("Compiling file");
ctx.set_current_file(absolute_path); ctx.set_current_file(absolute_path);
Ok(ctx.compile_code(&content).map_err(|err| err.to_string())?) Ok(ctx.compile_code(&content).map_err(|err| err.to_string())?)
@@ -251,8 +255,11 @@ fn op_add_path(
} }
let store = state.borrow::<Arc<StoreBackend>>(); let store = state.borrow::<Arc<StoreBackend>>();
let store_dir = store.as_store().get_store_dir();
let store_path = crate::nix_hash::make_store_path(store_dir, "source", &computed_hash, &computed_name); let store_path = store
.as_store()
.add_to_store_from_path(&computed_name, path_obj, vec![])
.map_err(|e| NixError::from(format!("failed to add path to store: {}", e)))?;
Ok(store_path) Ok(store_path)
} }
@@ -334,6 +341,37 @@ fn op_to_file(
Ok(store_path) Ok(store_path)
} }
#[deno_core::op2]
#[string]
fn op_copy_path_to_store(
state: &mut OpState,
#[string] path: String,
) -> std::result::Result<String, NixError> {
use crate::store::StoreBackend;
use std::path::Path;
use std::sync::Arc;
let path_obj = Path::new(&path);
if !path_obj.exists() {
return Err(NixError::from(format!("path '{}' does not exist", path)));
}
let name = path_obj
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("source")
.to_string();
let store = state.borrow::<Arc<StoreBackend>>();
let store_path = store
.as_store()
.add_to_store_from_path(&name, path_obj, vec![])
.map_err(|e| NixError::from(format!("failed to copy path to store: {}", e)))?;
Ok(store_path)
}
pub(crate) struct Runtime<Ctx: RuntimeContext> { pub(crate) struct Runtime<Ctx: RuntimeContext> {
js_runtime: JsRuntime, js_runtime: JsRuntime,
is_thunk_symbol: v8::Global<v8::Symbol>, is_thunk_symbol: v8::Global<v8::Symbol>,

View File

@@ -703,14 +703,17 @@ impl NixDaemonClient {
request: AddToStoreNarRequest, request: AddToStoreNarRequest,
nar_data: &[u8], nar_data: &[u8],
) -> IoResult<()> { ) -> IoResult<()> {
// Send operation tracing::debug!(
"add_to_store_nar: path={}, nar_size={}",
request.path.to_absolute_path(),
request.nar_size,
);
self.writer.write_value(&Operation::AddToStoreNar).await?; self.writer.write_value(&Operation::AddToStoreNar).await?;
// Manually serialize request fields
self.writer.write_value(&request.path).await?; self.writer.write_value(&request.path).await?;
self.writer.write_value(&request.deriver).await?; self.writer.write_value(&request.deriver).await?;
// Write nar_hash as hex string
let nar_hash_hex = hex::encode(request.nar_hash.as_ref()); let nar_hash_hex = hex::encode(request.nar_hash.as_ref());
self.writer.write_value(&nar_hash_hex).await?; self.writer.write_value(&nar_hash_hex).await?;
@@ -723,25 +726,19 @@ impl NixDaemonClient {
self.writer.write_value(&request.repair).await?; self.writer.write_value(&request.repair).await?;
self.writer.write_value(&request.dont_check_sigs).await?; self.writer.write_value(&request.dont_check_sigs).await?;
// For protocol >= 1.23, use framed protocol
// For < 1.23, send NAR data directly
if self.protocol_version.minor() >= 23 { if self.protocol_version.minor() >= 23 {
// Write frame header (length)
self.writer.write_number(nar_data.len() as u64).await?; self.writer.write_number(nar_data.len() as u64).await?;
// Write NAR data self.writer.write_all(nar_data).await?;
self.writer.write_slice(nar_data).await?;
// Write end-of-frame marker (0)
self.writer.write_number(0u64).await?; self.writer.write_number(0u64).await?;
} else { } else {
// Pre-framed protocol: write NAR data directly
self.writer.write_slice(nar_data).await?; self.writer.write_slice(nar_data).await?;
} }
self.writer.flush().await?; self.writer.flush().await?;
// Read response - expect STDERR_LAST with no value
loop { loop {
let msg = self.reader.read_number().await?; let msg = self.reader.read_number().await?;
if msg == STDERR_LAST { if msg == STDERR_LAST {
return Ok(()); return Ok(());
} else if msg == STDERR_ERROR { } else if msg == STDERR_ERROR {