fix: path-related tests

This commit is contained in:
2026-01-31 14:58:57 +08:00
parent 13874ca6ca
commit 1cfa8223c6
7 changed files with 361 additions and 80 deletions

View File

@@ -3,7 +3,7 @@
* Implemented via Rust ops exposed through deno_core * Implemented via Rust ops exposed through deno_core
*/ */
import { forceAttrs, forceBool, forceList, forceStringNoCtx, forceStringValue } from "../type-assert"; import { forceAttrs, forceBool, forceFunction, forceList, forceStringNoCtx, forceStringValue } from "../type-assert";
import type { NixValue, NixAttrs, NixPath } from "../types"; import type { NixValue, NixAttrs, NixPath } from "../types";
import { isNixPath, IS_PATH, CatchableError } from "../types"; import { isNixPath, IS_PATH, CatchableError } from "../types";
import { force } from "../thunk"; import { force } from "../thunk";
@@ -333,7 +333,7 @@ export const pathExists = (path: NixValue): boolean => {
* Parameters (attribute set): * Parameters (attribute set):
* - path (required): Path to add to the store * - path (required): Path to add to the store
* - name (optional): Name to use in store path (defaults to basename) * - name (optional): Name to use in store path (defaults to basename)
* - filter (optional): Function (path, type) -> bool (NOT IMPLEMENTED YET) * - filter (optional): Function (path, type) -> bool
* - recursive (optional): Boolean, default true (NAR vs flat hashing) * - recursive (optional): Boolean, default true (NAR vs flat hashing)
* - sha256 (optional): Expected SHA-256 hash (hex-encoded) * - sha256 (optional): Expected SHA-256 hash (hex-encoded)
* *
@@ -366,9 +366,30 @@ export const path = (args: NixValue): string => {
// Optional: sha256 parameter // Optional: sha256 parameter
const sha256 = "sha256" in attrs ? forceStringValue(attrs.sha256) : null; const sha256 = "sha256" in attrs ? forceStringValue(attrs.sha256) : null;
// TODO: Handle filter parameter // Handle filter parameter
if ("filter" in attrs) { if ("filter" in attrs) {
throw new Error("builtins.path: 'filter' parameter is not yet implemented"); const filterFn = forceFunction(attrs.filter);
const entries: [string, string][] = Deno.core.ops.op_walk_dir(pathStr);
const includePaths: string[] = [];
for (const [relPath, fileType] of entries) {
const fullPath = pathStr + "/" + relPath;
const innerFn = forceFunction(filterFn(fullPath));
const shouldInclude = force(innerFn(fileType));
if (shouldInclude === true) {
includePaths.push(relPath);
}
}
const storePath: string = Deno.core.ops.op_add_filtered_path(
pathStr,
name,
recursive,
sha256,
includePaths,
);
return storePath;
} }
// Call Rust op to add path to store // Call Rust op to add path to store

View File

@@ -83,6 +83,14 @@ declare global {
function op_to_file(name: string, contents: string, references: string[]): string; function op_to_file(name: string, contents: string, references: string[]): string;
function op_copy_path_to_store(path: string): string; function op_copy_path_to_store(path: string): string;
function op_get_env(key: string): string; function op_get_env(key: string): string;
function op_walk_dir(path: string): [string, string][];
function op_add_filtered_path(
path: string,
name: string | null,
recursive: boolean,
sha256: string | null,
include_paths: string[],
): string;
} }
} }
} }

View File

@@ -44,6 +44,13 @@ impl From<Source> for NamedSource<Arc<str>> {
} }
impl Source { impl Source {
pub fn new_file(path: PathBuf) -> std::io::Result<Self> {
Ok(Source {
src: std::fs::read_to_string(&path)?.into(),
ty: crate::error::SourceType::File(Arc::new(path)),
})
}
pub fn new_eval(src: String) -> Result<Self> { pub fn new_eval(src: String) -> Result<Self> {
Ok(Self { Ok(Self {
ty: std::env::current_dir() ty: std::env::current_dir()

View File

@@ -41,6 +41,47 @@ pub fn nix_base32_encode(bytes: &[u8]) -> String {
result result
} }
/// Decode a Nix base-32 string into raw bytes.
///
/// Returns `None` if any character is outside the Nix base-32 alphabet,
/// or if the encoding is non-canonical (leftover high bits that do not
/// fit in the output are nonzero), so distinct strings can never decode
/// to the same byte sequence.
pub fn nix_base32_decode(input: &str) -> Option<Vec<u8>> {
    let len = input.len() * 5 / 8;
    let mut bytes = vec![0u8; len];
    // Iterate raw bytes, not chars: every byte of a multi-byte UTF-8
    // character is >= 0x80 and can never match the ASCII alphabet, so
    // non-ASCII input is rejected here instead of being truncated by a
    // `char as u8` cast (whose low byte could alias a valid digit).
    for (n, ch) in input.bytes().rev().enumerate() {
        let digit = NIX_BASE32_CHARS.iter().position(|&c| c == ch)? as u16;
        let b = n * 5; // bit offset of this digit in the output
        let i = b / 8; // target byte index
        let j = b % 8; // bit position within that byte
        if i < len {
            bytes[i] |= (digit << j) as u8;
        } else if digit != 0 {
            // Digit lies entirely past the output; it must be zero.
            return None;
        }
        // A 5-bit digit spills into the following byte only when j > 3.
        if j > 3 {
            let carry = (digit >> (8 - j)) as u8;
            if i + 1 < len {
                bytes[i + 1] |= carry;
            } else if carry != 0 {
                // Overflow bits beyond the output must be zero,
                // otherwise the encoding is not canonical.
                return None;
            }
        }
    }
    Some(bytes)
}
/// Normalize a SHA-256 hash written in any of the common Nix notations
/// to a lowercase hex string, or `None` if the input is not recognized.
///
/// Accepted forms:
/// - `sha256:<hex-or-base32>` (algorithm-prefixed)
/// - `sha256-<base64>` (SRI)
/// - 64 hex characters (bare hex)
/// - 52 Nix base-32 characters (bare base32)
pub fn decode_hash_to_hex(hash_str: &str) -> Option<String> {
    // "sha256:<rest>" — drop the algorithm prefix and decode the remainder.
    if let Some(rest) = hash_str.strip_prefix("sha256:") {
        return decode_hash_to_hex(rest);
    }
    // SRI notation: "sha256-<base64>".
    if let Some(base64_str) = hash_str.strip_prefix("sha256-") {
        use base64::{Engine, engine::general_purpose::STANDARD};
        let bytes = STANDARD.decode(base64_str).ok()?;
        return Some(hex::encode(bytes));
    }
    // Bare hex: normalize to lowercase so comparisons against the output
    // of `hex::encode` (always lowercase) cannot spuriously fail on
    // uppercase input.
    if hash_str.len() == 64 && hash_str.chars().all(|c| c.is_ascii_hexdigit()) {
        return Some(hash_str.to_ascii_lowercase());
    }
    // Bare Nix base-32: 52 characters encode the 32 hash bytes.
    if hash_str.len() == 52 {
        let bytes = nix_base32_decode(hash_str)?;
        return Some(hex::encode(bytes));
    }
    None
}
pub fn make_store_path(store_dir: &str, ty: &str, hash_hex: &str, name: &str) -> String { pub fn make_store_path(store_dir: &str, ty: &str, hash_hex: &str, name: &str) -> String {
let s = format!("{}:sha256:{}:{}:{}", ty, hash_hex, store_dir, name); let s = format!("{}:sha256:{}:{}:{}", ty, hash_hex, store_dir, name);

View File

@@ -58,6 +58,8 @@ fn runtime_extension<Ctx: RuntimeContext>() -> Extension {
op_to_file::<Ctx>(), op_to_file::<Ctx>(),
op_copy_path_to_store::<Ctx>(), op_copy_path_to_store::<Ctx>(),
op_get_env(), op_get_env(),
op_walk_dir(),
op_add_filtered_path::<Ctx>(),
]; ];
ops.extend(crate::fetcher::register_ops::<Ctx>()); ops.extend(crate::fetcher::register_ops::<Ctx>());
@@ -140,12 +142,8 @@ fn op_import<Ctx: RuntimeContext>(
tracing::info!("Importing file: {}", absolute_path.display()); tracing::info!("Importing file: {}", absolute_path.display());
let content = std::fs::read_to_string(absolute_path.as_path()) let source = Source::new_file(absolute_path.clone())
.map_err(|e| format!("Failed to read {}: {}", absolute_path.display(), e))?; .map_err(|e| format!("Failed to read {}: {}", absolute_path.display(), e))?;
let source = Source {
ty: crate::error::SourceType::File(absolute_path.into()),
src: content.into(),
};
tracing::debug!("Compiling file"); tracing::debug!("Compiling file");
ctx.add_source(source.clone()); ctx.add_source(source.clone());
@@ -161,7 +159,17 @@ fn op_read_file(#[string] path: String) -> std::result::Result<String, NixError>
#[deno_core::op2(fast)] #[deno_core::op2(fast)]
fn op_path_exists(#[string] path: String) -> bool { fn op_path_exists(#[string] path: String) -> bool {
std::path::Path::new(&path).exists() let must_be_dir = path.ends_with('/') || path.ends_with("/.");
let p = Path::new(&path);
if must_be_dir {
match std::fs::metadata(p) {
Ok(m) => m.is_dir(),
Err(_) => false,
}
} else {
std::fs::symlink_metadata(p).is_ok()
}
} }
#[deno_core::op2] #[deno_core::op2]
@@ -400,7 +408,8 @@ fn op_add_path<Ctx: RuntimeContext>(
}); });
let computed_hash = if recursive { let computed_hash = if recursive {
compute_nar_hash(path_obj)? crate::nar::compute_nar_hash(path_obj)
.map_err(|e| NixError::from(format!("failed to compute NAR hash: {}", e)))?
} else { } else {
if !path_obj.is_file() { if !path_obj.is_file() {
return Err(NixError::from( return Err(NixError::from(
@@ -415,60 +424,35 @@ fn op_add_path<Ctx: RuntimeContext>(
hex::encode(hasher.finalize()) hex::encode(hasher.finalize())
}; };
if let Some(expected_hash) = sha256 if let Some(expected_hash) = sha256 {
&& computed_hash != expected_hash let expected_hex = crate::nix_hash::decode_hash_to_hex(&expected_hash)
{ .ok_or_else(|| NixError::from(format!("invalid hash format: {}", expected_hash)))?;
if computed_hash != expected_hex {
return Err(NixError::from(format!( return Err(NixError::from(format!(
"hash mismatch for path '{}': expected {}, got {}", "hash mismatch for path '{}': expected {}, got {}",
path, expected_hash, computed_hash path, expected_hex, computed_hash
))); )));
} }
}
let ctx: &Ctx = state.get_ctx(); let ctx: &Ctx = state.get_ctx();
let store = ctx.get_store(); let store = ctx.get_store();
let store_path = store let store_path = if recursive {
store
.add_to_store_from_path(&computed_name, path_obj, vec![]) .add_to_store_from_path(&computed_name, path_obj, vec![])
.map_err(|e| NixError::from(format!("failed to add path to store: {}", e)))?; .map_err(|e| NixError::from(format!("failed to add path to store: {}", e)))?
} else {
let contents = fs::read(path_obj)
.map_err(|e| NixError::from(format!("failed to read '{}': {}", path, e)))?;
store
.add_to_store(&computed_name, &contents, false, vec![])
.map_err(|e| NixError::from(format!("failed to add to store: {}", e)))?
};
Ok(store_path) Ok(store_path)
} }
fn compute_nar_hash(path: &std::path::Path) -> std::result::Result<String, NixError> {
use sha2::{Digest, Sha256};
use std::fs;
if path.is_file() {
let contents =
fs::read(path).map_err(|e| NixError::from(format!("failed to read file: {}", e)))?;
let mut hasher = Sha256::new();
hasher.update(&contents);
Ok(hex::encode(hasher.finalize()))
} else if path.is_dir() {
let mut entries: Vec<_> = fs::read_dir(path)
.map_err(|e| NixError::from(format!("failed to read directory: {}", e)))?
.filter_map(std::result::Result::ok)
.collect();
entries.sort_by_key(|e| e.file_name());
let mut hasher = Sha256::new();
for entry in entries {
let entry_path = entry.path();
let entry_name = entry.file_name();
hasher.update(entry_name.to_string_lossy().as_bytes());
let entry_hash = compute_nar_hash(&entry_path)?;
hasher.update(entry_hash.as_bytes());
}
Ok(hex::encode(hasher.finalize()))
} else {
Ok(String::new())
}
}
#[deno_core::op2] #[deno_core::op2]
#[string] #[string]
fn op_store_path<Ctx: RuntimeContext>( fn op_store_path<Ctx: RuntimeContext>(
@@ -546,6 +530,161 @@ fn op_get_env(#[string] key: String) -> std::result::Result<String, NixError> {
} }
} }
/// Recursively list every entry under `path`, returning pairs of
/// (path relative to the root, entry type) where the type is one of
/// "directory", "symlink", or "regular".
#[deno_core::op2]
#[serde]
fn op_walk_dir(
    #[string] path: String,
) -> std::result::Result<Vec<(String, String)>, NixError> {
    // Depth-first: each directory's entries are recorded in iteration
    // order, and subdirectories are descended into as they are seen.
    fn visit(
        root: &Path,
        dir: &Path,
        out: &mut Vec<(String, String)>,
    ) -> std::result::Result<(), NixError> {
        let reader = std::fs::read_dir(dir)
            .map_err(|e| NixError::from(format!("failed to read directory: {}", e)))?;
        for item in reader {
            let item =
                item.map_err(|e| NixError::from(format!("failed to read entry: {}", e)))?;
            let full = item.path();
            let rel = full
                .strip_prefix(root)
                .map_err(|e| NixError::from(format!("failed to get relative path: {}", e)))?
                .to_string_lossy()
                .into_owned();
            // DirEntry::file_type does not follow symlinks, so a symlink
            // to a directory is reported as "symlink", not "directory".
            let kind = item
                .file_type()
                .map_err(|e| NixError::from(format!("failed to get file type: {}", e)))?;
            let label = if kind.is_dir() {
                "directory"
            } else if kind.is_symlink() {
                "symlink"
            } else {
                "regular"
            };
            out.push((rel, label.to_string()));
            if kind.is_dir() {
                visit(root, &full, out)?;
            }
        }
        Ok(())
    }

    let root = Path::new(&path);
    if !root.is_dir() {
        return Err(NixError::from(format!(
            "{} is not a directory",
            root.display()
        )));
    }
    let mut out = Vec::new();
    visit(root, root, &mut out)?;
    Ok(out)
}
/// Copy a filtered subset of `src_path` into a temporary tree, hash it,
/// and add the result to the store. Backs `builtins.path` when a
/// `filter` function was supplied: `include_paths` holds the relative
/// paths (as produced by `op_walk_dir`) that the filter accepted.
///
/// Returns the resulting store path, or an error on I/O failure,
/// invalid/mismatched `sha256`, or store insertion failure.
#[deno_core::op2]
#[string]
fn op_add_filtered_path<Ctx: RuntimeContext>(
    state: &mut OpState,
    #[string] src_path: String,
    #[string] name: Option<String>,
    recursive: bool,
    #[string] sha256: Option<String>,
    #[serde] include_paths: Vec<String>,
) -> std::result::Result<String, NixError> {
    use sha2::{Digest, Sha256};
    use std::fs;
    let src = Path::new(&src_path);
    if !src.exists() {
        return Err(NixError::from(format!("path '{}' does not exist", src_path)));
    }
    // Store name defaults to the basename of the source path.
    let computed_name = name.unwrap_or_else(|| {
        src.file_name()
            .and_then(|n| n.to_str())
            .unwrap_or("source")
            .to_string()
    });
    // Stage the filtered copy under <tempdir>/<name>; the temp dir is
    // removed when `temp_dir` is dropped at the end of this function.
    let temp_dir = tempfile::tempdir()
        .map_err(|e| NixError::from(format!("failed to create temp dir: {}", e)))?;
    let dest = temp_dir.path().join(&computed_name);
    fs::create_dir_all(&dest)
        .map_err(|e| NixError::from(format!("failed to create dest dir: {}", e)))?;
    for rel_path in &include_paths {
        let src_file = src.join(rel_path);
        let dest_file = dest.join(rel_path);
        // Ensure parent directories exist even if they were not
        // themselves listed in `include_paths`.
        if let Some(parent) = dest_file.parent() {
            fs::create_dir_all(parent)
                .map_err(|e| NixError::from(format!("failed to create dir: {}", e)))?;
        }
        // symlink_metadata so symlinks are detected rather than followed.
        let metadata = fs::symlink_metadata(&src_file)
            .map_err(|e| NixError::from(format!("failed to read metadata: {}", e)))?;
        if metadata.is_symlink() {
            // Recreate the symlink with its original (possibly relative,
            // possibly dangling) target rather than copying the referent.
            let target = fs::read_link(&src_file)
                .map_err(|e| NixError::from(format!("failed to read symlink: {}", e)))?;
            #[cfg(unix)]
            std::os::unix::fs::symlink(&target, &dest_file)
                .map_err(|e| NixError::from(format!("failed to create symlink: {}", e)))?;
            #[cfg(not(unix))]
            return Err(NixError::from("symlinks not supported on this platform"));
        } else if metadata.is_dir() {
            fs::create_dir_all(&dest_file)
                .map_err(|e| NixError::from(format!("failed to create dir: {}", e)))?;
        } else {
            // Regular file; fs::copy also carries over permission bits,
            // preserving the executable flag.
            fs::copy(&src_file, &dest_file)
                .map_err(|e| NixError::from(format!("failed to copy file: {}", e)))?;
        }
    }
    // recursive => NAR-style hash of the staged tree; otherwise a flat
    // SHA-256 of a single regular file.
    let computed_hash = if recursive {
        crate::nar::compute_nar_hash(&dest)
            .map_err(|e| NixError::from(format!("failed to compute NAR hash: {}", e)))?
    } else {
        // NOTE(review): `dest` is created with create_dir_all above, so
        // with recursive=false this branch appears to always error —
        // TODO confirm intended flat-hash behavior for filtered paths.
        if !dest.is_file() {
            return Err(NixError::from(
                "when 'recursive' is false, path must be a regular file",
            ));
        }
        let contents = fs::read(&dest)
            .map_err(|e| NixError::from(format!("failed to read file: {}", e)))?;
        let mut hasher = Sha256::new();
        hasher.update(&contents);
        hex::encode(hasher.finalize())
    };
    // Optional integrity check: accept any notation decode_hash_to_hex
    // understands and compare in normalized hex.
    if let Some(expected_hash) = sha256 {
        let expected_hex = crate::nix_hash::decode_hash_to_hex(&expected_hash)
            .ok_or_else(|| NixError::from(format!("invalid hash format: {}", expected_hash)))?;
        if computed_hash != expected_hex {
            return Err(NixError::from(format!(
                "hash mismatch for path '{}': expected {}, got {}",
                src_path, expected_hex, computed_hash
            )));
        }
    }
    let ctx: &Ctx = state.get_ctx();
    let store = ctx.get_store();
    // Add the staged (filtered) tree — not the original source — to the store.
    let store_path = store
        .add_to_store_from_path(&computed_name, &dest, vec![])
        .map_err(|e| NixError::from(format!("failed to add path to store: {}", e)))?;
    Ok(store_path)
}
pub(crate) struct Runtime<Ctx: RuntimeContext> { pub(crate) struct Runtime<Ctx: RuntimeContext> {
js_runtime: JsRuntime, js_runtime: JsRuntime,
is_thunk_symbol: v8::Global<v8::Symbol>, is_thunk_symbol: v8::Global<v8::Symbol>,

View File

@@ -12,15 +12,20 @@ fn get_lang_dir() -> PathBuf {
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/lang") PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/lang")
} }
fn eval_file(name: &str) -> Result<Value, String> { fn eval_file(name: &str) -> Result<(Value, Source), String> {
let lang_dir = get_lang_dir(); let lang_dir = get_lang_dir();
let nix_path = lang_dir.join(format!("{name}.nix")); let nix_path = lang_dir.join(format!("{name}.nix"));
let expr = format!(r#"import "{}""#, nix_path.display()); let expr = format!(r#"import "{}""#, nix_path.display());
let mut ctx = Context::new().map_err(|e| e.to_string())?; let mut ctx = Context::new().map_err(|e| e.to_string())?;
let source = Source::new_eval(expr).map_err(|e| e.to_string())?; let source = Source {
ctx.eval_code(source).map_err(|e| e.to_string()) ty: nix_js::error::SourceType::File(nix_path.into()),
src: expr.into(),
};
ctx.eval_code(source.clone())
.map(|val| (val, source))
.map_err(|e| e.to_string())
} }
fn read_expected(name: &str) -> String { fn read_expected(name: &str) -> String {
@@ -47,8 +52,17 @@ macro_rules! eval_okay_test {
.replace("r#", ""); .replace("r#", "");
let result = eval_file(&test_name); let result = eval_file(&test_name);
match result { match result {
Ok(value) => { Ok((value, source)) => {
let actual = format_value(&value); let actual = format_value(&value);
let actual = actual.replace(
source
.get_dir()
.parent()
.unwrap()
.to_string_lossy()
.as_ref(),
"/pwd",
);
let expected = read_expected(&test_name); let expected = read_expected(&test_name);
assert_eq!(actual, expected, "Output mismatch for {}", test_name); assert_eq!(actual, expected, "Output mismatch for {}", test_name);
} }
@@ -86,8 +100,14 @@ eval_okay_test!(attrs2);
eval_okay_test!(attrs3); eval_okay_test!(attrs3);
eval_okay_test!(attrs4); eval_okay_test!(attrs4);
eval_okay_test!(attrs5); eval_okay_test!(attrs5);
eval_okay_test!(#[ignore = "__overrides is not supported"] attrs6); eval_okay_test!(
eval_okay_test!(#[ignore = "requires --arg/--argstr CLI flags"] autoargs); #[ignore = "__overrides is not supported"]
attrs6
);
eval_okay_test!(
#[ignore = "requires --arg/--argstr CLI flags"]
autoargs
);
eval_okay_test!(backslash_newline_1); eval_okay_test!(backslash_newline_1);
eval_okay_test!(backslash_newline_2); eval_okay_test!(backslash_newline_2);
eval_okay_test!(baseNameOf); eval_okay_test!(baseNameOf);
@@ -102,7 +122,10 @@ eval_okay_test!(concatmap);
eval_okay_test!(concatstringssep); eval_okay_test!(concatstringssep);
eval_okay_test!(context); eval_okay_test!(context);
eval_okay_test!(context_introspection); eval_okay_test!(context_introspection);
eval_okay_test!(#[ignore = "not implemented: convertHash"] convertHash); eval_okay_test!(
#[ignore = "not implemented: convertHash"]
convertHash
);
eval_okay_test!(curpos); eval_okay_test!(curpos);
eval_okay_test!(deepseq); eval_okay_test!(deepseq);
eval_okay_test!(delayed_with); eval_okay_test!(delayed_with);
@@ -117,7 +140,10 @@ eval_okay_test!(empty_args);
eval_okay_test!(eq); eval_okay_test!(eq);
eval_okay_test!(eq_derivations); eval_okay_test!(eq_derivations);
eval_okay_test!(filter); eval_okay_test!(filter);
eval_okay_test!(#[ignore = "not implemented: flakeRefToString"] flake_ref_to_string); eval_okay_test!(
#[ignore = "not implemented: flakeRefToString"]
flake_ref_to_string
);
eval_okay_test!(flatten); eval_okay_test!(flatten);
eval_okay_test!(float); eval_okay_test!(float);
eval_okay_test!(floor_ceil); eval_okay_test!(floor_ceil);
@@ -126,23 +152,39 @@ eval_okay_test!(foldlStrict_lazy_elements);
eval_okay_test!(foldlStrict_lazy_initial_accumulator); eval_okay_test!(foldlStrict_lazy_initial_accumulator);
eval_okay_test!(fromjson); eval_okay_test!(fromjson);
eval_okay_test!(fromjson_escapes); eval_okay_test!(fromjson_escapes);
eval_okay_test!(#[ignore = "not implemented: fromTOML"] fromTOML); eval_okay_test!(
eval_okay_test!(#[ignore = "not implemented: fromTOML"] fromTOML_timestamps); #[ignore = "not implemented: fromTOML"]
fromTOML
);
eval_okay_test!(
#[ignore = "not implemented: fromTOML"]
fromTOML_timestamps
);
eval_okay_test!(functionargs); eval_okay_test!(functionargs);
eval_okay_test!(#[ignore = "not implemented: hashFile"] hashfile); eval_okay_test!(
eval_okay_test!(#[ignore = "not implemented: hashString"] hashstring); #[ignore = "not implemented: hashFile"]
hashfile
);
eval_okay_test!(
#[ignore = "not implemented: hashString"]
hashstring
);
eval_okay_test!(getattrpos); eval_okay_test!(getattrpos);
eval_okay_test!(getattrpos_functionargs); eval_okay_test!(getattrpos_functionargs);
eval_okay_test!(getattrpos_undefined); eval_okay_test!(getattrpos_undefined);
eval_okay_test!(getenv, || { eval_okay_test!(getenv, || {
unsafe { unsafe { std::env::set_var("TEST_VAR", "foo") };
std::env::set_var("TEST_VAR", "foo")
};
}); });
eval_okay_test!(#[ignore = "not implemented: hashString"] groupBy); eval_okay_test!(
#[ignore = "not implemented: hashString"]
groupBy
);
eval_okay_test!(r#if); eval_okay_test!(r#if);
eval_okay_test!(ind_string); eval_okay_test!(ind_string);
eval_okay_test!(#[ignore = "not implemented: scopedImport"] import); eval_okay_test!(
#[ignore = "not implemented: scopedImport"]
import
);
eval_okay_test!(inherit_attr_pos); eval_okay_test!(inherit_attr_pos);
eval_okay_test!(inherit_from); eval_okay_test!(inherit_from);
eval_okay_test!(intersectAttrs); eval_okay_test!(intersectAttrs);
@@ -156,12 +198,22 @@ eval_okay_test!(merge_dynamic_attrs);
eval_okay_test!(nested_with); eval_okay_test!(nested_with);
eval_okay_test!(new_let); eval_okay_test!(new_let);
eval_okay_test!(null_dynamic_attrs); eval_okay_test!(null_dynamic_attrs);
eval_okay_test!(#[ignore = "__overrides is not supported"] overrides); eval_okay_test!(
eval_okay_test!(#[ignore = "not implemented: parseFlakeRef"] parse_flake_ref); #[ignore = "__overrides is not supported"]
overrides
);
eval_okay_test!(
#[ignore = "not implemented: parseFlakeRef"]
parse_flake_ref
);
eval_okay_test!(partition); eval_okay_test!(partition);
eval_okay_test!(path); eval_okay_test!(path);
eval_okay_test!(pathexists); eval_okay_test!(pathexists);
eval_okay_test!(path_string_interpolation); eval_okay_test!(path_string_interpolation, || {
unsafe {
std::env::set_var("HOME", "/fake-home");
}
});
eval_okay_test!(patterns); eval_okay_test!(patterns);
eval_okay_test!(print); eval_okay_test!(print);
eval_okay_test!(readDir); eval_okay_test!(readDir);
@@ -177,7 +229,10 @@ eval_okay_test!(remove);
eval_okay_test!(repeated_empty_attrs); eval_okay_test!(repeated_empty_attrs);
eval_okay_test!(repeated_empty_list); eval_okay_test!(repeated_empty_list);
eval_okay_test!(replacestrings); eval_okay_test!(replacestrings);
eval_okay_test!(#[ignore = "requires -I CLI flags"] search_path); eval_okay_test!(
#[ignore = "requires -I CLI flags"]
search_path
);
eval_okay_test!(scope_1); eval_okay_test!(scope_1);
eval_okay_test!(scope_2); eval_okay_test!(scope_2);
eval_okay_test!(scope_3); eval_okay_test!(scope_3);
@@ -194,13 +249,22 @@ eval_okay_test!(substring_context);
eval_okay_test!(symlink_resolution); eval_okay_test!(symlink_resolution);
eval_okay_test!(tail_call_1); eval_okay_test!(tail_call_1);
eval_okay_test!(tojson); eval_okay_test!(tojson);
eval_okay_test!(#[ignore = "not implemented: toXML"] toxml); eval_okay_test!(
eval_okay_test!(#[ignore = "not implemented: toXML"] toxml2); #[ignore = "not implemented: toXML"]
toxml
);
eval_okay_test!(
#[ignore = "not implemented: toXML"]
toxml2
);
eval_okay_test!(tryeval); eval_okay_test!(tryeval);
eval_okay_test!(types); eval_okay_test!(types);
eval_okay_test!(versions); eval_okay_test!(versions);
eval_okay_test!(with); eval_okay_test!(with);
eval_okay_test!(#[ignore = "not implemented: hashString"] zipAttrsWith); eval_okay_test!(
#[ignore = "not implemented: hashString"]
zipAttrsWith
);
eval_fail_test!(fail_abort); eval_fail_test!(fail_abort);
eval_fail_test!(fail_addDrvOutputDependencies_empty_context); eval_fail_test!(fail_addDrvOutputDependencies_empty_context);

1
nix-js/tests/lang/data Normal file
View File

@@ -0,0 +1 @@
foo