diff --git a/Cargo.lock b/Cargo.lock index ae6ac408..5c872b5c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2151,7 +2151,7 @@ dependencies = [ [[package]] name = "kit" -version = "0.8.4" +version = "0.8.5" dependencies = [ "alloy", "alloy-sol-macro", @@ -2168,6 +2168,7 @@ dependencies = [ "hex", "kinode_process_lib", "nix 0.27.1", + "proc-macro2", "regex", "reqwest", "rpassword", @@ -2175,8 +2176,11 @@ dependencies = [ "serde", "serde_json", "sha2", + "syn 2.0.75", + "thiserror", "tokio", "toml", + "toml_edit 0.22.20", "tracing", "tracing-appender", "tracing-error", diff --git a/Cargo.toml b/Cargo.toml index f15de3b6..07ce7e94 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "kit" -version = "0.8.4" +version = "0.8.5" edition = "2021" [build-dependencies] @@ -39,6 +39,7 @@ fs-err = "2.11" hex = "0.4" kinode_process_lib = { git = "https://github.com/kinode-dao/process_lib.git", rev = "9ac9e51" } nix = { version = "0.27", features = ["process", "signal", "term"] } +proc-macro2 = "1.0" regex = "1" reqwest = { version = "0.12", features = ["json"] } rpassword = "7" @@ -46,6 +47,9 @@ semver = "1.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" sha2 = "0.10.8" +syn = { version = "2.0", features = ["full", "visit", "extra-traits"] } +#syn = { version = "2.0", features = ["full", "visit"] } +thiserror = "1.0" tokio = { version = "1.28", features = [ "macros", "process", @@ -55,6 +59,7 @@ tokio = { version = "1.28", features = [ "time", ] } toml = "0.8" +toml_edit = "0.22" tracing = "0.1" tracing-appender = "0.2" tracing-error = "0.2" diff --git a/README.md b/README.md index dd99e133..cfa9446f 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ Tool**kit** for developing on [Kinode OS](https://github.com/kinode-dao/kinode). -Documentation in the [Kinode Book](https://book.kinode.org/kit-dev-toolkit.html); example usage [here](https://book.kinode.org/my_first_app/chapter_1.html). +Documentation in the [Kinode Book](https://book.kinode.org/kit/kit-dev-toolkit.html); example usage [here](https://book.kinode.org/my_first_app/chapter_1.html). ## Installing @@ -71,7 +71,10 @@ kit boot-fake-node --runtime-path ~/git/kinode ``` `kit` also contains tools for running tests. -For details and examples, please see [https://github.com/kinode-dao/core_tests](https://github.com/kinode-dao/core_tests). +For details and examples, please see +1. [Kinode Book's example code](https://github.com/kinode-dao/kinode-book/tree/main/code). +2. `kit`s templates, available through `kit new` or [here](https://github.com/kinode-dao/kit/tree/master/src/new/templates/rust). +3. [https://github.com/kinode-dao/core_tests](https://github.com/kinode-dao/core_tests). ## UI Development diff --git a/src/build/mod.rs b/src/build/mod.rs index 1d19e8cc..c24c3c64 100644 --- a/src/build/mod.rs +++ b/src/build/mod.rs @@ -29,6 +29,9 @@ use crate::setup::{ use crate::view_api; use crate::KIT_CACHE; +mod rewrite; +use rewrite::copy_and_rewrite_package; + const PY_VENV_NAME: &str = "process_env"; const JAVASCRIPT_SRC_PATH: &str = "src/lib.js"; const PYTHON_SRC_PATH: &str = "src/lib.py"; @@ -1155,6 +1158,7 @@ async fn fetch_dependencies( default_world: Option<&str>, include: &HashSet, exclude: &HashSet, + rewrite: bool, force: bool, verbose: bool, ) -> Result<()> { @@ -1171,6 +1175,7 @@ async fn fetch_dependencies( default_world, vec![], // TODO: what about deps-of-deps? 
vec![], + rewrite, false, force, verbose, @@ -1207,6 +1212,7 @@ async fn fetch_dependencies( default_world, local_dep_deps, vec![], + rewrite, false, force, verbose, @@ -1522,6 +1528,7 @@ async fn compile_package( add_paths_to_api: &Vec, include: &HashSet, exclude: &HashSet, + rewrite: bool, force: bool, verbose: bool, ignore_deps: bool, // for internal use; may cause problems when adding recursive deps @@ -1544,6 +1551,7 @@ async fn compile_package( default_world, include, exclude, + rewrite, force, verbose, ) @@ -1651,6 +1659,7 @@ pub async fn execute( default_world: Option<&str>, local_dependencies: Vec, add_paths_to_api: Vec, + rewrite: bool, reproducible: bool, force: bool, verbose: bool, @@ -1734,7 +1743,16 @@ pub async fn execute( check_process_lib_version(&package_dir.join("Cargo.toml"))?; - let ui_dirs = get_ui_dirs(package_dir, &include, &exclude)?; + // live_dir is the "dir that is being built" or is "live"; + // if `!rewrite`, that is just `package_dir`; + // else, it is the modified copy that is in `target/rewrite/` + let live_dir = if !rewrite { + PathBuf::from(package_dir) + } else { + copy_and_rewrite_package(package_dir)? + }; + + let ui_dirs = get_ui_dirs(&live_dir, &include, &exclude)?; if !no_ui && !ui_dirs.is_empty() { if !skip_deps_check { let mut recv_kill = make_fake_kill_chan(); @@ -1749,7 +1767,7 @@ pub async fn execute( if !ui_only { compile_package( - package_dir, + &live_dir, skip_deps_check, features, url, @@ -1759,6 +1777,7 @@ pub async fn execute( &add_paths_to_api, &include, &exclude, + rewrite, force, verbose, ignore_deps, @@ -1766,6 +1785,13 @@ pub async fn execute( .await?; } + if rewrite { + if package_dir.join("pkg").exists() { + fs::remove_dir_all(package_dir.join("pkg"))?; + } + copy_dir(live_dir.join("pkg"), package_dir.join("pkg"))?; + } + let metadata = read_metadata(package_dir)?; let pkg_publisher = make_pkg_publisher(&metadata); let (_zip_filename, hash_string) = zip_pkg(package_dir, &pkg_publisher)?; diff --git a/src/build/rewrite.rs b/src/build/rewrite.rs new file mode 100644 index 00000000..e43c4508 --- /dev/null +++ b/src/build/rewrite.rs @@ -0,0 +1,962 @@ +use std::collections::{HashMap, HashSet}; +use std::path::{Path, PathBuf}; + +use color_eyre::{eyre::eyre, Result}; +use fs_err as fs; +use regex::Regex; +use syn::{ + __private::ToTokens, + parse_str, + visit::{self, Visit}, +}; +use toml_edit; +use tracing::{debug, instrument}; + +use crate::new::snake_to_upper_camel_case; + +#[derive(Debug, Default)] +struct GeneratedProcesses { + // original process name -> (generated process name -> (wasm path, content)) + processes: HashMap>, +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +struct GeneratedProcessesExternal { + // original process name -> (generated process name -> wasm path) + processes: HashMap>, +} + +impl From for GeneratedProcessesExternal { + fn from(input: GeneratedProcesses) -> Self { + let processes = input + .processes + .iter() + .map(|(parent_name, child_to_content)| { + ( + parent_name.to_string(), + child_to_content + .iter() + .map(|(child_name, (path, _content))| { + (child_name.to_string(), path.to_string()) + }) + .collect(), + ) + }) + .collect(); + GeneratedProcessesExternal { processes } + } +} + +#[derive(Debug, Clone)] +struct ArgInfo { + name: String, + ty: String, +} + +#[derive(Debug, Clone)] +struct ReturnInfo { + ty: String, +} + +#[derive(Debug, Clone)] +struct FnSignature { + args: Vec, + ret: Option, +} + +#[derive(Debug)] +struct FnInfo { + name: String, + signature: FnSignature, + body: 
String, + dependencies: HashSet, +} + +#[derive(Debug)] +enum SpawnType { + Closure { args: String, body: String }, + FnCall { name: String, args: Vec }, +} + +#[derive(Debug)] +struct SpawnMatch { + spawn_type: SpawnType, + imports: Vec, + start_pos: usize, + end_pos: usize, +} + +#[derive(Debug, thiserror::Error)] +enum SpawnParseError { + #[error("Parse failed due to malformed imports")] + Imports, + #[error("Spawn parse failed due to malformed closure: no closing pipe in closure")] + NoClosingPipe, + #[error("Spawn parse failed due to malformed closure: no opening brace `{{`")] + NoOpeningBrace, + #[error("Spawn parse failed due to malformed closure: no opening paren `(`")] + NoOpeningParen, + #[error("Spawn parse failed due to malformed closure: no opening bracket `[`")] + NoOpeningBracket, + #[error("Spawn parse failed due to malformed closure: unclosed brace `{{`")] + UnclosedBrace, + #[error("Spawn parse failed due to malformed closure: unclosed paren `(`")] + UnclosedParen, + #[error("Spawn parse failed due to malformed closure: unclosed bracket` `[`")] + UnclosedBracket, + #[error("Spawn parse failed: malformed function call")] + MalformedFunctionCall, + #[error("Spawn parse failed: no opening paren for arguments")] + UnclosedArgsParen, + #[error("Spawn parse failed: unclosed spawn paren")] + UnclosedSpawnParen, + #[error("Spawn parse failed: must start with `Spawn!(`")] + InvalidSpawnSyntax, +} + +// TODO: factor out with build::mod.rs::copy_dir() +#[instrument(level = "trace", skip_all)] +fn copy_dir(src: impl AsRef, dst: impl AsRef) -> Result<()> { + let src = src.as_ref(); + let dst = dst.as_ref(); + if !dst.exists() { + fs::create_dir_all(dst)?; + } + + for entry in fs::read_dir(src)? { + let entry = entry?; + let src_path = entry.path(); + let dst_path = dst.join(entry.file_name()); + + if src_path.is_dir() { + if src_path.file_name().and_then(|n| n.to_str()) == Some("target") { + continue; + } + copy_dir(&src_path, &dst_path)?; + } else { + fs::copy(&src_path, &dst_path)?; + } + } + Ok(()) +} + +fn make_args_struct_name(worker_name: &str) -> String { + format!( + "{}Args", + snake_to_upper_camel_case(&worker_name.replace("-", "_")) + ) +} + +#[instrument(level = "trace", skip_all)] +fn parse_fn_signature(args: &str) -> Result { + // Parse the argument string as Rust function parameters + let fn_item: syn::ItemFn = parse_str(&format!("fn dummy({args}) {{}}"))?; + + // Extract the parameters from the function signature + let args = fn_item + .sig + .inputs + .into_iter() + .filter_map(|param| { + if let syn::FnArg::Typed(pat_type) = param { + Some(ArgInfo { + name: pat_type.pat.into_token_stream().to_string(), + ty: pat_type.ty.into_token_stream().to_string(), + }) + } else { + None + } + }) + .collect(); + + // Extract return type if present + let ret = match fn_item.sig.output { + syn::ReturnType::Default => None, + syn::ReturnType::Type(_, ty) => Some(ReturnInfo { + ty: ty.into_token_stream().to_string(), + }), + }; + + Ok(FnSignature { args, ret }) +} + +fn generate_args_struct_type(struct_name: &str, args: &[ArgInfo]) -> String { + let fields = args + .iter() + .map(|arg| format!(" {}: {},", arg.name, arg.ty)) + .collect::>() + .join("\n"); + + format!( + r#"#[derive(serde::Serialize, serde::Deserialize)] +struct {struct_name} {{ +{fields} +}}"# + ) +} + +fn generate_args_struct_instance(struct_name: &str, args: &[ArgInfo]) -> String { + let fields = args + .iter() + .map(|arg| format!(" {0}: {0}.clone(),", arg.name)) + .collect::>() + .join("\n"); + + format!( + r#"let 
args = {struct_name} {{ +{fields} + }};"# + ) +} + +fn generate_args_struct_destructure(struct_name: &str, args: &[ArgInfo]) -> String { + let fields = args + .iter() + .map(|arg| arg.name.clone()) + .collect::>() + .join(", "); + + format!(r#"let {struct_name} {{ {fields} }}"#) +} + +fn extract_imports(content: &str) -> Result, SpawnParseError> { + let imports_re = Regex::new(r"use\s+([^;]+);").map_err(|_| SpawnParseError::Imports)?; + Ok(imports_re + .captures_iter(content) + .map(|cap| cap[1].trim().to_string()) + .collect()) +} + +fn extract_wit_bindgen(content: &str) -> Option { + // Look for wit_bindgen::generate! macro + if let Some(start) = content.find("wit_bindgen::generate!") { + let mut brace_count = 0; + let mut in_macro = false; + let mut saw_closing_brace = false; + let mut saw_closing_paren = false; + let mut macro_end = start; + + // Find the closing part of the macro by counting braces + for (i, c) in content[start..].chars().enumerate() { + match c { + '{' => { + brace_count += 1; + in_macro = true; + } + '}' => { + brace_count -= 1; + if in_macro && brace_count == 0 { + saw_closing_brace = true; + } + } + ')' => { + if in_macro && saw_closing_brace && brace_count == 0 { + saw_closing_paren = true; + } + } + ';' => { + if in_macro && saw_closing_brace && saw_closing_paren && brace_count == 0 { + macro_end = start + i + 1; + break; + } + } + _ => {} + } + } + + Some(content[start..macro_end].to_string()) + } else { + None + } +} + +#[instrument(level = "trace", skip_all)] +fn extract_functions(content: &str) -> Result> { + let syntax_tree = syn::parse_file(content)?; + let mut functions = HashMap::new(); + + for item in syntax_tree.items { + if let syn::Item::Fn(func) = item { + let name = func.sig.ident.to_string(); + // Extract both args and return type + let signature = FnSignature { + args: func + .sig + .inputs + .iter() + .filter_map(|arg| { + if let syn::FnArg::Typed(pat_type) = arg { + Some(ArgInfo { + name: pat_type.pat.to_token_stream().to_string(), + ty: pat_type.ty.to_token_stream().to_string(), + }) + } else { + None + } + }) + .collect(), + ret: match &func.sig.output { + syn::ReturnType::Default => None, + syn::ReturnType::Type(_, ty) => Some(ReturnInfo { + ty: ty.into_token_stream().to_string(), + }), + }, + }; + + let mut deps = HashSet::new(); + find_fn_calls(&func.block, &mut deps); + + functions.insert( + name.clone(), + FnInfo { + name, + signature, + body: func.block.to_token_stream().to_string(), + dependencies: deps, + }, + ); + } + } + + Ok(functions) +} + +fn find_fn_calls(block: &syn::Block, deps: &mut HashSet) { + fn inspect_expr(expr: &syn::Expr, deps: &mut HashSet) { + match expr { + syn::Expr::Call(call) => { + // Check direct function call + if let syn::Expr::Path(path) = &*call.func { + if let Some(ident) = path.path.get_ident() { + deps.insert(ident.to_string()); + } + } + // Check arguments recursively + for arg in &call.args { + inspect_expr(arg, deps); + } + } + syn::Expr::Macro(mac) => { + // Convert tokens to string and look for function calls + let tokens = mac.mac.tokens.clone(); + let tokens_str = tokens.to_string(); + + // Split on comma and look at each part + for part in tokens_str.split(',') { + // Look for function call pattern: function_name(args) + if let Some(func_name) = part.trim().split('(').next() { + // Ignore format specifiers and other non-function tokens + if !func_name.contains('"') && !func_name.is_empty() { + deps.insert(func_name.trim().to_string()); + } + } + } + + // Still try to parse as expression for other 
cases + if let Ok(expr) = syn::parse2::(tokens) { + inspect_expr(&expr, deps); + } + } + syn::Expr::Block(block_expr) => { + for stmt in &block_expr.block.stmts { + inspect_stmt(stmt, deps); + } + } + _ => {} + } + } + + fn inspect_stmt(stmt: &syn::Stmt, deps: &mut HashSet) { + match stmt { + syn::Stmt::Expr(expr, _) => inspect_expr(expr, deps), + syn::Stmt::Local(local) => { + if let Some(init) = &local.init { + inspect_expr(&init.expr, deps); + } + } + syn::Stmt::Macro(mac_stmt) => { + if let Ok(expr) = syn::parse2::(mac_stmt.mac.tokens.clone()) { + inspect_expr(&expr, deps); + } else { + // Handle tokens directly for macro statements too + let tokens_str = mac_stmt.mac.tokens.to_string(); + for part in tokens_str.split(',') { + if let Some(func_name) = part.trim().split('(').next() { + if !func_name.contains('"') && !func_name.is_empty() { + deps.insert(func_name.trim().to_string()); + } + } + } + } + } + _ => {} + } + } + + for stmt in &block.stmts { + inspect_stmt(stmt, deps); + } +} + +fn parse_spawn_closure(input: &str) -> Result<(String, String, usize), SpawnParseError> { + // Skip the "Spawn!(|" prefix since we know it's there + let input_after_spawn = &input["Spawn!(|".len()..]; + + // Find the closing "|" + let pipe_end = input_after_spawn + .find('|') + .ok_or(SpawnParseError::NoClosingPipe)?; + + // Find the opening "{" + let brace_start = input_after_spawn[pipe_end..] + .find('{') + .ok_or(SpawnParseError::NoOpeningBrace)? + .saturating_add(pipe_end); + + // Find the closing "}" while handling nested braces + let mut brace_count = 1; + let mut brace_end = None; + let mut paren_end = None; + + for (i, c) in input_after_spawn[brace_start + 1..].chars().enumerate() { + match c { + '{' => brace_count += 1, + '}' => { + brace_count -= 1; + if brace_count == 0 { + brace_end = Some(brace_start + 1 + i); + } + } + ')' => { + if brace_count == 0 && brace_end.is_some() { + paren_end = Some(brace_start + 1 + i); + break; + } + } + _ => {} + } + } + + let brace_end = brace_end.ok_or(SpawnParseError::UnclosedBrace)?; + let paren_end = paren_end.ok_or(SpawnParseError::UnclosedParen)?; + + let args = input_after_spawn[..pipe_end].trim().to_string(); + let body = input_after_spawn[brace_start + 1..brace_end] + .trim() + .to_string(); + + // Return the total length consumed so we know where to continue searching + let total_consumed = "Spawn!(|".len() + paren_end + 1; + + Ok((args, body, total_consumed)) +} + +fn parse_spawn_fn_call(input: &str) -> Result<(String, Vec, usize), SpawnParseError> { + // Skip the "Spawn!(" prefix + let input_after_spawn = &input["Spawn!(".len()..]; + + // Find the function name (everything up to first '(' or whitespace) + let name_end = input_after_spawn + .find(|c: char| c == '(' || c.is_whitespace()) + .ok_or(SpawnParseError::MalformedFunctionCall)?; + let name = input_after_spawn[..name_end].trim().to_string(); + + // Find opening paren of args + let args_start = input_after_spawn[name_end..] + .find('(') + .ok_or(SpawnParseError::NoOpeningParen)? + .saturating_add(name_end); + + // Find closing paren while handling nested parens + let mut paren_count = 1; + let mut args_end = None; + let mut closing_spawn_paren = None; + + for (i, c) in input_after_spawn[args_start + 1..].chars().enumerate() { + match c { + '(' => paren_count += 1, + ')' => { + paren_count -= 1; + if paren_count == 0 { + args_end = Some(args_start + 1 + i); + } else if paren_count == -1 { + // This is the closing paren of Spawn!(...) 
+ closing_spawn_paren = Some(args_start + 1 + i); + break; + } + } + _ => {} + } + } + + let args_end = args_end.ok_or(SpawnParseError::UnclosedArgsParen)?; + let closing_spawn_paren = closing_spawn_paren.ok_or(SpawnParseError::UnclosedSpawnParen)?; + + // Parse args list by splitting on commas, handling nested stuff + let args_str = input_after_spawn[args_start + 1..args_end].trim(); + let args = split_args(args_str)?; + + // Return total consumed length including both closing parens + let total_consumed = "Spawn!(".len() + closing_spawn_paren + 1; + + Ok((name, args, total_consumed)) +} + +fn split_args(args: &str) -> Result, SpawnParseError> { + let mut result = Vec::new(); + let mut current = String::new(); + let mut paren_count = 0; + let mut brace_count = 0; + let mut bracket_count = 0; + + for c in args.chars() { + match c { + '(' => paren_count += 1, + ')' => paren_count -= 1, + '{' => brace_count += 1, + '}' => brace_count -= 1, + '[' => bracket_count += 1, + ']' => bracket_count -= 1, + ',' if paren_count == 0 && brace_count == 0 && bracket_count == 0 => { + result.push(current.trim().to_string()); + current = String::new(); + continue; + } + _ => {} + } + current.push(c); + } + + if !current.is_empty() { + result.push(current.trim().to_string()); + } + + if paren_count != 0 { + return Err(SpawnParseError::UnclosedParen); + } + if brace_count != 0 { + return Err(SpawnParseError::UnclosedBrace); + } + if bracket_count != 0 { + return Err(SpawnParseError::UnclosedBracket); + } + + Ok(result) +} + +fn parse_spawn_from(input: &str) -> Result<(SpawnType, usize), SpawnParseError> { + if input.starts_with("Spawn!(|") { + // Existing closure parsing logic + let (args, body, consumed) = parse_spawn_closure(&input)?; + Ok((SpawnType::Closure { args, body }, consumed)) + } else if input.starts_with("Spawn!(") { + // Function call parsing logic + debug!("parsing non-closure `Spawn!(`"); + let (name, args, consumed) = parse_spawn_fn_call(&input)?; + Ok((SpawnType::FnCall { name, args }, consumed)) + } else { + Err(SpawnParseError::InvalidSpawnSyntax) + } +} + +fn add_function_and_deps( + name: &str, + functions: &HashMap, + needed: &mut HashSet, +) { + needed.insert(name.to_string()); + if let Some(info) = functions.get(name) { + for dep in &info.dependencies { + add_function_and_deps(dep, functions, needed); + } + } +} + +#[instrument(level = "trace", skip_all)] +fn generate_worker_process( + process_name: &str, + spawn_match: &SpawnMatch, + functions: &HashMap, + imports: &[String], + wit_bindgen: &str, + args_type: &str, + args_destructure: &str, +) -> Result { + let mut needed_fns = HashSet::new(); + + // Get return type if it's a function call + let return_type = match &spawn_match.spawn_type { + SpawnType::FnCall { name, .. } => { + if let Some(fn_info) = functions.get(name) { + fn_info.signature.ret.clone() + } else { + None + } + } + SpawnType::Closure { .. } => None, // Closures don't have return types in our context + }; + + // Get list of functions we need to copy + match &spawn_match.spawn_type { + SpawnType::Closure { body, .. 
} => { + // Parse body to find function calls + // Add braces back before parsing + let block_str = format!("{{{body}}}"); + let syntax_tree = syn::parse_str::(&block_str)?; + // First find direct function calls in the closure + find_fn_calls(&syntax_tree, &mut needed_fns); + debug!("generate_worker_process find_fn_calls needed_fns {needed_fns:?}"); + debug!("{:?}", functions.keys().collect::>()); + // Then recursively add dependencies for each function found + let direct_deps = needed_fns.clone(); // Clone before recursive traversal + debug!("{direct_deps:?}"); + for name in direct_deps { + add_function_and_deps(&name, functions, &mut needed_fns); + } + } + SpawnType::FnCall { name, .. } => { + // Add the called function and its dependencies + debug!("fncall {name}"); + debug!("{:?}", functions.keys().collect::>()); + add_function_and_deps(name, functions, &mut needed_fns); + } + } + debug!("generate_worker_process found deps: {needed_fns:?}"); + + let imports = imports + .iter() + .map(|i| format!("#[allow(unused_imports)]\nuse {i};\n")) + .collect::(); + + // Generate function definitions preserving return types + let function_definitions = needed_fns + .iter() + .filter_map(|name| functions.get(name)) + .map(|info| { + let ret_type = info + .signature + .ret + .as_ref() + .map_or("".to_string(), |r| format!(" -> {}", r.ty)); + format!( + "fn {}({}){}{}", + info.name, + info.signature + .args + .iter() + .map(|arg| format!("{}: {}", arg.name, arg.ty)) + .collect::>() + .join(", "), + ret_type, + info.body, + ) + }) + .collect::(); + + debug!("{function_definitions}"); + + let body = match &spawn_match.spawn_type { + SpawnType::Closure { body, .. } => format!("{body};"), + SpawnType::FnCall { name, args } => { + format!("{name}({});", args.join(", ")) + } + }; + + let template = format!( + r#"// Generated worker process for {process_name} +{imports} + +{wit_bindgen} + +{args_type} + +{function_definitions} + +call_init!(init); +fn init(our: Address) {{ + // Get args from parent + let message = await_message().expect("Failed to get args from parent"); + {args_destructure} = serde_json::from_slice(&message.body()).unwrap(); + + // Execute `Spawn!()` function body + {body} +}} +"#, + ); + + Ok(template) +} + +fn find_all_spawns(input: &str) -> Result, SpawnParseError> { + let mut results = Vec::new(); + let mut search_from = 0; + let imports = extract_imports(input)?; + + while let Some(spawn_start) = input[search_from..].find("Spawn!(") { + let absolute_start = search_from + spawn_start; + + let (spawn_type, consumed_len) = parse_spawn_from(&input[absolute_start..])?; + + results.push(SpawnMatch { + spawn_type, + imports: imports.clone(), + start_pos: absolute_start, + end_pos: absolute_start + consumed_len, + }); + + search_from = absolute_start + consumed_len; + } + + Ok(results) +} + +/// Rewrites the parent and stores information +/// for writing children in GeneratedProcess. 
+#[instrument(level = "trace", skip_all)] +fn rewrite_rust_file( + process_name: &str, + content: &str, + generated: &mut GeneratedProcesses, +) -> Result { + let spawn_matches = find_all_spawns(content)?; + let mut new_content = content.to_string(); + let functions = extract_functions(&content)?; + debug!("got functions in {process_name}: {:#?}", functions); + + // Process spawns in reverse order to not invalidate positions + for (i, spawn_match) in spawn_matches.iter().enumerate().rev() { + let worker_name = format!("{process_name}-worker-{i}"); + let wasm_name = format!("{worker_name}.wasm"); + + let (args_name, parsed_signature) = match &spawn_match.spawn_type { + SpawnType::Closure { args, .. } => { + let args_name = make_args_struct_name(&worker_name); + let parsed_signature = parse_fn_signature(args)?; + (args_name, parsed_signature) + } + SpawnType::FnCall { name, args } => { + let fn_info = functions + .get(name) + .ok_or_else(|| eyre!("Function {name} not found in parent"))?; + // For function calls, use the function's argument names but + // with the values supplied in the Spawn!() call + let args_name = make_args_struct_name(&worker_name); + (args_name, fn_info.signature.clone()) + } + }; + + let args_type = generate_args_struct_type(&args_name, &parsed_signature.args); + let args_destructure = generate_args_struct_destructure(&args_name, &parsed_signature.args); + + // Generate worker process + let wit_bindgen = extract_wit_bindgen(content).unwrap_or_else(|| { + r#"wit_bindgen::generate!({ + path: "target/wit", + world: "process-v0", +})"# + .to_string() + }); + + let worker_code = generate_worker_process( + process_name, + spawn_match, + &functions, + &spawn_match.imports, + &wit_bindgen, + &args_type, + &args_destructure, + )?; + + // Track in generated processes + generated + .processes + .entry(process_name.to_string()) + .or_default() + .insert(worker_name.clone(), (wasm_name, worker_code)); + + // Create replacement spawn code with appropriate args instantiation + let args_instance = match &spawn_match.spawn_type { + SpawnType::Closure { args, .. } => { + // For closures, use the argument names directly + generate_args_struct_instance(&args_name, &parsed_signature.args) + } + SpawnType::FnCall { args, .. } => { + // For function calls, use the supplied argument values + let fields = parsed_signature + .args + .iter() + .zip(args.iter()) + .map(|(arg, value)| format!(" {}: {},", arg.name, value)) + .collect::>() + .join("\n"); + + format!( + r#"let args = {args_name} {{ +{fields} + }};"# + ) + } + }; + + // Create the replacement code + let replacement = format!( + r#"{{ + use kinode_process_lib::{{spawn, OnExit, Request}}; + {args_type} + + {args_instance} + + let worker = spawn( + None, + &format!("{{}}:{{}}/pkg/{worker_name}.wasm", our.process.package_name, our.process.publisher_node), + OnExit::None, + vec![], + vec![], + false, + ).expect("failed to spawn worker"); + Request::to((our.node(), worker)) + .body(serde_json::to_vec(&args).unwrap()) + .send() + .expect("failed to initialize worker"); + }}"#, + ); + + // Replace in the content using positions + new_content.replace_range(spawn_match.start_pos..spawn_match.end_pos, &replacement); + } + + Ok(new_content) +} + +/// For each process in package, rewrite rust files parents +/// and store information for writing children in GeneratedProcess. 
+#[instrument(level = "trace", skip_all)] +fn process_package(package_dir: &Path, generated: &mut GeneratedProcesses) -> Result<()> { + if !package_dir.is_dir() { + return Ok(()); + } + + for entry in fs::read_dir(package_dir)? { + let entry = entry?; + let path = entry.path(); + + if path.is_dir() { + if path.file_name().and_then(|n| n.to_str()) == Some("target") { + continue; + } + process_package(&path, generated)?; + } else if path.extension().and_then(|s| s.to_str()) == Some("rs") { + let process_name = path + .parent() + .and_then(|p| p.parent()) + .and_then(|n| n.file_name()) + .and_then(|n| n.to_str()) + .ok_or_else(|| eyre!("Invalid process name"))? + .to_string(); + + let content = fs::read_to_string(&path)?; + let new_content = rewrite_rust_file(&process_name, &content, generated)?; + fs::write(&path, new_content)?; + crate::build::run_command(std::process::Command::new("rustfmt").arg(&path), false)?; + } + } + Ok(()) +} + +#[instrument(level = "trace", skip_all)] +fn create_child_processes(package_dir: &Path, generated: &GeneratedProcesses) -> Result<()> { + for (process_name, workers) in &generated.processes { + for (worker_name, (_, content)) in workers { + let parent_dir = package_dir.join(process_name); + let worker_dir = package_dir.join(worker_name); + + // Copy the source directory structure from parent + let parent_src = parent_dir.join("src"); + let worker_src = worker_dir.join("src"); + debug!("{} {}", parent_src.display(), worker_src.display()); + copy_dir(&parent_src, &worker_src)?; + + // Overwrite lib.rs with our generated content + let worker_lib = worker_src.join("lib.rs"); + fs::write(&worker_lib, content)?; + crate::build::run_command( + std::process::Command::new("rustfmt").arg(&worker_lib), + false, + )?; + + // Copy and modify Cargo.toml + let parent_cargo = fs::read_to_string(parent_dir.join("Cargo.toml"))?; + let mut doc = parent_cargo.parse::()?; + + // Update package name to worker name + if let Some(package) = doc.get_mut("package") { + if let Some(name) = package.get_mut("name") { + *name = toml_edit::value(worker_name.as_str()); + } + } + + fs::write(worker_dir.join("Cargo.toml"), doc.to_string())?; + } + } + Ok(()) +} + +#[instrument(level = "trace", skip_all)] +fn update_workspace_cargo_toml(package_dir: &Path, generated: &GeneratedProcesses) -> Result<()> { + let cargo_toml_path = package_dir.join("Cargo.toml"); + let cargo_toml = fs::read_to_string(&cargo_toml_path)?; + + // Parse existing TOML + let mut doc = cargo_toml.parse::()?; + + // Get or create workspace section + let workspace = doc.entry("workspace").or_insert(toml_edit::table()); + + // Get or create members array + let members = workspace + .as_table_mut() + .ok_or_else(|| eyre!("workspace is not a table"))? 
+ .entry("members") + .or_insert(toml_edit::array()); + + let members_array = members + .as_array_mut() + .ok_or_else(|| eyre!("members is not an array"))?; + + // Add all worker packages + for workers in generated.processes.values() { + for worker_name in workers.keys() { + members_array.push(worker_name); + } + } + + // Write back to file + fs::write(cargo_toml_path, doc.to_string())?; + + Ok(()) +} + +#[instrument(level = "trace", skip_all)] +pub fn copy_and_rewrite_package(package_dir: &Path) -> Result { + debug!("Rewriting for {}...", package_dir.display()); + let rewrite_dir = package_dir.join("target").join("rewrite"); + if rewrite_dir.exists() { + fs::remove_dir_all(&rewrite_dir)?; + } + fs::create_dir_all(&rewrite_dir)?; + + copy_dir(package_dir, &rewrite_dir)?; + + let mut generated = GeneratedProcesses::default(); + + // Rewrite parents & gather info for writing children + process_package(&rewrite_dir, &mut generated)?; + + // Create child processes + create_child_processes(&rewrite_dir, &generated)?; + + // Update workspace Cargo.toml + update_workspace_cargo_toml(&rewrite_dir, &generated)?; + + Ok(rewrite_dir) +} diff --git a/src/build_start_package/mod.rs b/src/build_start_package/mod.rs index 2a29546a..d3707196 100644 --- a/src/build_start_package/mod.rs +++ b/src/build_start_package/mod.rs @@ -21,6 +21,7 @@ pub async fn execute( default_world: Option<&str>, local_dependencies: Vec, add_paths_to_api: Vec, + rewrite: bool, reproducible: bool, force: bool, verbose: bool, @@ -38,6 +39,7 @@ pub async fn execute( default_world, local_dependencies, add_paths_to_api, + rewrite, reproducible, force, verbose, diff --git a/src/main.rs b/src/main.rs index f111bfc4..f148e74a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -222,6 +222,7 @@ async fn execute( .unwrap_or_default() .map(|s| PathBuf::from(s)) .collect(); + let rewrite = matches.get_one::("REWRITE").unwrap(); let reproducible = matches.get_one::("REPRODUCIBLE").unwrap(); let force = matches.get_one::("FORCE").unwrap(); let verbose = matches.get_one::("VERBOSE").unwrap(); @@ -239,6 +240,7 @@ async fn execute( default_world.map(|w| w.as_str()), local_dependencies, add_paths_to_api, + *rewrite, *reproducible, *force, *verbose, @@ -283,6 +285,7 @@ async fn execute( .unwrap_or_default() .map(|s| PathBuf::from(s)) .collect(); + let rewrite = matches.get_one::("REWRITE").unwrap(); let reproducible = matches.get_one::("REPRODUCIBLE").unwrap(); let force = matches.get_one::("FORCE").unwrap(); let verbose = matches.get_one::("VERBOSE").unwrap(); @@ -300,6 +303,7 @@ async fn execute( default_world.map(|w| w.as_str()), local_dependencies, add_paths_to_api, + *rewrite, *reproducible, *force, *verbose, @@ -733,6 +737,12 @@ async fn make_app(current_dir: &std::ffi::OsString) -> Result { .long("add-to-api") .help("Path to file to add to api.zip (can specify multiple times)") ) + .arg(Arg::new("REWRITE") + .action(ArgAction::SetTrue) + .long("rewrite") + .help("Rewrite the package (disables `Spawn!()`) [default: don't rewrite]") + .required(false) + ) .arg(Arg::new("REPRODUCIBLE") .action(ArgAction::SetTrue) .short('r') @@ -834,6 +844,12 @@ async fn make_app(current_dir: &std::ffi::OsString) -> Result { .help("Pass these comma-delimited feature flags to Rust cargo builds") .required(false) ) + .arg(Arg::new("REWRITE") + .action(ArgAction::SetTrue) + .long("no-rewrite") + .help("Rewrite the package (disables `Spawn!()`) [default: don't rewrite]") + .required(false) + ) .arg(Arg::new("REPRODUCIBLE") .action(ArgAction::SetTrue) .short('r') diff 
--git a/src/new/mod.rs b/src/new/mod.rs index 681cbf41..60226f47 100644 --- a/src/new/mod.rs +++ b/src/new/mod.rs @@ -73,7 +73,7 @@ impl From<&String> for Template { } } -fn snake_to_upper_camel_case(input: &str) -> String { +pub fn snake_to_upper_camel_case(input: &str) -> String { let parts: Vec<&str> = input.split('_').collect(); let mut camel_case = String::new(); diff --git a/src/new/templates/rust/no-ui/chat/test/chat-test/api/chat_test:template.os-v0.wit b/src/new/templates/rust/no-ui/chat/test/chat-test/api/chat-test:template.os-v0.wit similarity index 100% rename from src/new/templates/rust/no-ui/chat/test/chat-test/api/chat_test:template.os-v0.wit rename to src/new/templates/rust/no-ui/chat/test/chat-test/api/chat-test:template.os-v0.wit diff --git a/src/new/templates/rust/no-ui/echo/test/echo-test/api/echo_test:template.os-v0.wit b/src/new/templates/rust/no-ui/echo/test/echo-test/api/echo-test:template.os-v0.wit similarity index 100% rename from src/new/templates/rust/no-ui/echo/test/echo-test/api/echo_test:template.os-v0.wit rename to src/new/templates/rust/no-ui/echo/test/echo-test/api/echo-test:template.os-v0.wit diff --git a/src/new/templates/rust/no-ui/fibonacci/test/fibonacci-test/api/fibonacci_test:template.os-v0.wit b/src/new/templates/rust/no-ui/fibonacci/test/fibonacci-test/api/fibonacci-test:template.os-v0.wit similarity index 100% rename from src/new/templates/rust/no-ui/fibonacci/test/fibonacci-test/api/fibonacci_test:template.os-v0.wit rename to src/new/templates/rust/no-ui/fibonacci/test/fibonacci-test/api/fibonacci-test:template.os-v0.wit diff --git a/src/run_tests/mod.rs b/src/run_tests/mod.rs index 22079033..94b9a3b0 100644 --- a/src/run_tests/mod.rs +++ b/src/run_tests/mod.rs @@ -382,6 +382,7 @@ async fn build_packages( false, false, false, + false, ) .await?; debug!("Start {path:?}"); @@ -406,6 +407,7 @@ async fn build_packages( false, false, false, + false, ) .await?; } @@ -427,6 +429,7 @@ async fn build_packages( false, false, false, + false, ) .await?; }
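
Illustrative sketch (grounded in the diff above, not a verbatim excerpt of it): the new `--rewrite` flag on `kit build` drives `src/build/rewrite.rs`, which copies the package into `target/rewrite/`, expands each `Spawn!()` occurrence into a generated worker process, and replaces the call site with a `spawn()` + `Request` handshake using the templates in `rewrite_rust_file()` and `generate_worker_process()`. Below is roughly what a closure-form `Spawn!()` in a hypothetical process `my-process` becomes; the process name, the helper `do_work`, and the `count` argument are invented for illustration, while the worker naming (`my-process-worker-0`, `MyProcessWorker0Args`) and the generated code shape follow the naming and template functions in `rewrite.rs`.

// Parent source before rewriting (hypothetical):
//
//     let count: u64 = 42;
//     Spawn!(|count: u64| {
//         do_work(count);
//     });
//
// After `kit build --rewrite`, the call site is replaced by approximately:

{
    use kinode_process_lib::{spawn, OnExit, Request};

    // Args struct generated by generate_args_struct_type() from the closure's
    // parameter list; serialized and sent to the worker at spawn time.
    #[derive(serde::Serialize, serde::Deserialize)]
    struct MyProcessWorker0Args {
        count: u64,
    }

    let args = MyProcessWorker0Args {
        count: count.clone(),
    };

    // Spawn the generated worker wasm that now holds the closure body.
    let worker = spawn(
        None,
        &format!(
            "{}:{}/pkg/my-process-worker-0.wasm",
            our.process.package_name, our.process.publisher_node
        ),
        OnExit::None,
        vec![],
        vec![],
        false,
    )
    .expect("failed to spawn worker");

    // Hand the captured arguments to the worker.
    Request::to((our.node(), worker))
        .body(serde_json::to_vec(&args).unwrap())
        .send()
        .expect("failed to initialize worker");
}

// ...while the generated worker's lib.rs (written into target/rewrite/ and
// added to the workspace members) receives the args and runs the closure body:

call_init!(init);
fn init(our: Address) {
    // Get args from parent
    let message = await_message().expect("Failed to get args from parent");
    let MyProcessWorker0Args { count } = serde_json::from_slice(&message.body()).unwrap();

    // Execute `Spawn!()` function body
    do_work(count);
}

The function-call form `Spawn!(do_work(count))` is handled the same way, except the needed function definitions (and their transitive dependencies, collected by `find_fn_calls()`) are copied into the generated worker instead of a closure body.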