From 4ca86028ed47acc28df12da903fb598b98ca5f45 Mon Sep 17 00:00:00 2001 From: Roland Peelen Date: Mon, 18 Nov 2024 11:31:31 +0100 Subject: [PATCH 1/9] :art: - More lenient parsing of config --- src/bsconfig.rs | 107 +++++++++++++++++++++++------------------- src/build/packages.rs | 2 +- src/helpers.rs | 10 ++++ 3 files changed, 71 insertions(+), 48 deletions(-) diff --git a/src/bsconfig.rs b/src/bsconfig.rs index 17cff891..7a6a28f1 100644 --- a/src/bsconfig.rs +++ b/src/bsconfig.rs @@ -1,4 +1,5 @@ use crate::build::packages; +use crate::helpers::deserialize::*; use convert_case::{Case, Casing}; use serde::Deserialize; use std::fs; @@ -27,45 +28,6 @@ pub struct PackageSource { pub type_: Option, } -/// `to_qualified_without_children` takes a tree like structure of dependencies, coming in from -/// `bsconfig`, and turns it into a flat list. The main thing we extract here are the source -/// folders, and optional subdirs, where potentially, the subdirs recurse or not. -pub fn to_qualified_without_children(s: &Source, sub_path: Option) -> PackageSource { - match s { - Source::Shorthand(dir) => PackageSource { - dir: sub_path - .map(|p| p.join(Path::new(dir))) - .unwrap_or(Path::new(dir).to_path_buf()) - .to_string_lossy() - .to_string(), - subdirs: None, - type_: s.get_type(), - }, - Source::Qualified(PackageSource { - dir, - type_, - subdirs: Some(Subdirs::Recurse(should_recurse)), - }) => PackageSource { - dir: sub_path - .map(|p| p.join(Path::new(dir))) - .unwrap_or(Path::new(dir).to_path_buf()) - .to_string_lossy() - .to_string(), - subdirs: Some(Subdirs::Recurse(*should_recurse)), - type_: type_.to_owned(), - }, - Source::Qualified(PackageSource { dir, type_, .. }) => PackageSource { - dir: sub_path - .map(|p| p.join(Path::new(dir))) - .unwrap_or(Path::new(dir).to_path_buf()) - .to_string_lossy() - .to_string(), - subdirs: None, - type_: type_.to_owned(), - }, - } -} - impl Eq for PackageSource {} #[derive(Deserialize, Debug, Clone, PartialEq, Hash)] @@ -97,6 +59,45 @@ impl Source { (source, _) => source.clone(), } } + + /// `to_qualified_without_children` takes a tree like structure of dependencies, coming in from + /// `bsconfig`, and turns it into a flat list. The main thing we extract here are the source + /// folders, and optional subdirs, where potentially, the subdirs recurse or not. + pub fn to_qualified_without_children(&self, sub_path: Option) -> PackageSource { + match self { + Source::Shorthand(dir) => PackageSource { + dir: sub_path + .map(|p| p.join(Path::new(dir))) + .unwrap_or(Path::new(dir).to_path_buf()) + .to_string_lossy() + .to_string(), + subdirs: None, + type_: self.get_type(), + }, + Source::Qualified(PackageSource { + dir, + type_, + subdirs: Some(Subdirs::Recurse(should_recurse)), + }) => PackageSource { + dir: sub_path + .map(|p| p.join(Path::new(dir))) + .unwrap_or(Path::new(dir).to_path_buf()) + .to_string_lossy() + .to_string(), + subdirs: Some(Subdirs::Recurse(*should_recurse)), + type_: type_.to_owned(), + }, + Source::Qualified(PackageSource { dir, type_, .. 
}) => PackageSource { + dir: sub_path + .map(|p| p.join(Path::new(dir))) + .unwrap_or(Path::new(dir).to_path_buf()) + .to_string_lossy() + .to_string(), + subdirs: None, + type_: type_.to_owned(), + }, + } + } } impl Eq for Source {} @@ -104,7 +105,7 @@ impl Eq for Source {} #[derive(Deserialize, Debug, Clone)] pub struct PackageSpec { pub module: String, - #[serde(rename = "in-source")] + #[serde(rename = "in-source", default = "default_true")] pub in_source: bool, pub suffix: Option, } @@ -122,10 +123,14 @@ pub struct Warnings { pub error: Option, } -#[derive(Deserialize, Debug, Clone)] -pub struct Reason { - #[serde(rename = "react-jsx")] - pub react_jsx: i32, +#[derive(Deserialize, Debug, Clone, PartialEq, Hash)] +#[serde(untagged)] +pub enum Reason { + Versioned { + #[serde(rename = "react-jsx")] + react_jsx: i32, + }, + Unversioned(bool), } #[derive(Deserialize, Debug, Clone)] @@ -262,6 +267,10 @@ pub fn flatten_ppx_flags( pub fn read(path: String) -> Config { fs::read_to_string(path.clone()) .map_err(|e| format!("Could not read bsconfig. {path} - {e}")) + // .and_then(|x| { + // dbg!(&x); + // repair(x).map_err(|e| format!("Json was invalid and could not be repaired. {path} - {e}")) + // }) .and_then(|x| { serde_json::from_str::(&x).map_err(|e| format!("Could not parse bsconfig. {path} - {e}")) }) @@ -332,8 +341,12 @@ impl Config { Some(_version) => panic!("Unsupported JSX version"), None => vec![], }, - (Some(reason), None) => { - vec!["-bs-jsx".to_string(), format!("{}", reason.react_jsx)] + (Some(Reason::Versioned { react_jsx }), None) => { + vec!["-bs-jsx".to_string(), format!("{}", react_jsx)] + } + (Some(Reason::Unversioned(true)), None) => { + // If Reason is 'true' - we should default to the latest + vec!["-bs-jsx".to_string()] } _ => vec![], } @@ -462,7 +475,7 @@ mod tests { let config = serde_json::from_str::(json).unwrap(); if let OneOrMore::Single(source) = config.sources { - let source = to_qualified_without_children(&source, None); + let source = source.to_qualified_without_children(None); assert_eq!(source.type_, Some(String::from("dev"))); } else { dbg!(config.sources); diff --git a/src/build/packages.rs b/src/build/packages.rs index 631f0b39..ce77d4c4 100644 --- a/src/build/packages.rs +++ b/src/build/packages.rs @@ -184,7 +184,7 @@ pub fn read_folders( fn get_source_dirs(source: bsconfig::Source, sub_path: Option) -> AHashSet { let mut source_folders: AHashSet = AHashSet::new(); - let source_folder = bsconfig::to_qualified_without_children(&source, sub_path.to_owned()); + let source_folder = source.to_qualified_without_children(sub_path.to_owned()); source_folders.insert(source_folder.to_owned()); let (subdirs, full_recursive) = match source.to_owned() { diff --git a/src/helpers.rs b/src/helpers.rs index f78aff93..c4d92a01 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -10,6 +10,16 @@ use std::time::{SystemTime, UNIX_EPOCH}; pub type StdErr = String; +pub mod deserialize { + pub fn default_false() -> bool { + false + } + + pub fn default_true() -> bool { + true + } +} + pub mod emojis { use console::Emoji; pub static COMMAND: Emoji<'_, '_> = Emoji("🏃 ", ""); From a86968cf873472c111512b784b7b236f270a6808 Mon Sep 17 00:00:00 2001 From: Roland Peelen Date: Mon, 18 Nov 2024 11:34:52 +0100 Subject: [PATCH 2/9] :truck: - Rename bsconfig -> config --- src/build.rs | 6 +- src/build/build_types.rs | 1 + src/build/clean.rs | 2 +- src/build/compile.rs | 18 ++-- src/build/packages.rs | 157 ++++++++++++++++---------------- src/build/parse.rs | 20 ++-- 
src/build/read_compile_state.rs | 2 +- src/{bsconfig.rs => config.rs} | 0 src/helpers.rs | 6 +- src/lib.rs | 2 +- src/main.rs | 2 +- src/sourcedirs.rs | 6 +- 12 files changed, 114 insertions(+), 108 deletions(-) rename src/{bsconfig.rs => config.rs} (100%) diff --git a/src/build.rs b/src/build.rs index e61cfeca..c07b6872 100644 --- a/src/build.rs +++ b/src/build.rs @@ -57,12 +57,12 @@ pub struct CompilerArgs { pub fn get_compiler_args(path: &str, rescript_version: Option, bsc_path: Option) -> String { let filename = &helpers::get_abs_path(path); let package_root = helpers::get_abs_path( - &helpers::get_nearest_bsconfig(&std::path::PathBuf::from(path)).expect("Couldn't find package root"), + &helpers::get_nearest_config(&std::path::PathBuf::from(path)).expect("Couldn't find package root"), ); let workspace_root = get_workspace_root(&package_root).map(|p| helpers::get_abs_path(&p)); let root_rescript_config = - packages::read_bsconfig(&workspace_root.to_owned().unwrap_or(package_root.to_owned())); - let rescript_config = packages::read_bsconfig(&package_root); + packages::read_config(&workspace_root.to_owned().unwrap_or(package_root.to_owned())); + let rescript_config = packages::read_config(&package_root); let rescript_version = if let Some(rescript_version) = rescript_version { rescript_version } else { diff --git a/src/build/build_types.rs b/src/build/build_types.rs index 4dc6cfad..04f71322 100644 --- a/src/build/build_types.rs +++ b/src/build/build_types.rs @@ -119,6 +119,7 @@ impl BuildState { deps_initialized: false, } } + pub fn insert_module(&mut self, module_name: &str, module: Module) { self.modules.insert(module_name.to_owned(), module); self.module_names.insert(module_name.to_owned()); diff --git a/src/build/clean.rs b/src/build/clean.rs index de02747c..25835f18 100644 --- a/src/build/clean.rs +++ b/src/build/clean.rs @@ -74,7 +74,7 @@ pub fn clean_mjs_files(build_state: &BuildState) { .join(&source_file.implementation.path) .to_string_lossy() .to_string(), - root_package.bsconfig.get_suffix(), + root_package.config.get_suffix(), )) } _ => None, diff --git a/src/build/compile.rs b/src/build/compile.rs index d185f059..408d3557 100644 --- a/src/build/compile.rs +++ b/src/build/compile.rs @@ -5,7 +5,7 @@ mod dependency_cycle; use super::build_types::*; use super::logs; use super::packages; -use crate::bsconfig; +use crate::config; use crate::helpers; use ahash::{AHashMap, AHashSet}; use console::style; @@ -359,8 +359,8 @@ pub fn compile( } pub fn compiler_args( - config: &bsconfig::Config, - root_config: &bsconfig::Config, + config: &config::Config, + root_config: &config::Config, ast_path: &str, version: &str, file_path: &str, @@ -374,11 +374,11 @@ pub fn compiler_args( ) -> Vec { let normal_deps = config.bs_dependencies.as_ref().unwrap_or(&vec![]).to_owned(); - let bsc_flags = bsconfig::flatten_flags(&config.bsc_flags); + let bsc_flags = config::flatten_flags(&config.bsc_flags); // don't compile dev-deps yet // let dev_deps = source // .package - // .bsconfig + // .config // .bs_dev_dependencies // .as_ref() // .unwrap_or(&vec![]) @@ -442,10 +442,10 @@ pub fn compiler_args( }; let warn_error = match warnings.error { - Some(bsconfig::Error::Catchall(true)) => { + Some(config::Error::Catchall(true)) => { vec!["-warn-error".to_string(), "A".to_string()] } - Some(bsconfig::Error::Qualified(errors)) => { + Some(config::Error::Qualified(errors)) => { vec!["-warn-error".to_string(), errors.to_string()] } _ => vec![], @@ -532,8 +532,8 @@ fn compile_file( let module_name = 
helpers::file_path_to_module_name(implementation_file_path, &package.namespace); let has_interface = module.get_interface().is_some(); let to_mjs_args = compiler_args( - &package.bsconfig, - &root_package.bsconfig, + &package.config, + &root_package.config, ast_path, version, implementation_file_path, diff --git a/src/build/packages.rs b/src/build/packages.rs index ce77d4c4..c17be493 100644 --- a/src/build/packages.rs +++ b/src/build/packages.rs @@ -1,7 +1,7 @@ use super::build_types::*; use super::namespaces; use super::packages; -use crate::bsconfig; +use crate::config; use crate::helpers; use crate::helpers::emojis::*; use ahash::{AHashMap, AHashSet}; @@ -39,7 +39,7 @@ impl Namespace { #[derive(Debug, Clone)] struct Dependency { name: String, - bsconfig: bsconfig::Config, + config: config::Config, path: String, is_pinned: bool, dependencies: Vec, @@ -48,8 +48,8 @@ struct Dependency { #[derive(Debug, Clone)] pub struct Package { pub name: String, - pub bsconfig: bsconfig::Config, - pub source_folders: AHashSet, + pub config: config::Config, + pub source_folders: AHashSet, // these are the relative file paths (relative to the package root) pub source_files: Option>, pub namespace: Namespace, @@ -177,25 +177,25 @@ pub fn read_folders( Ok(map) } -/// Given a projects' root folder and a `bsconfig::Source`, this recursively creates all the +/// Given a projects' root folder and a `config::Source`, this recursively creates all the /// sources in a flat list. In the process, it removes the children, as they are being resolved /// because of the recursiveness. So you get a flat list of files back, retaining the type_ and /// whether it needs to recurse into all structures -fn get_source_dirs(source: bsconfig::Source, sub_path: Option) -> AHashSet { - let mut source_folders: AHashSet = AHashSet::new(); +fn get_source_dirs(source: config::Source, sub_path: Option) -> AHashSet { + let mut source_folders: AHashSet = AHashSet::new(); let source_folder = source.to_qualified_without_children(sub_path.to_owned()); source_folders.insert(source_folder.to_owned()); let (subdirs, full_recursive) = match source.to_owned() { - bsconfig::Source::Shorthand(_) - | bsconfig::Source::Qualified(bsconfig::PackageSource { subdirs: None, .. }) => (None, false), - bsconfig::Source::Qualified(bsconfig::PackageSource { - subdirs: Some(bsconfig::Subdirs::Recurse(recurse)), + config::Source::Shorthand(_) + | config::Source::Qualified(config::PackageSource { subdirs: None, .. }) => (None, false), + config::Source::Qualified(config::PackageSource { + subdirs: Some(config::Subdirs::Recurse(recurse)), .. }) => (None, recurse), - bsconfig::Source::Qualified(bsconfig::PackageSource { - subdirs: Some(bsconfig::Subdirs::Qualified(subdirs)), + config::Source::Qualified(config::PackageSource { + subdirs: Some(config::Subdirs::Qualified(subdirs)), .. 
 }) => (Some(subdirs), false),
     };
 
@@ -205,8 +205,10 @@ fn get_source_dirs(source: bsconfig::Source, sub_path: Option<PathBuf>) -> AHash
         subdirs
             .unwrap_or(vec![])
             .par_iter()
-            .map(|subsource| get_source_dirs(subsource.set_type(source.get_type()), Some(sub_path.to_owned())))
-            .collect::<Vec<AHashSet<bsconfig::PackageSource>>>()
+            .map(|subsource| {
+                get_source_dirs(subsource.set_type(source.get_type()), Some(sub_path.to_owned()))
+            })
+            .collect::<Vec<AHashSet<config::PackageSource>>>()
             .into_iter()
             .for_each(|subdir| source_folders.extend(subdir))
     }
@@ -214,7 +216,7 @@ fn get_source_dirs(source: bsconfig::Source, sub_path: Option<PathBuf>) -> AHash
     source_folders
 }
 
-pub fn read_bsconfig(package_dir: &str) -> bsconfig::Config {
+pub fn read_config(package_dir: &str) -> config::Config {
     let prefix = if package_dir.is_empty() {
         "".to_string()
     } else {
@@ -225,9 +227,9 @@ pub fn read_bsconfig(package_dir: &str) -> bsconfig::Config {
     let rescript_json_path = prefix.to_string() + "rescript.json";
     let bsconfig_json_path = prefix.to_string() + "bsconfig.json";
 
     if Path::new(&rescript_json_path).exists() {
-        bsconfig::read(rescript_json_path)
+        config::read(rescript_json_path)
     } else {
-        bsconfig::read(bsconfig_json_path)
+        config::read(bsconfig_json_path)
     }
 }
 
@@ -276,20 +278,20 @@ pub fn read_dependency(
 
 /// # Make Package
-/// Given a bsconfig, recursively finds all dependencies.
+/// Given a config, recursively finds all dependencies.
 /// 1. It starts with registering dependencies and
 ///    prevents the operation for the ones which are already
 ///    registered for the parent packages. Especially relevant for peerDependencies.
-/// 2. In parallel performs IO to read the dependencies bsconfig and
+/// 2. In parallel performs IO to read the dependencies config and
 ///    recursively continues operation for their dependencies as well.
 fn read_dependencies(
     registered_dependencies_set: &mut AHashSet<String>,
-    parent_bsconfig: &bsconfig::Config,
+    parent_config: &config::Config,
     parent_path: &str,
     project_root: &str,
     workspace_root: Option<String>,
 ) -> Vec<Dependency> {
-    return parent_bsconfig
+    return parent_config
         .bs_dependencies
         .to_owned()
         .unwrap_or_default()
@@ -303,10 +305,10 @@ fn read_dependencies(
             }
         })
         .collect::<Vec<String>>()
-        // Read all bsconfig files in parallel instead of blocking
+        // Read all config files in parallel instead of blocking
         .par_iter()
         .map(|package_name| {
-            let (bsconfig, canonical_path) =
+            let (config, canonical_path) =
                 match read_dependency(package_name, parent_path, project_root, &workspace_root) {
                     Err(error) => {
                         log::error!(
                             "{} {} Error building package tree. {}",
                             style("[1/2]").bold().dim(),
                             CROSS,
                             error
                         );
                         std::process::exit(2)
                     }
-                    Ok(canonical_path) => (read_bsconfig(&canonical_path), canonical_path),
+                    Ok(canonical_path) => (read_config(&canonical_path), canonical_path),
                 };
-            let is_pinned = parent_bsconfig
+            let is_pinned = parent_config
                 .pinned_dependencies
                 .as_ref()
-                .map(|p| p.contains(&bsconfig.name))
+                .map(|p| p.contains(&config.name))
                 .unwrap_or(false);
 
             let dependencies = read_dependencies(
                 &mut registered_dependencies_set.to_owned(),
-                &bsconfig,
+                &config,
                 &canonical_path,
                 project_root,
                 workspace_root.to_owned(),
             );
 
             Dependency {
                 name: package_name.to_owned(),
-                bsconfig,
+                config,
                 path: canonical_path,
                 is_pinned,
                 dependencies,
@@ -354,20 +356,15 @@ fn flatten_dependencies(dependencies: Vec<Dependency>) -> Vec<Dependency> {
     flattened
 }
 
-fn make_package(
-    bsconfig: bsconfig::Config,
-    package_path: &str,
-    is_pinned_dep: bool,
-    is_root: bool,
-) -> Package {
-    let source_folders = match bsconfig.sources.to_owned() {
-        bsconfig::OneOrMore::Single(source) => get_source_dirs(source, None),
-        bsconfig::OneOrMore::Multiple(sources) => {
-            let mut source_folders:
AHashSet = AHashSet::new(); +fn make_package(config: config::Config, package_path: &str, is_pinned_dep: bool, is_root: bool) -> Package { + let source_folders = match config.sources.to_owned() { + config::OneOrMore::Single(source) => get_source_dirs(source, None), + config::OneOrMore::Multiple(sources) => { + let mut source_folders: AHashSet = AHashSet::new(); sources .iter() .map(|source| get_source_dirs(source.to_owned(), None)) - .collect::>>() + .collect::>>() .into_iter() .for_each(|source| source_folders.extend(source)); source_folders @@ -375,11 +372,11 @@ fn make_package( }; Package { - name: bsconfig.name.to_owned(), - bsconfig: bsconfig.to_owned(), + name: config.name.to_owned(), + config: config.to_owned(), source_folders, source_files: None, - namespace: bsconfig.get_namespace(), + namespace: config.get_namespace(), modules: None, // we canonicalize the path name so it's always the same path: PathBuf::from(package_path) @@ -394,19 +391,19 @@ fn make_package( } fn read_packages(project_root: &str, workspace_root: Option) -> AHashMap { - let root_bsconfig = read_bsconfig(project_root); + let root_config = read_config(project_root); // Store all packages and completely deduplicate them let mut map: AHashMap = AHashMap::new(); map.insert( - root_bsconfig.name.to_owned(), - make_package(root_bsconfig.to_owned(), project_root, false, true), + root_config.name.to_owned(), + make_package(root_config.to_owned(), project_root, false, true), ); let mut registered_dependencies_set: AHashSet = AHashSet::new(); let dependencies = flatten_dependencies(read_dependencies( &mut registered_dependencies_set, - &root_bsconfig, + &root_config, project_root, project_root, workspace_root, @@ -415,7 +412,7 @@ fn read_packages(project_root: &str, workspace_root: Option) -> AHashMap if !map.contains_key(&d.name) { map.insert( d.name.to_owned(), - make_package(d.bsconfig.to_owned(), &d.path, d.is_pinned, false), + make_package(d.config.to_owned(), &d.path, d.is_pinned, false), ); } }); @@ -425,7 +422,7 @@ fn read_packages(project_root: &str, workspace_root: Option) -> AHashMap /// `get_source_files` is essentially a wrapper around `read_structure`, which read a /// list of files in a folder to a hashmap of `string` / `fs::Metadata` (file metadata). Reason for -/// this wrapper is the recursiveness of the `bsconfig.json` subfolders. Some sources in bsconfig +/// this wrapper is the recursiveness of the `config.json` subfolders. Some sources in config /// can be specified as being fully recursive (`{ subdirs: true }`). This wrapper pulls out that /// data from the config and pushes it forwards. Another thing is the 'type_', some files / folders /// can be marked with the type 'dev'. Which means that they may not be around in the distributed @@ -435,17 +432,17 @@ pub fn get_source_files( package_name: &String, package_dir: &Path, filter: &Option, - source: &bsconfig::PackageSource, + source: &config::PackageSource, ) -> AHashMap { let mut map: AHashMap = AHashMap::new(); let (recurse, type_) = match source { - bsconfig::PackageSource { - subdirs: Some(bsconfig::Subdirs::Recurse(subdirs)), + config::PackageSource { + subdirs: Some(config::Subdirs::Recurse(subdirs)), type_, .. } => (subdirs.to_owned(), type_), - bsconfig::PackageSource { type_, .. } => (false, type_), + config::PackageSource { type_, .. } => (false, type_), }; let path_dir = Path::new(&source.dir); @@ -512,9 +509,9 @@ fn extend_with_children( build } -/// Make turns a folder, that should contain a bsconfig, into a tree of Packages. 
+/// Make turns a folder, that should contain a config, into a tree of Packages. /// It does so in two steps: -/// 1. Get all the packages parsed, and take all the source folders from the bsconfig +/// 1. Get all the packages parsed, and take all the source folders from the config /// 2. Take the (by then deduplicated) packages, and find all the '.re', '.res', '.ml' and /// interface files. /// @@ -529,15 +526,19 @@ pub fn make( /* Once we have the deduplicated packages, we can add the source files for each - to minimize * the IO */ let result = extend_with_children(filter, map); - result.values().for_each(|package| if let Some(dirs) = &package.dirs { dirs.iter().for_each(|dir| { - let _ = std::fs::create_dir_all(std::path::Path::new(&package.get_bs_build_path()).join(dir)); - }) }); + result.values().for_each(|package| { + if let Some(dirs) = &package.dirs { + dirs.iter().for_each(|dir| { + let _ = std::fs::create_dir_all(std::path::Path::new(&package.get_bs_build_path()).join(dir)); + }) + } + }); result } pub fn get_package_name(path: &str) -> String { - let bsconfig = read_bsconfig(path); - bsconfig.name + let config = read_config(path); + config.name } pub fn parse_packages(build_state: &mut BuildState) { @@ -728,19 +729,19 @@ pub fn parse_packages(build_state: &mut BuildState) { impl Package { pub fn get_jsx_args(&self) -> Vec { - self.bsconfig.get_jsx_args() + self.config.get_jsx_args() } pub fn get_jsx_mode_args(&self) -> Vec { - self.bsconfig.get_jsx_mode_args() + self.config.get_jsx_mode_args() } pub fn get_jsx_module_args(&self) -> Vec { - self.bsconfig.get_jsx_module_args() + self.config.get_jsx_module_args() } pub fn get_uncurried_args(&self, version: &str, root_package: &packages::Package) -> Vec { - root_package.bsconfig.get_uncurried_args(version) + root_package.config.get_uncurried_args(version) } } @@ -751,7 +752,7 @@ fn get_unallowed_dependents( ) -> Option { for deps_package_name in dependencies { if let Some(deps_package) = packages.get(deps_package_name) { - let deps_allowed_dependents = deps_package.bsconfig.allowed_dependents.to_owned(); + let deps_allowed_dependents = deps_package.config.allowed_dependents.to_owned(); if let Some(allowed_dependents) = deps_allowed_dependents { if !allowed_dependents.contains(package_name) { return Some(deps_package_name.to_string()); @@ -772,13 +773,15 @@ pub fn validate_packages_dependencies(packages: &AHashMap) -> b let mut detected_unallowed_dependencies: AHashMap = AHashMap::new(); for (package_name, package) in packages { - let bs_dependencies = &package.bsconfig.bs_dependencies.to_owned().unwrap_or(vec![]); - let pinned_dependencies = &package.bsconfig.pinned_dependencies.to_owned().unwrap_or(vec![]); - let dev_dependencies = &package.bsconfig.bs_dev_dependencies.to_owned().unwrap_or(vec![]); + let bs_dependencies = &package.config.bs_dependencies.to_owned().unwrap_or(vec![]); + let pinned_dependencies = &package.config.pinned_dependencies.to_owned().unwrap_or(vec![]); + let dev_dependencies = &package.config.bs_dev_dependencies.to_owned().unwrap_or(vec![]); - [("bs-dependencies", bs_dependencies), + [ + ("bs-dependencies", bs_dependencies), ("pinned-dependencies", pinned_dependencies), - ("bs-dev-dependencies", dev_dependencies)] + ("bs-dev-dependencies", dev_dependencies), + ] .iter() .for_each(|(dependency_type, dependencies)| { if let Some(unallowed_dependency_name) = @@ -808,9 +811,11 @@ pub fn validate_packages_dependencies(packages: &AHashMap) -> b console::style(package_name).bold() ); - [("bs-dependencies", 
unallowed_deps.bs_deps.to_owned()), + [ + ("bs-dependencies", unallowed_deps.bs_deps.to_owned()), ("pinned-dependencies", unallowed_deps.pinned_deps.to_owned()), - ("bs-dev-dependencies", unallowed_deps.bs_dev_deps.to_owned())] + ("bs-dev-dependencies", unallowed_deps.bs_dev_deps.to_owned()), + ] .iter() .for_each(|(deps_type, map)| { if !map.is_empty() { @@ -828,7 +833,7 @@ pub fn validate_packages_dependencies(packages: &AHashMap) -> b log::error!( "\nUpdate the {} value in the {} of the unallowed dependencies to solve the issue!", console::style("unallowed_dependents").bold().dim(), - console::style("bsconfig.json").bold().dim() + console::style("config.json").bold().dim() ) } !has_any_unallowed_dependent @@ -836,7 +841,7 @@ pub fn validate_packages_dependencies(packages: &AHashMap) -> b #[cfg(test)] mod test { - use crate::bsconfig::Source; + use crate::config::Source; use ahash::{AHashMap, AHashSet}; use super::{Namespace, Package}; @@ -850,9 +855,9 @@ mod test { ) -> Package { Package { name: name.clone(), - bsconfig: crate::bsconfig::Config { + config: crate::config::Config { name: name.clone(), - sources: crate::bsconfig::OneOrMore::Single(Source::Shorthand(String::from("Source"))), + sources: crate::config::OneOrMore::Single(Source::Shorthand(String::from("Source"))), package_specs: None, warnings: None, suffix: None, diff --git a/src/build/parse.rs b/src/build/parse.rs index 2452fa6f..b8a34494 100644 --- a/src/build/parse.rs +++ b/src/build/parse.rs @@ -2,8 +2,8 @@ use super::build_types::*; use super::logs; use super::namespaces; use super::packages; -use crate::bsconfig; -use crate::bsconfig::OneOrMore; +use crate::config; +use crate::config::OneOrMore; use crate::helpers; use ahash::AHashSet; use log::debug; @@ -248,8 +248,8 @@ pub fn generate_asts( } pub fn parser_args( - config: &bsconfig::Config, - root_config: &bsconfig::Config, + config: &config::Config, + root_config: &config::Config, filename: &str, version: &str, workspace_root: &Option, @@ -260,7 +260,7 @@ pub fn parser_args( let path = PathBuf::from(filename); let ast_extension = path_to_ast_extension(&path); let ast_path = (helpers::get_basename(&file.to_string()).to_owned()) + ast_extension; - let ppx_flags = bsconfig::flatten_ppx_flags( + let ppx_flags = config::flatten_ppx_flags( &if let Some(workspace_root) = workspace_root { format!("{}/node_modules", &workspace_root) } else { @@ -273,7 +273,7 @@ pub fn parser_args( let jsx_module_args = root_config.get_jsx_module_args(); let jsx_mode_args = root_config.get_jsx_mode_args(); let uncurried_args = root_config.get_uncurried_args(version); - let bsc_flags = bsconfig::flatten_flags(&config.bsc_flags); + let bsc_flags = config::flatten_flags(&config.bsc_flags); let file = "../../".to_string() + file; ( @@ -311,8 +311,8 @@ fn generate_ast( let build_path_abs = package.get_build_path(); let (ast_path, parser_args) = parser_args( - &package.bsconfig, - &root_package.bsconfig, + &package.config, + &root_package.config, filename, version, workspace_root, @@ -392,8 +392,8 @@ fn filter_ppx_flags( flags .iter() .filter(|flag| match flag { - bsconfig::OneOrMore::Single(str) => include_ppx(str, contents), - bsconfig::OneOrMore::Multiple(str) => include_ppx(str.first().unwrap(), contents), + config::OneOrMore::Single(str) => include_ppx(str, contents), + config::OneOrMore::Multiple(str) => include_ppx(str.first().unwrap(), contents), }) .map(|x| x.to_owned()) .collect::>>() diff --git a/src/build/read_compile_state.rs b/src/build/read_compile_state.rs index 7d983f5a..b6cbc9a3 
100644
--- a/src/build/read_compile_state.rs
+++ b/src/build/read_compile_state.rs
@@ -103,7 +103,7 @@ pub fn read(build_state: &mut BuildState) -> CompileAssetsState {
                     last_modified: last_modified.to_owned(),
                     ast_file_path,
                     is_root: *package_is_root,
-                    suffix: root_package.bsconfig.get_suffix(),
+                    suffix: root_package.config.get_suffix(),
                 },
             );
             let _ = ast_rescript_file_locations.insert(res_file_path);
diff --git a/src/bsconfig.rs b/src/config.rs
similarity index 100%
rename from src/bsconfig.rs
rename to src/config.rs
diff --git a/src/helpers.rs b/src/helpers.rs
index c4d92a01..0f1e76da 100644
--- a/src/helpers.rs
+++ b/src/helpers.rs
@@ -307,11 +307,11 @@ fn has_rescript_config(path: &Path) -> bool {
 pub fn get_workspace_root(package_root: &str) -> Option<String> {
     std::path::PathBuf::from(&package_root)
         .parent()
-        .and_then(get_nearest_bsconfig)
+        .and_then(get_nearest_config)
 }
 
-// traverse up the directory tree until we find a bsconfig.json, if not return None
-pub fn get_nearest_bsconfig(path_buf: &Path) -> Option<String> {
+// traverse up the directory tree until we find a rescript.json or bsconfig.json, if not return None
+pub fn get_nearest_config(path_buf: &Path) -> Option<String> {
     let mut current_dir = path_buf.to_owned();
     loop {
         if has_rescript_config(&current_dir) {
diff --git a/src/lib.rs b/src/lib.rs
index b84aed04..9dc6f559 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,6 +1,6 @@
-pub mod bsconfig;
 pub mod build;
 pub mod cmd;
+pub mod config;
 pub mod helpers;
 pub mod lock;
 pub mod queue;
diff --git a/src/main.rs b/src/main.rs
index 16894345..b7a4c012 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -25,7 +25,7 @@ struct Args {
     #[arg(value_enum)]
     command: Option<Command>,
 
-    /// The relative path to where the main bsconfig.json resides. IE - the root of your project.
+    /// The relative path to where the main rescript.json resides. IE - the root of your project.
     folder: Option<String>,
 
     /// Filter allows for a regex to be supplied which will filter the files to be compiled.
For diff --git a/src/sourcedirs.rs b/src/sourcedirs.rs index 41eafb50..ff206639 100644 --- a/src/sourcedirs.rs +++ b/src/sourcedirs.rs @@ -76,9 +76,9 @@ pub fn print(buildstate: &BuildState) { // Extract Pkgs let pkgs = [ - &package.bsconfig.pinned_dependencies, - &package.bsconfig.bs_dependencies, - &package.bsconfig.bs_dev_dependencies, + &package.config.pinned_dependencies, + &package.config.bs_dependencies, + &package.config.bs_dev_dependencies, ] .into_iter() .map(|dependencies| deps_to_pkgs(&buildstate.packages, dependencies)); From 2fdd4b503cf7da712acb06137557791b2e258727 Mon Sep 17 00:00:00 2001 From: Roland Peelen Date: Mon, 18 Nov 2024 11:49:36 +0100 Subject: [PATCH 3/9] :art: - Allow no sources in rescript.json - warn when not root package --- src/build/packages.rs | 11 +++++++++-- src/config.rs | 4 +++- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/src/build/packages.rs b/src/build/packages.rs index c17be493..edbd1a1c 100644 --- a/src/build/packages.rs +++ b/src/build/packages.rs @@ -358,8 +358,8 @@ fn flatten_dependencies(dependencies: Vec) -> Vec { fn make_package(config: config::Config, package_path: &str, is_pinned_dep: bool, is_root: bool) -> Package { let source_folders = match config.sources.to_owned() { - config::OneOrMore::Single(source) => get_source_dirs(source, None), - config::OneOrMore::Multiple(sources) => { + Some(config::OneOrMore::Single(source)) => get_source_dirs(source, None), + Some(config::OneOrMore::Multiple(sources)) => { let mut source_folders: AHashSet = AHashSet::new(); sources .iter() @@ -369,6 +369,13 @@ fn make_package(config: config::Config, package_path: &str, is_pinned_dep: bool, .for_each(|source| source_folders.extend(source)); source_folders } + None => { + if !is_root { + log::warn!("Package '{}' has not defined any sources, but is not the root package. This is likely a mistake. It is located: {}", config.name, package_path); + } + + AHashSet::new() + } }; Package { diff --git a/src/config.rs b/src/config.rs index 7a6a28f1..b242de40 100644 --- a/src/config.rs +++ b/src/config.rs @@ -171,7 +171,9 @@ pub type GenTypeConfig = serde_json::Value; #[derive(Deserialize, Debug, Clone)] pub struct Config { pub name: String, - pub sources: OneOrMore, + // In the case of monorepos, the root source won't necessarily have to have sources. 
It can + // just be sources in packages + pub sources: Option>, #[serde(rename = "package-specs")] pub package_specs: Option>, pub warnings: Option, From 17556b9ce68e83c98e2f861bd1464008632079e0 Mon Sep 17 00:00:00 2001 From: Roland Peelen Date: Mon, 18 Nov 2024 11:58:33 +0100 Subject: [PATCH 4/9] :goal_net: - Drop panic --- Cargo.lock | 7 ++ Cargo.toml | 1 + src/build/build_types.rs | 11 ++- src/build/compile.rs | 191 +++++++++++++++++++-------------------- 4 files changed, 113 insertions(+), 97 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 99024612..e1825523 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -72,6 +72,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "anyhow" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" + [[package]] name = "arrayref" version = "0.3.7" @@ -713,6 +719,7 @@ name = "rewatch" version = "1.0.9" dependencies = [ "ahash", + "anyhow", "blake3", "clap", "clap-verbosity-flag", diff --git a/Cargo.toml b/Cargo.toml index 5195fecd..7266bb61 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,6 +24,7 @@ clap = { version = "4.3.17", features = ["derive"] } sysinfo = "0.29.10" ctrlc = "3.4.4" clap-verbosity-flag = "2.2.2" +anyhow = "1.0.93" [profile.release] diff --git a/src/build/build_types.rs b/src/build/build_types.rs index 04f71322..95e90396 100644 --- a/src/build/build_types.rs +++ b/src/build/build_types.rs @@ -1,6 +1,6 @@ use crate::build::packages::{Namespace, Package}; use ahash::{AHashMap, AHashSet}; -use std::time::SystemTime; +use std::{fmt::Display, time::SystemTime}; #[derive(Debug, Clone, PartialEq)] pub enum ParseState { @@ -52,6 +52,15 @@ pub enum SourceType { MlMap(MlMap), } +impl Display for SourceType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + SourceType::SourceFile(_) => write!(f, "SourceFile"), + SourceType::MlMap(_) => write!(f, "MlMap"), + } + } +} + #[derive(Debug, Clone)] pub struct Module { pub source_type: SourceType, diff --git a/src/build/compile.rs b/src/build/compile.rs index 408d3557..b08d0b8a 100644 --- a/src/build/compile.rs +++ b/src/build/compile.rs @@ -8,6 +8,7 @@ use super::packages; use crate::config; use crate::helpers; use ahash::{AHashMap, AHashSet}; +use anyhow::{anyhow, Result}; use console::style; use log::{debug, log_enabled, trace, Level::Info}; use rayon::prelude::*; @@ -225,106 +226,100 @@ pub fn compile( } }) }) - .collect::, String>, - Option, String>>, - bool, - bool, - )>, - >>() + .collect::>() .iter() - .for_each(|result| if let Some((module_name, result, interface_result, is_clean, is_compiled)) = result { - in_progress_modules.remove(module_name); + .for_each(|result| { + if let Some((module_name, result, interface_result, is_clean, is_compiled)) = result { + in_progress_modules.remove(module_name); - if *is_compiled { - num_compiled_modules += 1; - } + if *is_compiled { + num_compiled_modules += 1; + } - files_current_loop_count += 1; - compiled_modules.insert(module_name.to_string()); + files_current_loop_count += 1; + compiled_modules.insert(module_name.to_string()); - if *is_clean { - // actually add it to a list of clean modules - clean_modules.insert(module_name.to_string()); - } + if *is_clean { + // actually add it to a list of clean modules + clean_modules.insert(module_name.to_string()); + } - let module_dependents = build_state.get_module(module_name).unwrap().dependents.clone(); + let module_dependents = 
build_state.get_module(module_name).unwrap().dependents.clone(); - // if not clean -- compile modules that depend on this module - for dep in module_dependents.iter() { - // mark the reverse dep as dirty when the source is not clean - if !*is_clean { - let dep_module = build_state.modules.get_mut(dep).unwrap(); + // if not clean -- compile modules that depend on this module + for dep in module_dependents.iter() { // mark the reverse dep as dirty when the source is not clean - dep_module.compile_dirty = true; - } - if !compiled_modules.contains(dep) { - in_progress_modules.insert(dep.to_string()); + if !*is_clean { + let dep_module = build_state.modules.get_mut(dep).unwrap(); + // mark the reverse dep as dirty when the source is not clean + dep_module.compile_dirty = true; + } + if !compiled_modules.contains(dep) { + in_progress_modules.insert(dep.to_string()); + } } - } - let module = build_state.modules.get_mut(module_name).unwrap(); - let package = build_state - .packages - .get(&module.package_name) - .expect("Package not found"); - match module.source_type { - SourceType::MlMap(ref mut mlmap) => { - module.compile_dirty = false; - mlmap.parse_dirty = false; - } - SourceType::SourceFile(ref mut source_file) => { - match result { - Ok(Some(err)) => { - source_file.implementation.compile_state = CompileState::Warning; - logs::append(package, err); - compile_warnings.push_str(err); - } - Ok(None) => { - source_file.implementation.compile_state = CompileState::Success; - } - Err(err) => { - source_file.implementation.compile_state = CompileState::Error; - logs::append(package, err); - compile_errors.push_str(err); - } - }; - match interface_result { - Some(Ok(Some(err))) => { - source_file.interface.as_mut().unwrap().compile_state = - CompileState::Warning; - logs::append(package, err); - compile_warnings.push_str(err); - } - Some(Ok(None)) => { - if let Some(interface) = source_file.interface.as_mut() { - interface.compile_state = CompileState::Success; + let module = build_state.modules.get_mut(module_name).unwrap(); + let package = build_state + .packages + .get(&module.package_name) + .expect("Package not found"); + match module.source_type { + SourceType::MlMap(ref mut mlmap) => { + module.compile_dirty = false; + mlmap.parse_dirty = false; + } + SourceType::SourceFile(ref mut source_file) => { + match result { + Ok(Some(err)) => { + source_file.implementation.compile_state = CompileState::Warning; + logs::append(package, err); + compile_warnings.push_str(err); + } + Ok(None) => { + source_file.implementation.compile_state = CompileState::Success; + } + Err(err) => { + source_file.implementation.compile_state = CompileState::Error; + logs::append(package, &err.to_string()); + compile_errors.push_str(&err.to_string()); + } + }; + match interface_result { + Some(Ok(Some(err))) => { + source_file.interface.as_mut().unwrap().compile_state = + CompileState::Warning; + logs::append(package, &err.to_string()); + compile_warnings.push_str(&err.to_string()); + } + Some(Ok(None)) => { + if let Some(interface) = source_file.interface.as_mut() { + interface.compile_state = CompileState::Success; + } } - } - Some(Err(err)) => { - source_file.interface.as_mut().unwrap().compile_state = - CompileState::Error; - logs::append(package, err); - compile_errors.push_str(err); - } - _ => (), - }; - match (result, interface_result) { - // successfull compilation - (Ok(None), Some(Ok(None))) | (Ok(None), None) => { - module.compile_dirty = false; - module.last_compiled_cmi = Some(SystemTime::now()); - 
module.last_compiled_cmt = Some(SystemTime::now()); - } - // some error or warning - (Err(_), _) - | (_, Some(Err(_))) - | (Ok(Some(_)), _) - | (_, Some(Ok(Some(_)))) => { - module.compile_dirty = true; + Some(Err(err)) => { + source_file.interface.as_mut().unwrap().compile_state = + CompileState::Error; + logs::append(package, &err.to_string()); + compile_errors.push_str(&err.to_string()); + } + _ => (), + }; + match (result, interface_result) { + // successfull compilation + (Ok(None), Some(Ok(None))) | (Ok(None), None) => { + module.compile_dirty = false; + module.last_compiled_cmi = Some(SystemTime::now()); + module.last_compiled_cmt = Some(SystemTime::now()); + } + // some error or warning + (Err(_), _) + | (_, Some(Err(_))) + | (Ok(Some(_)), _) + | (_, Some(Ok(Some(_)))) => { + module.compile_dirty = true; + } } } } @@ -523,12 +518,16 @@ fn compile_file( packages: &AHashMap, project_root: &str, workspace_root: &Option, -) -> Result, String> { +) -> Result> { let build_path_abs = package.get_build_path(); - let implementation_file_path = match module.source_type { - SourceType::SourceFile(ref source_file) => &source_file.implementation.path, - _ => panic!("Not a source file"), - }; + let implementation_file_path = match &module.source_type { + SourceType::SourceFile(ref source_file) => Ok(&source_file.implementation.path), + sourcetype => Err(anyhow!( + "Tried to compile a file that is not a source file ({}). Path to AST: {}. ", + sourcetype, + ast_path + )), + }?; let module_name = helpers::file_path_to_module_name(implementation_file_path, &package.namespace); let has_interface = module.get_interface().is_some(); let to_mjs_args = compiler_args( @@ -553,9 +552,9 @@ fn compile_file( Ok(x) if !x.status.success() => { let stderr = String::from_utf8_lossy(&x.stderr); let stdout = String::from_utf8_lossy(&x.stdout); - Err(stderr.to_string() + &stdout) + Err(anyhow!(stderr.to_string() + &stdout)) } - Err(e) => Err(format!("ERROR, {}, {:?}", e, ast_path)), + Err(e) => Err(anyhow!("Could not compile file. Error: {}. Path to AST: {:?}", e, ast_path)), Ok(x) => { let err = std::str::from_utf8(&x.stderr) .expect("stdout should be non-null") From 95ca414e868ba0a30b940fbc38d35675aa7accc4 Mon Sep 17 00:00:00 2001 From: Roland Peelen Date: Mon, 18 Nov 2024 12:14:19 +0100 Subject: [PATCH 5/9] :goal_net: - Better error flow --- src/build.rs | 15 +++- src/build/compile.rs | 198 +++++++++++++++++++++---------------------- 2 files changed, 108 insertions(+), 105 deletions(-) diff --git a/src/build.rs b/src/build.rs index c07b6872..4d4f852f 100644 --- a/src/build.rs +++ b/src/build.rs @@ -263,14 +263,19 @@ fn format_step(current: usize, total: usize) -> console::StyledObject { #[derive(Debug, Clone)] pub enum IncrementalBuildError { SourceFileParseError, - CompileError, + CompileError(Option), } impl fmt::Display for IncrementalBuildError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Self::SourceFileParseError => write!(f, "{} {}Could not parse Source Files", LINE_CLEAR, CROSS,), - Self::CompileError => write!(f, "{} {}Failed to Compile. See Errors Above", LINE_CLEAR, CROSS,), + Self::CompileError(Some(e)) => { + write!(f, "{} {}Failed to Compile. Error: {e}", LINE_CLEAR, CROSS,) + } + Self::CompileError(None) => { + write!(f, "{} {}Failed to Compile. 
See Errors Above", LINE_CLEAR, CROSS,) + } } } } @@ -380,7 +385,9 @@ pub fn incremental_build( }; let (compile_errors, compile_warnings, num_compiled_modules) = - compile::compile(build_state, || pb.inc(1), |size| pb.set_length(size)); + compile::compile(build_state, || pb.inc(1), |size| pb.set_length(size)) + .map_err(|e| IncrementalBuildError::CompileError(Some(e.to_string())))?; + let compile_duration = start_compiling.elapsed(); logs::finalize(&build_state.packages); @@ -409,7 +416,7 @@ pub fn incremental_build( module.compile_dirty = true; } } - Err(IncrementalBuildError::CompileError) + Err(IncrementalBuildError::CompileError(None)) } else { if show_progress { println!( diff --git a/src/build/compile.rs b/src/build/compile.rs index b08d0b8a..e4cb1780 100644 --- a/src/build/compile.rs +++ b/src/build/compile.rs @@ -20,7 +20,7 @@ pub fn compile( build_state: &mut BuildState, inc: impl Fn() + std::marker::Sync, set_length: impl Fn(u64), -) -> (String, String, usize) { +) -> Result<(String, String, usize)> { let mut compiled_modules = AHashSet::::new(); let dirty_modules = build_state .modules @@ -107,9 +107,9 @@ pub fn compile( let current_in_progres_modules = in_progress_modules.clone(); - current_in_progres_modules + let results = current_in_progres_modules .par_iter() - .map(|module_name| { + .filter_map(|module_name| { let module = build_state.get_module(module_name).unwrap(); let package = build_state .get_package(&module.package_name) @@ -185,17 +185,8 @@ pub fn compile( &build_state.project_root, &build_state.workspace_root, ); - // if let Err(error) = result.to_owned() { - // println!("{}", error); - // panic!("Implementation compilation error!"); - // } let cmi_digest_after = helpers::compute_file_hash(&cmi_path); - // println!( - // "cmi path {}, digest: {:?} / {:?}", - // cmi_path, cmi_digest, cmi_digest_after - // ); - // we want to compare both the hash of interface and the implementation // compile assets to verify that nothing changed. 
We also need to checke the interface // because we can include MyModule, so the modules that depend on this module might @@ -226,105 +217,105 @@ pub fn compile( } }) }) - .collect::>() - .iter() - .for_each(|result| { - if let Some((module_name, result, interface_result, is_clean, is_compiled)) = result { - in_progress_modules.remove(module_name); - - if *is_compiled { - num_compiled_modules += 1; - } + .collect::>(); - files_current_loop_count += 1; - compiled_modules.insert(module_name.to_string()); + for result in results.iter() { + let (module_name, result, interface_result, is_clean, is_compiled) = result; + in_progress_modules.remove(module_name); - if *is_clean { - // actually add it to a list of clean modules - clean_modules.insert(module_name.to_string()); - } + if *is_compiled { + num_compiled_modules += 1; + } + + files_current_loop_count += 1; + compiled_modules.insert(module_name.to_string()); - let module_dependents = build_state.get_module(module_name).unwrap().dependents.clone(); + if *is_clean { + // actually add it to a list of clean modules + clean_modules.insert(module_name.to_string()); + } + + let module_dependents = build_state.get_module(module_name).unwrap().dependents.clone(); - // if not clean -- compile modules that depend on this module - for dep in module_dependents.iter() { - // mark the reverse dep as dirty when the source is not clean - if !*is_clean { - let dep_module = build_state.modules.get_mut(dep).unwrap(); - // mark the reverse dep as dirty when the source is not clean - dep_module.compile_dirty = true; + // if not clean -- compile modules that depend on this module + for dep in module_dependents.iter() { + // mark the reverse dep as dirty when the source is not clean + if !*is_clean { + let dep_module = build_state.modules.get_mut(dep).unwrap(); + // mark the reverse dep as dirty when the source is not clean + dep_module.compile_dirty = true; + } + if !compiled_modules.contains(dep) { + in_progress_modules.insert(dep.to_string()); + } + } + + let module = build_state + .modules + .get_mut(module_name) + .ok_or(anyhow!("Module not found"))?; + + let package = build_state + .packages + .get(&module.package_name) + .ok_or(anyhow!("Package name not found"))?; + + match module.source_type { + SourceType::MlMap(ref mut mlmap) => { + module.compile_dirty = false; + mlmap.parse_dirty = false; + } + SourceType::SourceFile(ref mut source_file) => { + match result { + Ok(Some(err)) => { + source_file.implementation.compile_state = CompileState::Warning; + logs::append(package, err); + compile_warnings.push_str(err); } - if !compiled_modules.contains(dep) { - in_progress_modules.insert(dep.to_string()); + Ok(None) => { + source_file.implementation.compile_state = CompileState::Success; + } + Err(err) => { + source_file.implementation.compile_state = CompileState::Error; + logs::append(package, &err.to_string()); + compile_errors.push_str(&err.to_string()); + } + }; + match interface_result { + Some(Ok(Some(err))) => { + source_file.interface.as_mut().unwrap().compile_state = CompileState::Warning; + logs::append(package, &err.to_string()); + compile_warnings.push_str(&err.to_string()); + } + Some(Ok(None)) => { + if let Some(interface) = source_file.interface.as_mut() { + interface.compile_state = CompileState::Success; + } } - } - let module = build_state.modules.get_mut(module_name).unwrap(); - let package = build_state - .packages - .get(&module.package_name) - .expect("Package not found"); - match module.source_type { - SourceType::MlMap(ref mut mlmap) => { 
- module.compile_dirty = false; - mlmap.parse_dirty = false; + Some(Err(err)) => { + source_file.interface.as_mut().unwrap().compile_state = CompileState::Error; + logs::append(package, &err.to_string()); + compile_errors.push_str(&err.to_string()); } - SourceType::SourceFile(ref mut source_file) => { - match result { - Ok(Some(err)) => { - source_file.implementation.compile_state = CompileState::Warning; - logs::append(package, err); - compile_warnings.push_str(err); - } - Ok(None) => { - source_file.implementation.compile_state = CompileState::Success; - } - Err(err) => { - source_file.implementation.compile_state = CompileState::Error; - logs::append(package, &err.to_string()); - compile_errors.push_str(&err.to_string()); - } - }; - match interface_result { - Some(Ok(Some(err))) => { - source_file.interface.as_mut().unwrap().compile_state = - CompileState::Warning; - logs::append(package, &err.to_string()); - compile_warnings.push_str(&err.to_string()); - } - Some(Ok(None)) => { - if let Some(interface) = source_file.interface.as_mut() { - interface.compile_state = CompileState::Success; - } - } + _ => (), + }; - Some(Err(err)) => { - source_file.interface.as_mut().unwrap().compile_state = - CompileState::Error; - logs::append(package, &err.to_string()); - compile_errors.push_str(&err.to_string()); - } - _ => (), - }; - match (result, interface_result) { - // successfull compilation - (Ok(None), Some(Ok(None))) | (Ok(None), None) => { - module.compile_dirty = false; - module.last_compiled_cmi = Some(SystemTime::now()); - module.last_compiled_cmt = Some(SystemTime::now()); - } - // some error or warning - (Err(_), _) - | (_, Some(Err(_))) - | (Ok(Some(_)), _) - | (_, Some(Ok(Some(_)))) => { - module.compile_dirty = true; - } - } + match (result, interface_result) { + // successfull compilation + (Ok(None), Some(Ok(None))) | (Ok(None), None) => { + module.compile_dirty = false; + module.last_compiled_cmi = Some(SystemTime::now()); + module.last_compiled_cmt = Some(SystemTime::now()); + } + // some error or warning + (Err(_), _) | (_, Some(Err(_))) | (Ok(Some(_)), _) | (_, Some(Ok(Some(_)))) => { + module.compile_dirty = true; } } } - }); + } + } files_total_count += files_current_loop_count; @@ -339,6 +330,7 @@ pub fn compile( .map(|s| (s, build_state.get_module(s).unwrap())) .collect::>(), ); + compile_errors.push_str(&format!( "\n{}\n{}\n", style("Can't continue... Found a circular dependency in your code:").red(), @@ -350,7 +342,7 @@ pub fn compile( }; } - (compile_errors, compile_warnings, num_compiled_modules) + Ok((compile_errors, compile_warnings, num_compiled_modules)) } pub fn compiler_args( @@ -554,7 +546,11 @@ fn compile_file( let stdout = String::from_utf8_lossy(&x.stdout); Err(anyhow!(stderr.to_string() + &stdout)) } - Err(e) => Err(anyhow!("Could not compile file. Error: {}. Path to AST: {:?}", e, ast_path)), + Err(e) => Err(anyhow!( + "Could not compile file. Error: {}. 
Path to AST: {:?}", + e, + ast_path + )), Ok(x) => { let err = std::str::from_utf8(&x.stderr) .expect("stdout should be non-null") From ae6c713095899546fd716a5652603b6797e81190 Mon Sep 17 00:00:00 2001 From: Roland Peelen Date: Wed, 20 Nov 2024 10:26:30 +0100 Subject: [PATCH 6/9] :bug: - Allow arbitrary modules in JSX --- src/build/packages.rs | 4 +++- src/config.rs | 47 ++++++++++++++++++++++++++++++++++++------- 2 files changed, 43 insertions(+), 8 deletions(-) diff --git a/src/build/packages.rs b/src/build/packages.rs index edbd1a1c..607a8209 100644 --- a/src/build/packages.rs +++ b/src/build/packages.rs @@ -864,7 +864,9 @@ mod test { name: name.clone(), config: crate::config::Config { name: name.clone(), - sources: crate::config::OneOrMore::Single(Source::Shorthand(String::from("Source"))), + sources: Some(crate::config::OneOrMore::Single(Source::Shorthand(String::from( + "Source", + )))), package_specs: None, warnings: None, suffix: None, diff --git a/src/config.rs b/src/config.rs index b242de40..e95707e8 100644 --- a/src/config.rs +++ b/src/config.rs @@ -140,21 +140,22 @@ pub enum NamespaceConfig { String(String), } -#[derive(Deserialize, Debug, Clone)] +#[derive(Deserialize, Debug, Clone, Eq, PartialEq)] +#[serde(rename_all = "camelCase")] pub enum JsxMode { - #[serde(rename = "classic")] Classic, - #[serde(rename = "automatic")] Automatic, } -#[derive(Deserialize, Debug, Clone)] +#[derive(Deserialize, Debug, Clone, Eq, PartialEq)] +#[serde(rename_all = "camelCase")] +#[serde(untagged)] pub enum JsxModule { - #[serde(rename = "react")] React, + Other(String), } -#[derive(Deserialize, Debug, Clone)] +#[derive(Deserialize, Debug, Clone, Eq, PartialEq)] pub struct JsxSpecs { pub version: Option, pub module: Option, @@ -376,6 +377,9 @@ impl Config { Some(JsxModule::React) => { vec!["-bs-jsx-module".to_string(), "react".to_string()] } + Some(JsxModule::Other(module)) => { + vec!["-bs-jsx-module".to_string(), module] + } None => vec![], }, _ => vec![], @@ -476,7 +480,7 @@ mod tests { "#; let config = serde_json::from_str::(json).unwrap(); - if let OneOrMore::Single(source) = config.sources { + if let Some(OneOrMore::Single(source)) = config.sources { let source = source.to_qualified_without_children(None); assert_eq!(source.type_, Some(String::from("dev"))); } else { @@ -507,6 +511,35 @@ mod tests { assert_eq!(config.get_gentype_arg(), vec!["-bs-gentype".to_string()]); } + #[test] + fn test_other_jsx_module() { + let json = r#" + { + "name": "my-monorepo", + "sources": [ { "dir": "src/", "subdirs": true } ], + "package-specs": [ { "module": "es6", "in-source": true } ], + "suffix": ".mjs", + "pinned-dependencies": [ "@teamwalnut/app" ], + "bs-dependencies": [ "@teamwalnut/app" ], + "jsx": { + "module": "Voby.JSX" + } + } + "#; + + let config = serde_json::from_str::(json).unwrap(); + assert!(config.jsx.is_some()); + assert_eq!( + config.jsx.unwrap(), + JsxSpecs { + version: None, + module: Some(JsxModule::Other(String::from("Voby.JSX"))), + mode: None, + v3_dependencies: None, + }, + ); + } + #[test] fn test_check_if_rescript11_or_higher() { assert_eq!(check_if_rescript11_or_higher("11.0.0"), Ok(true)); From 9ef4f9583ebb6df601aebc1a97b27c98601d17a7 Mon Sep 17 00:00:00 2001 From: Roland Peelen Date: Wed, 20 Nov 2024 10:57:38 +0100 Subject: [PATCH 7/9] :loud_sound: - Only show progress when exactly at 'Info' --- src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main.rs b/src/main.rs index b7a4c012..8e9fe683 100644 --- a/src/main.rs +++ b/src/main.rs @@ -101,7 
+101,7 @@ fn main() { // The 'normal run' mode will show the 'pretty' formatted progress. But if we turn off the log // level, we should never show that. - let show_progress = log_level_filter != LevelFilter::Off; + let show_progress = log_level_filter == LevelFilter::Info; match lock::get(&folder) { lock::Lock::Error(ref e) => { From 9fabbb9b31604fd9bb8b1641da16ddbc97e800b8 Mon Sep 17 00:00:00 2001 From: Roland Peelen Date: Wed, 20 Nov 2024 10:59:44 +0100 Subject: [PATCH 8/9] :goal_net: - Don't panic when failing to parse bsconfig --- src/build.rs | 69 ++++++++++++------------------------------- src/build/clean.rs | 9 ++++-- src/build/packages.rs | 54 +++++++++++++++++++++++++-------- src/config.rs | 17 ++++------- src/main.rs | 7 +++-- 5 files changed, 78 insertions(+), 78 deletions(-) diff --git a/src/build.rs b/src/build.rs index 4d4f852f..92fb31c6 100644 --- a/src/build.rs +++ b/src/build.rs @@ -13,6 +13,7 @@ use crate::helpers::emojis::*; use crate::helpers::{self, get_workspace_root}; use crate::sourcedirs; use ahash::AHashSet; +use anyhow::{anyhow, Result}; use build_types::*; use console::style; use indicatif::{ProgressBar, ProgressStyle}; @@ -54,15 +55,19 @@ pub struct CompilerArgs { pub parser_args: Vec, } -pub fn get_compiler_args(path: &str, rescript_version: Option, bsc_path: Option) -> String { +pub fn get_compiler_args( + path: &str, + rescript_version: Option, + bsc_path: Option, +) -> Result { let filename = &helpers::get_abs_path(path); let package_root = helpers::get_abs_path( &helpers::get_nearest_config(&std::path::PathBuf::from(path)).expect("Couldn't find package root"), ); let workspace_root = get_workspace_root(&package_root).map(|p| helpers::get_abs_path(&p)); let root_rescript_config = - packages::read_config(&workspace_root.to_owned().unwrap_or(package_root.to_owned())); - let rescript_config = packages::read_config(&package_root); + packages::read_config(&workspace_root.to_owned().unwrap_or(package_root.to_owned()))?; + let rescript_config = packages::read_config(&package_root)?; let rescript_version = if let Some(rescript_version) = rescript_version { rescript_version } else { @@ -111,28 +116,13 @@ pub fn get_compiler_args(path: &str, rescript_version: Option, bsc_path: &workspace_root, &None, ); - serde_json::to_string_pretty(&CompilerArgs { + + let result = serde_json::to_string_pretty(&CompilerArgs { compiler_args, parser_args, - }) - .unwrap() -} + })?; -#[derive(Debug, Clone)] -pub enum InitializeBuildError { - PackageDependencyValidation, -} - -impl fmt::Display for InitializeBuildError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Self::PackageDependencyValidation => write!( - f, - "{} {}Could not Validate Package Dependencies", - LINE_CLEAR, CROSS, - ), - } - } + Ok(result) } pub fn initialize_build( @@ -141,14 +131,14 @@ pub fn initialize_build( show_progress: bool, path: &str, bsc_path: Option, -) -> Result { +) -> Result { let project_root = helpers::get_abs_path(path); let workspace_root = helpers::get_workspace_root(&project_root); let bsc_path = match bsc_path { Some(bsc_path) => bsc_path, None => helpers::get_bsc(&project_root, workspace_root.to_owned()), }; - let root_config_name = packages::get_package_name(&project_root); + let root_config_name = packages::get_package_name(&project_root)?; let rescript_version = helpers::get_rescript_version(&bsc_path); if show_progress { @@ -157,7 +147,7 @@ pub fn initialize_build( } let timing_package_tree = Instant::now(); - let packages = packages::make(filter, 
+    let packages = packages::make(filter, &project_root, &workspace_root, show_progress)?;
     let timing_package_tree_elapsed = timing_package_tree.elapsed();
 
     if show_progress {
@@ -173,7 +163,7 @@ pub fn initialize_build(
     }
 
     if !packages::validate_packages_dependencies(&packages) {
-        return Err(InitializeBuildError::PackageDependencyValidation);
+        return Err(anyhow!("Failed to validate package dependencies"));
     }
 
     let timing_source_files = Instant::now();
@@ -435,27 +425,6 @@ pub fn incremental_build(
     }
 }
 
-#[derive(Debug, Clone)]
-pub enum BuildError {
-    InitializeBuild(InitializeBuildError),
-    IncrementalBuild(IncrementalBuildError),
-}
-
-impl fmt::Display for BuildError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        match self {
-            Self::InitializeBuild(e) => {
-                write!(f, "{} {}Error Initializing Build: {}", LINE_CLEAR, CROSS, e)
-            }
-            Self::IncrementalBuild(e) => write!(
-                f,
-                "{} {}Error Running Incremental Build: {}",
-                LINE_CLEAR, CROSS, e
-            ),
-        }
-    }
-}
-
 // write build.ninja files in the packages after a non-incremental build
 // this is necessary to bust the editor tooling cache. The editor tooling
 // is watching this file.
@@ -477,7 +446,7 @@ pub fn build(
     no_timing: bool,
     create_sourcedirs: bool,
     bsc_path: Option<String>,
-) -> Result<BuildState, BuildError> {
+) -> Result<BuildState> {
     let default_timing: Option<std::time::Duration> = if no_timing {
         Some(std::time::Duration::new(0.0 as u64, 0.0 as u32))
     } else {
@@ -485,7 +454,7 @@ pub fn build(
     };
     let timing_total = Instant::now();
     let mut build_state = initialize_build(default_timing, filter, show_progress, path, bsc_path)
-        .map_err(BuildError::InitializeBuild)?;
+        .map_err(|e| anyhow!("Could not initialize build. Error: {e}"))?;
 
     match incremental_build(
         &mut build_state,
@@ -512,7 +481,7 @@ pub fn build(
         Err(e) => {
             clean::cleanup_after_build(&build_state);
             write_build_ninja(&build_state);
-            Err(BuildError::IncrementalBuild(e))
+            Err(anyhow!("Incremental build failed. Error: {e}"))
Error: {e}")) } } } diff --git a/src/build/clean.rs b/src/build/clean.rs index 25835f18..8101c584 100644 --- a/src/build/clean.rs +++ b/src/build/clean.rs @@ -3,6 +3,7 @@ use super::packages; use crate::helpers; use crate::helpers::emojis::*; use ahash::AHashSet; +use anyhow::Result; use console::style; use rayon::prelude::*; use std::io::Write; @@ -318,11 +319,11 @@ pub fn cleanup_after_build(build_state: &BuildState) { }); } -pub fn clean(path: &str, show_progress: bool, bsc_path: Option) { +pub fn clean(path: &str, show_progress: bool, bsc_path: Option) -> Result<()> { let project_root = helpers::get_abs_path(path); let workspace_root = helpers::get_workspace_root(&project_root); - let packages = packages::make(&None, &project_root, &workspace_root); - let root_config_name = packages::get_package_name(&project_root); + let packages = packages::make(&None, &project_root, &workspace_root, show_progress)?; + let root_config_name = packages::get_package_name(&project_root)?; let bsc_path = match bsc_path { Some(bsc_path) => bsc_path, None => helpers::get_bsc(&project_root, workspace_root.to_owned()), @@ -399,4 +400,6 @@ pub fn clean(path: &str, show_progress: bool, bsc_path: Option) { ); let _ = std::io::stdout().flush(); } + + Ok(()) } diff --git a/src/build/packages.rs b/src/build/packages.rs index 607a8209..c77334cf 100644 --- a/src/build/packages.rs +++ b/src/build/packages.rs @@ -5,6 +5,7 @@ use crate::config; use crate::helpers; use crate::helpers::emojis::*; use ahash::{AHashMap, AHashSet}; +use anyhow::Result; use console::style; use log::{debug, error}; use rayon::prelude::*; @@ -216,7 +217,7 @@ fn get_source_dirs(source: config::Source, sub_path: Option) -> AHashSe source_folders } -pub fn read_config(package_dir: &str) -> config::Config { +pub fn read_config(package_dir: &str) -> Result { let prefix = if package_dir.is_empty() { "".to_string() } else { @@ -290,6 +291,7 @@ fn read_dependencies( parent_path: &str, project_root: &str, workspace_root: Option, + show_progress: bool, ) -> Vec { return parent_config .bs_dependencies @@ -311,16 +313,34 @@ fn read_dependencies( let (config, canonical_path) = match read_dependency(package_name, parent_path, project_root, &workspace_root) { Err(error) => { - log::error!( + if show_progress { + println!( "{} {} Error building package tree. {}", style("[1/2]").bold().dim(), CROSS, error ); + } + + log::error!( + "We could not build package tree reading depencency '{package_name}', at path '{parent_path}'. 
Error: {error}", + ); + std::process::exit(2) } - Ok(canonical_path) => (read_config(&canonical_path), canonical_path), + Ok(canonical_path) => { + match read_config(&canonical_path) { + Ok(config) => (config, canonical_path), + Err(error) => { + log::error!( + "We could not build package tree '{package_name}', at path '{parent_path}', Error: {error}", + ); + std::process::exit(2) + } + } + } }; + let is_pinned = parent_config .pinned_dependencies .as_ref() @@ -333,6 +353,7 @@ fn read_dependencies( &canonical_path, project_root, workspace_root.to_owned(), + show_progress ); Dependency { @@ -397,8 +418,12 @@ fn make_package(config: config::Config, package_path: &str, is_pinned_dep: bool, } } -fn read_packages(project_root: &str, workspace_root: Option) -> AHashMap { - let root_config = read_config(project_root); +fn read_packages( + project_root: &str, + workspace_root: Option, + show_progress: bool, +) -> Result> { + let root_config = read_config(project_root)?; // Store all packages and completely deduplicate them let mut map: AHashMap = AHashMap::new(); @@ -414,6 +439,7 @@ fn read_packages(project_root: &str, workspace_root: Option) -> AHashMap project_root, project_root, workspace_root, + show_progress, )); dependencies.iter().for_each(|d| { if !map.contains_key(&d.name) { @@ -424,7 +450,7 @@ fn read_packages(project_root: &str, workspace_root: Option) -> AHashMap } }); - map + Ok(map) } /// `get_source_files` is essentially a wrapper around `read_structure`, which read a @@ -527,12 +553,14 @@ pub fn make( filter: &Option, root_folder: &str, workspace_root: &Option, -) -> AHashMap { - let map = read_packages(root_folder, workspace_root.to_owned()); + show_progress: bool, +) -> Result> { + let map = read_packages(root_folder, workspace_root.to_owned(), show_progress)?; /* Once we have the deduplicated packages, we can add the source files for each - to minimize * the IO */ let result = extend_with_children(filter, map); + result.values().for_each(|package| { if let Some(dirs) = &package.dirs { dirs.iter().for_each(|dir| { @@ -540,12 +568,14 @@ pub fn make( }) } }); - result + + Ok(result) } -pub fn get_package_name(path: &str) -> String { - let config = read_config(path); - config.name +pub fn get_package_name(path: &str) -> Result { + let config = read_config(path)?; + + Ok(config.name) } pub fn parse_packages(build_state: &mut BuildState) { diff --git a/src/config.rs b/src/config.rs index e95707e8..c575b90e 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,5 +1,6 @@ use crate::build::packages; use crate::helpers::deserialize::*; +use anyhow::Result; use convert_case::{Case, Casing}; use serde::Deserialize; use std::fs; @@ -267,17 +268,11 @@ pub fn flatten_ppx_flags( } /// Try to convert a bsconfig from a certain path to a bsconfig struct -pub fn read(path: String) -> Config { - fs::read_to_string(path.clone()) - .map_err(|e| format!("Could not read bsconfig. {path} - {e}")) - // .and_then(|x| { - // dbg!(&x); - // repair(x).map_err(|e| format!("Json was invalid and could not be repaired. {path} - {e}")) - // }) - .and_then(|x| { - serde_json::from_str::(&x).map_err(|e| format!("Could not parse bsconfig. 
-        })
-        .expect("Errors reading bsconfig")
+pub fn read(path: String) -> Result<Config> {
+    let read = fs::read_to_string(path.clone())?;
+    let parse = serde_json::from_str::<Config>(&read)?;
+
+    Ok(parse)
 }
 
 fn check_if_rescript11_or_higher(version: &str) -> Result<bool, String> {
diff --git a/src/main.rs b/src/main.rs
index 8e9fe683..565db3d9 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,3 +1,4 @@
+use anyhow::Result;
 use clap::{Parser, ValueEnum};
 use clap_verbosity_flag::InfoLevel;
 use log::LevelFilter;
@@ -72,7 +73,7 @@ struct Args {
     bsc_path: Option<String>,
 }
 
-fn main() {
+fn main() -> Result<()> {
     let args = Args::parse();
 
     let log_level_filter = args.verbose.log_level_filter();
@@ -93,7 +94,7 @@ fn main() {
         Some(path) => {
             println!(
                 "{}",
-                build::get_compiler_args(&path, args.rescript_version, args.bsc_path)
+                build::get_compiler_args(&path, args.rescript_version, args.bsc_path)?
            );
            std::process::exit(0);
        }
@@ -139,6 +140,8 @@ fn main() {
                 args.after_build,
                 args.create_sourcedirs.unwrap_or(false),
             );
+
+            Ok(())
         }
     },
 }

From eac37e2591393ef4a690c79b3974f6675fbce5be Mon Sep 17 00:00:00 2001
From: Roland Peelen
Date: Wed, 20 Nov 2024 11:09:17 +0100
Subject: [PATCH 9/9] :camera_flash: - Update snapshots.

---
 tests/snapshots/dependency-cycle.txt                   | 2 +-
 tests/snapshots/remove-file.txt                        | 2 +-
 tests/snapshots/rename-file-internal-dep-namespace.txt | 2 +-
 tests/snapshots/rename-file-internal-dep.txt           | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/tests/snapshots/dependency-cycle.txt b/tests/snapshots/dependency-cycle.txt
index c0d889af..b30824a9 100644
--- a/tests/snapshots/dependency-cycle.txt
+++ b/tests/snapshots/dependency-cycle.txt
@@ -15,4 +15,4 @@ Can't continue...
 Found a circular dependency in your code:
 NewNamespace.NS_alias -> Dep01 -> Dep02 -> NS -> NewNamespace.NS_alias
 ERROR:
- ️🛑 Error Running Incremental Build:  ️🛑 Failed to Compile. See Errors Above
+Incremental build failed. Error:  ️🛑 Failed to Compile. See Errors Above
diff --git a/tests/snapshots/remove-file.txt b/tests/snapshots/remove-file.txt
index be083fa9..344d8d3d 100644
--- a/tests/snapshots/remove-file.txt
+++ b/tests/snapshots/remove-file.txt
@@ -28,4 +28,4 @@ ERROR:
 
 
 ERROR:
- ️🛑 Error Running Incremental Build:  ️🛑 Failed to Compile. See Errors Above
+Incremental build failed. Error:  ️🛑 Failed to Compile. See Errors Above
diff --git a/tests/snapshots/rename-file-internal-dep-namespace.txt b/tests/snapshots/rename-file-internal-dep-namespace.txt
index fc6c41fe..b251dbce 100644
--- a/tests/snapshots/rename-file-internal-dep-namespace.txt
+++ b/tests/snapshots/rename-file-internal-dep-namespace.txt
@@ -28,4 +28,4 @@ ERROR:
 
 
 ERROR:
- ️🛑 Error Running Incremental Build:  ️🛑 Failed to Compile. See Errors Above
+Incremental build failed. Error:  ️🛑 Failed to Compile. See Errors Above
diff --git a/tests/snapshots/rename-file-internal-dep.txt b/tests/snapshots/rename-file-internal-dep.txt
index 40c6babe..4e1d142c 100644
--- a/tests/snapshots/rename-file-internal-dep.txt
+++ b/tests/snapshots/rename-file-internal-dep.txt
@@ -28,4 +28,4 @@ ERROR:
 
 
 ERROR:
- ️🛑 Error Running Incremental Build:  ️🛑 Failed to Compile. See Errors Above
+Incremental build failed. Error:  ️🛑 Failed to Compile. See Errors Above
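
A note on the `#[serde(untagged)]` change in PATCH 6/9, since it is easy to misread: with an
untagged enum, serde ignores variant names entirely and simply tries each variant in order,
keeping the first one that deserializes. In particular, a unit variant like `React` only matches
JSON `null` once the enum is untagged, so every plain string, "react" included, is captured by
`Other(String)`. That is harmless here, because the flag-emitting match turns `Other("react")`
into the same `-bs-jsx-module react` arguments as the `React` arm. The sketch below is a
standalone approximation (plain `serde` + `serde_json`, with `JsxModule` and `JsxSpecs` redefined
locally), not the crate's real types:

use serde::Deserialize;

// Standalone sketch of the untagged-enum trick from PATCH 6/9.
#[derive(Deserialize, Debug, PartialEq)]
#[serde(untagged)]
enum JsxModule {
    React,         // a unit variant only matches `null` under `untagged`
    Other(String), // every plain string, "react" included, lands here
}

#[derive(Deserialize, Debug)]
struct JsxSpecs {
    module: Option<JsxModule>,
}

fn main() {
    // An arbitrary module name no longer fails deserialization...
    let voby: JsxSpecs = serde_json::from_str(r#"{ "module": "Voby.JSX" }"#).unwrap();
    assert_eq!(voby.module, Some(JsxModule::Other(String::from("Voby.JSX"))));

    // ...and "react" also arrives through the catch-all variant.
    let react: JsxSpecs = serde_json::from_str(r#"{ "module": "react" }"#).unwrap();
    assert_eq!(react.module, Some(JsxModule::Other(String::from("react"))));
}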
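
On PATCH 7/9: `log::LevelFilter` is ordered `Off < Error < Warn < Info < Debug < Trace`, and with
`clap_verbosity_flag::InfoLevel` the default filter is `Info`. The old `!= LevelFilter::Off`
check therefore also kept the pretty progress output on at `-v`/`-vv` (Debug/Trace), where it
interleaves with log lines, while `== LevelFilter::Info` restricts it to the default verbosity.
A small standalone sketch of the difference, using only the `log` crate:

use log::LevelFilter;

// Prints the old and new values of `show_progress` at each verbosity level.
fn main() {
    for filter in [
        LevelFilter::Off,
        LevelFilter::Error,
        LevelFilter::Warn,
        LevelFilter::Info,
        LevelFilter::Debug,
        LevelFilter::Trace,
    ] {
        let old = filter != LevelFilter::Off; // true for everything except full silence
        let new = filter == LevelFilter::Info; // true only at the default level
        println!("{filter:?}: old show_progress = {old}, new = {new}");
    }
}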
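
On PATCH 8/9: the hand-rolled `InitializeBuildError`/`BuildError` enums give way to `anyhow`.
The shape is: fallible functions return `anyhow::Result<T>`, the `?` operator converts the
underlying errors (`std::io::Error`, `serde_json::Error`) through `From`, and ad-hoc messages
are built with `anyhow!`. The following is a minimal sketch of that shape, not code from the
patch; `load_config`, the file name, and the `.with_context(...)` calls are illustrative:

use anyhow::{anyhow, Context, Result};
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct Config {
    name: String,
}

// Mirrors the shape of the new `config::read`: both the IO error and the
// JSON error are converted into `anyhow::Error` by the `?` operator.
fn load_config(path: &str) -> Result<Config> {
    let raw = std::fs::read_to_string(path).with_context(|| format!("Could not read {path}"))?;
    let config = serde_json::from_str::<Config>(&raw)
        .with_context(|| format!("Could not parse {path}"))?;
    Ok(config)
}

// With `main` returning `Result<()>`, a propagated error prints its message
// chain and yields a non-zero exit code instead of a panic and backtrace.
fn main() -> Result<()> {
    let config = load_config("bsconfig.json")
        .map_err(|e| anyhow!("Could not initialize build. Error: {e}"))?;
    println!("building {}", config.name);
    Ok(())
}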