diff --git a/crates/metassr-build/src/client/mod.rs b/crates/metassr-build/src/client/mod.rs index 6fcfbcf..0e1f5bb 100644 --- a/crates/metassr-build/src/client/mod.rs +++ b/crates/metassr-build/src/client/mod.rs @@ -5,7 +5,11 @@ use hydrator::Hydrator; use metassr_bundler::WebBundler; use metassr_utils::{ - cache_dir::CacheDir, src_analyzer::special_entries, src_analyzer::SourceDir, traits::AnalyzeDir, + analyzer::{ + src_dir::{special_entries, SourceDir}, + DirectoryAnalyzer, + }, + cache_dir::CacheDir, }; use std::{ collections::HashMap, diff --git a/crates/metassr-build/src/server/manifest.rs b/crates/metassr-build/src/server/manifest.rs index e108c5a..53fcf38 100644 --- a/crates/metassr-build/src/server/manifest.rs +++ b/crates/metassr-build/src/server/manifest.rs @@ -1,7 +1,7 @@ use anyhow::{anyhow, Result}; use metassr_utils::{ + analyzer::dist_dir::{DistDirContainer, PageEntry}, cache_dir::CacheDir, - dist_analyzer::{DistDirContainer, PageEntry}, }; use serde::{Deserialize, Serialize}; use serde_json::to_string_pretty; @@ -121,7 +121,7 @@ impl ManifestGenerator { pub fn generate + ?Sized>(&self, head: &H) -> Result { let global = GlobalEntry::new(head, &self.cache.dir_path())?; let mut manifest = Manifest::new(global); - + for (path, &id) in self.targets.iter() { let route = match path .strip_prefix(self.cache.dir_path().join("pages"))? 
diff --git a/crates/metassr-build/src/server/mod.rs b/crates/metassr-build/src/server/mod.rs index 06f9fa0..45d7a65 100644 --- a/crates/metassr-build/src/server/mod.rs +++ b/crates/metassr-build/src/server/mod.rs @@ -11,10 +11,12 @@ use manifest::ManifestGenerator; use metassr_bundler::WebBundler; use metassr_utils::{ + analyzer::{ + dist_dir::DistDir, + src_dir::{special_entries, SourceDir}, + DirectoryAnalyzer, + }, cache_dir::CacheDir, - dist_analyzer::DistDir, - src_analyzer::{special_entries, SourceDir}, - traits::AnalyzeDir, }; use pages_generator::PagesGenerator; use renderer::head::HeadRenderer; diff --git a/crates/metassr-build/src/server/pages_generator.rs b/crates/metassr-build/src/server/pages_generator.rs index b8fe4ea..a98e9b1 100644 --- a/crates/metassr-build/src/server/pages_generator.rs +++ b/crates/metassr-build/src/server/pages_generator.rs @@ -6,9 +6,11 @@ use std::{ use anyhow::{anyhow, Result}; use metassr_utils::{ + analyzer::{ + dist_dir::{DistDir, DistDirContainer}, + DirectoryAnalyzer, + }, cache_dir::CacheDir, - dist_analyzer::{DistDir, DistDirContainer}, - traits::AnalyzeDir, }; use crate::traits::Exec; diff --git a/crates/metassr-build/src/server/renderer/html.rs b/crates/metassr-build/src/server/renderer/html.rs index e96d88d..dd16f68 100644 --- a/crates/metassr-build/src/server/renderer/html.rs +++ b/crates/metassr-build/src/server/renderer/html.rs @@ -6,7 +6,7 @@ use html_generator::{ html_props::HtmlProps, template::HtmlTemplate, }; -use metassr_utils::dist_analyzer::PageEntry; +use metassr_utils::analyzer::dist_dir::PageEntry; pub struct HtmlRenderer<'a> { head: String, diff --git a/crates/metassr-build/src/server/renderer/page.rs b/crates/metassr-build/src/server/renderer/page.rs index a57f559..d2ba4b5 100644 --- a/crates/metassr-build/src/server/renderer/page.rs +++ b/crates/metassr-build/src/server/renderer/page.rs @@ -1,7 +1,7 @@ use std::ffi::OsStr; use anyhow::Result; -use metassr_utils::{cache_dir::CacheDir, 
dist_analyzer::PageEntry}; +use metassr_utils::{analyzer::dist_dir::PageEntry, cache_dir::CacheDir}; use crate::{ server::{manifest::Manifest, render_exec::RenderExec}, diff --git a/crates/metassr-build/src/server/targets.rs b/crates/metassr-build/src/server/targets.rs index 8e1ce14..60eb020 100644 --- a/crates/metassr-build/src/server/targets.rs +++ b/crates/metassr-build/src/server/targets.rs @@ -4,7 +4,7 @@ use std::{ }; use anyhow::Result; -use metassr_utils::{cache_dir::CacheDir, src_analyzer::PagesEntriesType}; +use metassr_utils::{analyzer::src_dir::PagesEntriesType, cache_dir::CacheDir}; use crate::{traits::Generate, utils::setup_page_path}; diff --git a/crates/metassr-server/src/handler.rs b/crates/metassr-server/src/handler.rs index 7e5b975..5ca2a71 100644 --- a/crates/metassr-server/src/handler.rs +++ b/crates/metassr-server/src/handler.rs @@ -5,9 +5,9 @@ use axum::{ routing::get, }; use metassr_build::server::renderer::page::PageRenderer; -use metassr_utils::{ - dist_analyzer::{DistDir, PageEntry}, - traits::AnalyzeDir, +use metassr_utils::analyzer::{ + dist_dir::{DistDir, PageEntry}, + DirectoryAnalyzer, }; use std::{collections::HashMap, fs::read_to_string, path::PathBuf}; diff --git a/crates/metassr-utils/src/analyzer/dist_dir.rs b/crates/metassr-utils/src/analyzer/dist_dir.rs new file mode 100644 index 0000000..1bc97b5 --- /dev/null +++ b/crates/metassr-utils/src/analyzer/dist_dir.rs @@ -0,0 +1,421 @@ +use super::DirectoryAnalyzer; +use anyhow::{anyhow, Result}; +use serde::{Deserialize, Serialize}; +use std::{ + collections::HashMap, + ffi::OsStr, + marker::Sized, + path::{Path, PathBuf}, +}; +use walkdir::WalkDir; + +/// `DistDirContainer` is a structure that holds the analyzing results for the `dist/` directory. +/// It contains a `HashMap` where the keys are page names (directories) and the values are `PageEntry` structures. 
+/// +/// # Example +/// +/// ```no_run +/// use metassr_utils::analyzer::dist_dir::{DistDirContainer, PageEntry}; +/// use std::{collections::HashMap, path::PathBuf}; +/// +/// let mut container = DistDirContainer { +/// pages: HashMap::new(), +/// }; +/// +/// let page_entry = PageEntry::new(PathBuf::from("/dist/pages/home.js")); +/// container.pages.insert("home".to_string(), page_entry); +/// +/// println!("{:?}", container.pages.get("home")); +/// ``` +#[derive(Debug)] +pub struct DistDirContainer { + pub pages: HashMap, // Maps page paths to page entries +} + +/// `PageEntry` represents the details for each page found in the `dist/` directory. +/// It includes the paths for JavaScript and CSS files (scripts and styles). +/// +/// # Example +/// +/// ```no_run +/// use metassr_utils::analyzer::dist_dir::PageEntry; +/// use std::path::PathBuf; +/// +/// let mut page_entry = PageEntry::new(PathBuf::from("/dist/pages/home")); +/// +/// page_entry.push_script(&PathBuf::from("/dist/pages/home/main.js")); +/// page_entry.push_style(&PathBuf::from("/dist/pages/home/main.css")); +/// +/// println!("{:?}", page_entry); +/// ``` +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PageEntry { + pub scripts: Vec, // List of paths to JavaScript files for the page + pub styles: Vec, // List of paths to CSS files for the page + pub path: PathBuf, // The actual path of the page (directory) +} + +impl PageEntry { + /// Creates a new `PageEntry` given the path to a page. 
+ /// + /// # Example + /// + /// ```no_run + /// use metassr_utils::analyzer::dist_dir::PageEntry; + /// use std::path::PathBuf; + /// + /// let page_entry = PageEntry::new(PathBuf::from("/dist/pages/home")); + /// println!("{:?}", page_entry.path); + /// ``` + pub fn new(path: PathBuf) -> Self { + Self { + scripts: vec![], // Initialize with an empty list of scripts + styles: vec![], // Initialize with an empty list of styles + path, // Set the path of the page + } + } + + /// Adds a script (JavaScript file) to the `scripts` list for the page. + /// + /// # Example + /// + /// ```no_run + /// use metassr_utils::analyzer::dist_dir::PageEntry; + /// use std::path::PathBuf; + /// + /// let mut page_entry = PageEntry::new(PathBuf::from("/dist/pages/home")); + /// page_entry.push_script(&PathBuf::from("/dist/pages/home/main.js")); + /// println!("{:?}", page_entry.scripts); + /// ``` + pub fn push_script(&mut self, path: &Path) { + self.scripts.push(path.to_path_buf()); + } + + /// Adds a style (CSS file) to the `styles` list for the page. + /// + /// # Example + /// + /// ```no_run + /// use metassr_utils::analyzer::dist_dir::PageEntry; + /// use std::path::PathBuf; + /// + /// let mut page_entry = PageEntry::new(PathBuf::from("/dist/pages/home")); + /// page_entry.push_style(&PathBuf::from("/dist/pages/home/style.css")); + /// println!("{:?}", page_entry.styles); + /// ``` + pub fn push_style(&mut self, path: &Path) { + self.styles.push(path.to_path_buf()); + } +} + +/// `DistDir` is responsible for analyzing the `dist/` directory, +/// which is typically a folder generated by a bundler like `rspack` that contains JavaScript and CSS files. +/// It extracts and organizes the page structure based on the files inside the directory. 
+/// +/// # Example +/// +/// ```no_run +/// use metassr_utils::analyzer::{dist_dir::DistDir, DirectoryAnalyzer}; +/// +/// let dist_dir = DistDir::new("path/to/dist").unwrap(); +/// let analysis_result = dist_dir.analyze().unwrap(); +/// +/// for (page, entry) in analysis_result.pages { +/// println!("Page: {}, Scripts: {:?}, Styles: {:?}", page, entry.scripts, entry.styles); +/// } +/// ``` +#[derive(Debug)] +pub struct DistDir(PathBuf); // Contains the path to the `dist/` directory + +impl DistDir { + /// Creates a new `DistDir` object given a path to the `dist/` directory. + /// Returns an error if the provided path does not exist. + /// + /// # Example + /// + /// ```no_run + /// use metassr_utils::analyzer::dist_dir::DistDir; + /// + /// let dist_dir = DistDir::new("path/to/dist"); + /// match dist_dir { + /// Ok(dir) => println!("Dist directory found!"), + /// Err(e) => println!("Error: {}", e), + /// } + /// ``` + pub fn new(path: &S) -> Result + where + S: AsRef + ?Sized, // Accepts types that can be referenced as `OsStr` + { + let path = PathBuf::from(path); + + // Check if the path exists, and return an error if not found + if !path.exists() { + return Err(anyhow!("Dist directory not found: {path:#?}")); + } + + Ok(Self(path)) + } +} + +/// `AnalyzeDir` trait is implemented for `DistDir`. +/// This implementation allows analyzing the directory and extracting its structure. 
+/// +/// # Example +/// +/// ```no_run +/// use metassr_utils::analyzer::{dist_dir::DistDir, DirectoryAnalyzer}; +/// +/// let dist_dir = DistDir::new("/path/to/dist").unwrap(); +/// let result = dist_dir.analyze().unwrap(); +/// for (page, entry) in result.pages { +/// println!("Page: {}, Scripts: {:?}, Styles: {:?}", page, entry.scripts, entry.styles); +/// } +/// ``` +impl DirectoryAnalyzer for DistDir { + type Output = DistDirContainer; // The output of the analysis is a `DistDirContainer` + + /// Analyzes the `dist/` directory to find and organize JavaScript and CSS files + /// inside the `pages/` subdirectory, if it exists. + /// + /// # Example + /// + /// ```no_run + /// use metassr_utils::analyzer::{dist_dir::DistDir, DirectoryAnalyzer}; + /// + /// let dist_dir = DistDir::new("path/to/dist").unwrap(); + /// let analysis_result = dist_dir.analyze().unwrap(); + /// + /// for (page, entry) in analysis_result.pages { + /// println!("Page: {}, Scripts: {:?}, Styles: {:?}", page, entry.scripts, entry.styles); + /// } + /// ``` + fn analyze(&self) -> Result { + let pages_path = self.0.join("pages"); // Define the path to the `pages` directory inside `dist` + let mut pages: HashMap = HashMap::new(); // Create a `HashMap` to store pages + + // Traverse the `pages` directory recursively and filter out files based on extensions (js, css) + for entry in WalkDir::new(pages_path.clone()) + .into_iter() + .filter_map(|e| { + let exts = ["js", "css"]; + match e.ok() { + Some(e) + if e.path().is_file() + && exts.contains(&e.path().extension().unwrap().to_str().unwrap()) => + { + Some(e) // Include files that are either JS or CSS + } + _ => None, + } + }) + { + let path = entry.path(); // Get the path of the current file + let parent = path.parent().unwrap(); // Get the parent directory of the file (the page directory) + + // Strip the `pages_path` prefix from the parent directory path to get a relative path + let parent_stripped = match 
parent.strip_prefix(pages_path.clone()).unwrap() { + p if p == Path::new("") => "#root", // If no parent, set the root identifier + p => p.to_str().unwrap(), // Otherwise, use the relative path as the identifier + }; + + let ext = path.extension().unwrap().to_str().unwrap(); // Get the file extension (js or css) + + // If the page does not already exist in the `pages` map, insert a new `PageEntry` + if !pages.contains_key(parent_stripped) { + pages.insert( + parent_stripped.to_owned(), + PageEntry::new(parent.to_path_buf().canonicalize().unwrap()), // Add canonicalized parent path + ); + }; + + let page_entry = pages.get_mut(parent_stripped).unwrap(); // Get the `PageEntry` for the current page + + // Depending on the file extension, add the file to either `scripts` or `styles` + match ext { + "js" => (*page_entry).push_script(path), + "css" => (*page_entry).push_style(path), + _ => (), + } + } + + // Return the analyzed pages in a `DistDirContainer` + Ok(Self::Output { pages }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::rand::Rand; + use std::fs; + /// Helper function to create a temporary directory structure for testing + fn setup_test_dist_dir() -> PathBuf { + let tmp_dir = std::env::temp_dir() + .join(&Rand::new().val().to_string()) + .join("test-dist"); + let pages_dir = tmp_dir.join("pages"); + + // Clear any previous test data + if pages_dir.exists() { + fs::remove_dir_all(&pages_dir).unwrap(); + } + + fs::create_dir_all(&pages_dir).unwrap(); + + // Create some test files + fs::write(pages_dir.join("index.js"), "// JavaScript file").unwrap(); + fs::write(pages_dir.join("style.css"), "/* CSS file */").unwrap(); + fs::create_dir(pages_dir.join("subdir")).unwrap(); + fs::write( + pages_dir.join("subdir").join("script.js"), + "// Subdir JS file", + ) + .unwrap(); + fs::write( + pages_dir.join("subdir").join("substyle.css"), + "/* Subdir CSS file */", + ) + .unwrap(); + dbg!(&tmp_dir.canonicalize(), &pages_dir.canonicalize()); + tmp_dir + } 
+ + /// Clean up the test directory after tests + fn cleanup_test_dist_dir(test_dir: PathBuf) { + if test_dir.exists() { + fs::remove_dir_all(test_dir).unwrap(); + } + } + + #[test] + fn test_analyze_valid_dist_dir() { + let test_dir = setup_test_dist_dir(); + let dist_dir = DistDir::new(&test_dir).unwrap(); + + let result = dist_dir.analyze().unwrap(); + let pages = result.pages; + + // Ensure correct number of pages + assert_eq!( + pages.len(), + 2, + "Expected 2 page entries, found {}", + pages.len() + ); + + // Validate root page + let root_page = pages.get("#root").expect("Root page should exist"); + assert_eq!(root_page.scripts.len(), 1, "Expected 1 script in root page"); + assert_eq!(root_page.styles.len(), 1, "Expected 1 style in root page"); + + // Validate subdir page + let subdir_page = pages.get("subdir").expect("Subdir page should exist"); + assert_eq!( + subdir_page.scripts.len(), + 1, + "Expected 1 script in subdir page" + ); + assert_eq!( + subdir_page.styles.len(), + 1, + "Expected 1 style in subdir page" + ); + + cleanup_test_dist_dir(test_dir); + } + + #[test] + fn test_dist_dir_not_found() { + let invalid_path = std::env::temp_dir().join("invalid-dist"); + let result = DistDir::new(&invalid_path); + + assert!( + result.is_err(), + "Expected error when dist directory is not found" + ); + if let Err(err) = result { + assert!( + err.to_string().contains("Dist directory not found"), + "Unexpected error message: {}", + err + ); + } + } + + #[test] + fn test_empty_dist_dir() { + let test_dir = std::env::temp_dir().join("empty-test-dist"); + fs::create_dir_all(test_dir.join("pages")).unwrap(); // Create empty pages directory + + let dist_dir = DistDir::new(&test_dir).unwrap(); + let result = dist_dir.analyze().unwrap(); + + assert!( + result.pages.is_empty(), + "Expected no page entries in an empty dist directory" + ); + + cleanup_test_dist_dir(test_dir); + } + + #[test] + fn test_analyze_unsupported_file_extensions() { + let test_dir = 
setup_test_dist_dir(); + + // Create a file with an unsupported extension + fs::write( + test_dir.join("pages").join("unsupported.txt"), + "Unsupported file", + ) + .unwrap(); + + let dist_dir = DistDir::new(&test_dir).unwrap(); + let result = dist_dir.analyze().unwrap(); + + // Ensure the unsupported file is ignored + assert_eq!( + result.pages.len(), + 2, + "Expected 2 page entries, found {}", + result.pages.len() + ); + + cleanup_test_dist_dir(test_dir); + } + + #[test] + fn test_analyze_dist_dir_with_only_scripts() { + let test_dir = std::env::temp_dir().join("script-only-test-dist"); + let pages_dir = test_dir.join("pages"); + + fs::create_dir_all(&pages_dir).unwrap(); + fs::write(pages_dir.join("script.js"), "// JavaScript file").unwrap(); + + let dist_dir = DistDir::new(&test_dir).unwrap(); + let result = dist_dir.analyze().unwrap(); + + let root_page = result.pages.get("#root").expect("Root page should exist"); + assert_eq!(root_page.scripts.len(), 1, "Expected 1 script file"); + assert!(root_page.styles.is_empty(), "Expected no styles"); + + cleanup_test_dist_dir(test_dir); + } + + #[test] + fn test_analyze_dist_dir_with_only_styles() { + let test_dir = std::env::temp_dir().join("style-only-test-dist"); + let pages_dir = test_dir.join("pages"); + + fs::create_dir_all(&pages_dir).unwrap(); + fs::write(pages_dir.join("style.css"), "/* CSS file */").unwrap(); + + let dist_dir = DistDir::new(&test_dir).unwrap(); + let result = dist_dir.analyze().unwrap(); + + let root_page = result.pages.get("#root").expect("Root page should exist"); + assert_eq!(root_page.styles.len(), 1, "Expected 1 style file"); + assert!(root_page.scripts.is_empty(), "Expected no scripts"); + + cleanup_test_dist_dir(test_dir); + } +} diff --git a/crates/metassr-utils/src/traits.rs b/crates/metassr-utils/src/analyzer/mod.rs similarity index 56% rename from crates/metassr-utils/src/traits.rs rename to crates/metassr-utils/src/analyzer/mod.rs index a83786c..7f5bf65 100644 --- 
a/crates/metassr-utils/src/traits.rs +++ b/crates/metassr-utils/src/analyzer/mod.rs @@ -1,5 +1,9 @@ +pub mod dist_dir; +pub mod src_dir; + + use anyhow::Result; -pub trait AnalyzeDir { +pub trait DirectoryAnalyzer { type Output; fn analyze(&self) -> Result; } diff --git a/crates/metassr-utils/src/analyzer/src_dir.rs b/crates/metassr-utils/src/analyzer/src_dir.rs new file mode 100644 index 0000000..2ef30bc --- /dev/null +++ b/crates/metassr-utils/src/analyzer/src_dir.rs @@ -0,0 +1,223 @@ +use super::DirectoryAnalyzer; +use anyhow::{anyhow, Result}; +use std::{collections::HashMap, ffi::OsStr, marker::Sized, path::PathBuf}; +use walkdir::WalkDir; + +/// Wrappers for special entries that are collected by the source analyzer +pub mod special_entries { + use std::path::PathBuf; + + /// Represents a special entry for the `_app.[js, jsx, ts, tsx]` file. + #[derive(Debug, Clone)] + pub struct App(pub PathBuf); + + /// Represents a special entry for the `_head.[js, jsx, ts, tsx]` file. + #[derive(Debug, Clone)] + pub struct Head(pub PathBuf); +} + +pub type PagesEntriesType = HashMap; +pub type SpecialEntriesType = (Option, Option); + +/// A container holding the results of analyzing a source directory. +/// +/// This struct holds the pages and special entries found in the source directory. +#[derive(Debug, Clone)] +pub struct SourceDirContainer { + pub pages: PagesEntriesType, + pub specials: SpecialEntriesType, +} + +impl SourceDirContainer { + /// Creates a new `SourceDirContainer` with the given pages and special entries. + /// + /// # Parameters + /// + /// - `pages`: A `HashMap` where keys are routes and values are paths to page files. + /// - `specials`: A tuple containing optional special entries (`App` and `Head`). + pub fn new(pages: PagesEntriesType, specials: SpecialEntriesType) -> Self { + Self { pages, specials } + } + + /// Retrieves the special entries from the container. 
+ /// + /// # Returns + /// + /// Returns a `Result` containing a tuple of `App` and `Head` if both are present, + /// or an error if one or both are missing. + pub fn specials(&self) -> Result<(special_entries::App, special_entries::Head)> { + let (app, head) = self.specials.clone(); + if let (Some(app), Some(head)) = (app.clone(), head.clone()) { + return Ok((app, head)); + } + let mut not_found = vec![]; + if app.is_none() { + not_found.push("_app.[js,jsx,ts,tsx]") + } + if head.is_none() { + not_found.push("_head.[js,jsx,ts,tsx]") + } + Err(anyhow!( + "Couldn't find: {}. Create the files that have not been found.", + not_found.join(", ") + )) + } + + /// Retrieves the pages entries from the container. + /// + /// # Returns + /// + /// Returns a `HashMap` where keys are routes and values are paths to page files. + pub fn pages(&self) -> PagesEntriesType { + self.pages.clone() + } +} + +/// A directory analyzer for a source directory. +/// +/// This struct provides functionality to analyze a directory and extract pages and special entries. +#[derive(Debug)] +pub struct SourceDir(PathBuf); + +impl SourceDir { + /// Creates a new `SourceDir` instance. + /// + /// # Parameters + /// + /// - `path`: The path to the source directory. + pub fn new(path: &S) -> Self + where + S: AsRef + ?Sized, + { + Self(PathBuf::from(path)) + } +} + +impl DirectoryAnalyzer for SourceDir { + type Output = SourceDirContainer; + + /// Analyzes the source directory and extracts pages and special entries. + /// + /// # Returns + /// + /// Returns a `Result` containing a `SourceDirContainer` with pages and special entries. 
+ fn analyze(&self) -> Result { + let src = self.0.to_str().unwrap(); + + let list_of_specials = ["_app", "_head"]; + let mut pages: HashMap = HashMap::new(); + let mut specials: SpecialEntriesType = (None, None); + + for entry in WalkDir::new(src) + .into_iter() + .filter_map(|e| match e.ok() { + Some(e) if e.path().is_file() => Some(e), + _ => None, + }) + .skip_while(|e| { + // Check if the entry is a js/ts file. + let exts: Vec<&str> = vec!["js", "jsx", "tsx", "ts"]; + !exts.contains(&e.path().extension().unwrap().to_str().unwrap()) + }) + { + let path = entry.path(); + let stem = path.file_stem().unwrap().to_str().unwrap(); + let stripped = path.strip_prefix(src)?; + + match stripped.iter().next() { + Some(_) if list_of_specials.contains(&stem) => match stem { + "_app" => specials.0 = Some(special_entries::App(path.to_path_buf())), + "_head" => specials.1 = Some(special_entries::Head(path.to_path_buf())), + _ => (), + }, + + Some(p) if p == OsStr::new("pages") => { + let route = path + .strip_prefix([src, "/pages"].concat())? + .to_str() + .unwrap(); + pages.insert(route.to_owned(), path.to_path_buf()); + } + + _ => (), + } + } + + let container = SourceDirContainer::new(pages, specials); + + // Return an error if specials not found. + if let Err(err) = container.specials() { + return Err(anyhow!(err)); + } + + Ok(container) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::rand::Rand; + use std::fs; + use std::path::PathBuf; + + /// Helper function to create a temporary source directory with a random name. + fn create_temp_source_dir() -> Result { + let dir_path = PathBuf::from(format!("src-{}", Rand::new().val())); + fs::create_dir_all(&dir_path)?; + Ok(SourceDir::new(&dir_path)) + } + + /// Test case to verify the creation and analysis of a source directory. 
+ #[test] + fn test_create_and_analyze_source_dir() { + let source_dir = create_temp_source_dir().unwrap(); + let pages = vec!["page1.jsx", "page2.tsx"]; + let specials = vec!["_app.jsx", "_head.tsx"]; + + for page in pages.iter() { + let path = source_dir.0.join("pages").join(page); + fs::create_dir_all(path.parent().unwrap()).unwrap(); + fs::write(&path, b"dummy content").unwrap(); + } + + for special in specials.iter() { + let path = source_dir.0.join(special); + fs::write(&path, b"dummy content").unwrap(); + } + + let result = source_dir.analyze().unwrap(); + assert_eq!(result.pages().len(), pages.len()); + assert!(result.specials().is_ok()); + + // Cleanup + for page in pages.iter() { + let path = source_dir.0.join("pages").join(page); + fs::remove_file(&path).unwrap(); + } + for special in specials.iter() { + let path = source_dir.0.join(special); + fs::remove_file(&path).unwrap(); + } + fs::remove_dir_all(source_dir.0).unwrap(); + } + + /// Test case to verify handling of missing special entries. 
+ #[test] + fn test_missing_special_entries() { + let source_dir = create_temp_source_dir().unwrap(); + let page_path = source_dir.0.join("pages/page1.jsx"); + fs::create_dir_all(page_path.parent().unwrap()).unwrap(); + fs::write(&page_path, b"dummy content").unwrap(); + + let result = source_dir.analyze(); + assert!( + result.is_err(), + "Should return an error due to missing special entries" + ); + + // Cleanup + fs::remove_file(&page_path).unwrap(); + fs::remove_dir_all(source_dir.0).unwrap(); + } +} diff --git a/crates/metassr-utils/src/dist_analyzer.rs b/crates/metassr-utils/src/dist_analyzer.rs deleted file mode 100644 index 5bd244c..0000000 --- a/crates/metassr-utils/src/dist_analyzer.rs +++ /dev/null @@ -1,122 +0,0 @@ -use crate::traits::AnalyzeDir; -use anyhow::{anyhow, Result}; -use serde::{Deserialize, Serialize}; -use std::{ - collections::HashMap, - ffi::OsStr, - marker::Sized, - path::{Path, PathBuf}, -}; -use walkdir::WalkDir; -/// A container contains analyzing result for `dist/` directory. -#[derive(Debug)] -pub struct DistDirContainer { - pub pages: HashMap, -} - -/// The page entry, where each pages details stored. -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct PageEntry { - pub scripts: Vec, - pub styles: Vec, - pub path: PathBuf, -} - -impl PageEntry { - pub fn new(path: PathBuf) -> Self { - Self { - scripts: vec![], - styles: vec![], - path, - } - } - pub fn push_script(&mut self, path: &Path) { - self.scripts.push(path.to_path_buf()); - } - pub fn push_style(&mut self, path: &Path) { - self.styles.push(path.to_path_buf()); - } -} - -/// A simple analyzer for `dist/` directory to extract script files and style files, a bundled files generated using `rspack`. 
-#[derive(Debug)] -pub struct DistDir(PathBuf); - -impl DistDir { - pub fn new(path: &S) -> Result - where - S: AsRef + ?Sized, - { - let path = PathBuf::from(path); - if !path.exists() { - return Err(anyhow!("Dist directory not found: {path:#?}")); - } - - Ok(Self(path)) - } -} - -impl AnalyzeDir for DistDir { - type Output = DistDirContainer; - fn analyze(&self) -> Result { - let pages_path = self.0.join("pages"); - let mut pages: HashMap = HashMap::new(); - - for entry in WalkDir::new(pages_path.clone()) - .into_iter() - .filter_map(|e| { - let exts = ["js", "css"]; - match e.ok() { - Some(e) - if e.path().is_file() - && exts.contains(&e.path().extension().unwrap().to_str().unwrap()) => - { - Some(e) - } - _ => None, - } - }) - { - let path = entry.path(); - let parent = path.parent().unwrap(); - - let parent_stripped = match parent.strip_prefix(pages_path.clone()).unwrap() { - p if p == Path::new("") => "#root", - p => p.to_str().unwrap(), - }; - let ext = path.extension().unwrap().to_str().unwrap(); - // let stem = path.file_stem().unwrap().to_str().unwrap(); - // let stripped = path.strip_prefix(src)?; - if !pages.contains_key(parent_stripped) { - pages.insert( - parent_stripped.to_owned(), - PageEntry::new(parent.to_path_buf().canonicalize().unwrap()), - ); - }; - - let page = pages.get_mut(parent_stripped).unwrap(); - match ext { - "js" => (*page).push_script(path), - "css" => { - (*page).push_style(path); - } - _ => (), - } - } - - Ok(Self::Output { pages }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn it_works() { - dbg!(&DistDir::new("../../tests/web-app/dist") - .unwrap() - .analyze() - .unwrap()); - } -} diff --git a/crates/metassr-utils/src/lib.rs b/crates/metassr-utils/src/lib.rs index dc81d5e..7024b87 100644 --- a/crates/metassr-utils/src/lib.rs +++ b/crates/metassr-utils/src/lib.rs @@ -1,6 +1,4 @@ +pub mod analyzer; pub mod cache_dir; pub mod checker; -pub mod dist_analyzer; pub mod rand; -pub mod src_analyzer; -pub mod 
traits; diff --git a/crates/metassr-utils/src/src_analyzer.rs b/crates/metassr-utils/src/src_analyzer.rs deleted file mode 100644 index ec9e264..0000000 --- a/crates/metassr-utils/src/src_analyzer.rs +++ /dev/null @@ -1,122 +0,0 @@ -use crate::traits::AnalyzeDir; -use anyhow::{anyhow, Result}; -use std::{collections::HashMap, ffi::OsStr, marker::Sized, path::PathBuf}; -use walkdir::WalkDir; - -pub mod special_entries { - use std::path::PathBuf; - - #[derive(Debug, Clone)] - pub struct Head(pub PathBuf); - #[derive(Debug, Clone)] - pub struct App(pub PathBuf); -} - -pub type PagesEntriesType = HashMap; -pub type SpecialEntriesType = (Option, Option); -#[derive(Debug, Clone)] - -pub struct SourceDirContainer { - pub pages: PagesEntriesType, - pub specials: SpecialEntriesType, -} - -impl SourceDirContainer { - pub fn new(pages: PagesEntriesType, specials: SpecialEntriesType) -> Self { - Self { pages, specials } - } - - pub fn specials(&self) -> Result<(special_entries::App, special_entries::Head)> { - let (app, head) = self.specials.clone(); - if let (Some(app), Some(head)) = (app.clone(), head.clone()) { - return Ok((app, head)); - } - let mut not_found = vec![]; - if app.is_none() { - not_found.push("_app.[js,jsx,ts,tsx]") - } - if head.is_none() { - not_found.push("_head.[js,jsx,ts,tsx]") - } - Err(anyhow!( - "Couldn't found: {}. 
Create the files that have not been found.", - not_found.join(", ") - )) - } - - pub fn pages(&self) -> PagesEntriesType { - self.pages.clone() - } -} - -#[derive(Debug)] -pub struct SourceDir(PathBuf); - -impl SourceDir { - pub fn new(path: &S) -> Self - where - S: AsRef + ?Sized, - { - Self(PathBuf::from(path)) - } -} - -impl AnalyzeDir for SourceDir { - type Output = SourceDirContainer; - fn analyze(&self) -> Result { - let src = self.0.to_str().unwrap(); - - let list_of_specials = ["_app", "_head"]; - let mut pages: HashMap = HashMap::new(); - let mut specials: SpecialEntriesType = (None, None); - - for entry in WalkDir::new(src) - .into_iter() - .filter_map(|e| match e.ok() { - Some(e) if e.path().is_file() => Some(e), - _ => None, - }) - .skip_while(|e| { - // Check if the entry is a js/ts file. - let exts: Vec<&str> = vec!["js", "jsx", "tsx", "ts"]; - !exts.contains(&e.path().extension().unwrap().to_str().unwrap()) - }) - { - let path = entry.path(); - let stem = path.file_stem().unwrap().to_str().unwrap(); - let stripped = path.strip_prefix(src)?; - - match stripped.iter().next() { - Some(_) if list_of_specials.contains(&stem) => { - match stem { - "_app" => specials.0 = Some(special_entries::App(path.to_path_buf())), - "_head" => specials.1 = Some(special_entries::Head(path.to_path_buf())), - _ => (), - } - } - - Some(p) if p == OsStr::new("pages") => { - let route = path - .strip_prefix([src, "/pages"].concat())? - .to_str() - .unwrap(); - pages.insert(route.to_owned(), path.to_path_buf()); - } - - _ => (), - } - } - - Ok(SourceDirContainer::new(pages, specials)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn it_works() { - dbg!(&SourceDir::new("../../tests/web-app/src").analyze().unwrap()); - } -}