From 1e327687fc78cbc4110e9fae1bbc526b408ac801 Mon Sep 17 00:00:00 2001 From: LingyuCoder Date: Tue, 17 Dec 2024 12:05:08 +0800 Subject: [PATCH] refactor: improve storage error message --- .../src/cache/persistent/storage/mod.rs | 4 +- crates/rspack_storage/src/lib.rs | 2 +- crates/rspack_storage/src/pack/data/scope.rs | 8 +- crates/rspack_storage/src/pack/fs/error.rs | 114 ++++++-- crates/rspack_storage/src/pack/fs/mod.rs | 52 ++-- crates/rspack_storage/src/pack/manager/mod.rs | 44 ++-- crates/rspack_storage/src/pack/mod.rs | 4 +- .../rspack_storage/src/pack/strategy/mod.rs | 142 ++++++---- .../src/pack/strategy/split/handle_file.rs | 248 ++++++++---------- .../src/pack/strategy/split/mod.rs | 29 +- .../src/pack/strategy/split/read_scope.rs | 56 ++-- .../src/pack/strategy/split/util.rs | 16 +- .../src/pack/strategy/split/validate_scope.rs | 70 +++-- .../src/pack/strategy/split/write_pack.rs | 4 +- .../src/pack/strategy/split/write_scope.rs | 28 +- crates/rspack_storage/tests/build.rs | 14 +- crates/rspack_storage/tests/dev.rs | 14 +- crates/rspack_storage/tests/error.rs | 22 +- crates/rspack_storage/tests/expire.rs | 14 +- crates/rspack_storage/tests/lock.rs | 30 +-- crates/rspack_storage/tests/multi.rs | 14 +- 21 files changed, 552 insertions(+), 377 deletions(-) diff --git a/crates/rspack_core/src/cache/persistent/storage/mod.rs b/crates/rspack_core/src/cache/persistent/storage/mod.rs index 28cb5df2f786..c65192c74bb5 100644 --- a/crates/rspack_core/src/cache/persistent/storage/mod.rs +++ b/crates/rspack_core/src/cache/persistent/storage/mod.rs @@ -6,7 +6,7 @@ use std::{path::PathBuf, sync::Arc}; pub use memory::MemoryStorage; use rspack_fs::IntermediateFileSystem; pub use rspack_storage::Storage; -use rspack_storage::{PackBridgeFS, PackStorage, PackStorageOptions}; +use rspack_storage::{PackStorage, PackStorageOptions, StorageBridgeFS}; /// Storage Options /// @@ -31,7 +31,7 @@ pub fn create_storage( bucket_size: 20, pack_size: 500 * 1024, expire: 7 * 24 * 60 * 60 * 1000, - fs: Arc::new(PackBridgeFS(fs)), + fs: Arc::new(StorageBridgeFS(fs)), version, }; Arc::new(PackStorage::new(option)) diff --git a/crates/rspack_storage/src/lib.rs b/crates/rspack_storage/src/lib.rs index 3cb0bf410d50..fbee0652f31a 100644 --- a/crates/rspack_storage/src/lib.rs +++ b/crates/rspack_storage/src/lib.rs @@ -2,7 +2,7 @@ mod pack; use std::sync::Arc; -pub use pack::{PackBridgeFS, PackFS, PackStorage, PackStorageOptions}; +pub use pack::{PackStorage, PackStorageOptions, StorageBridgeFS, StorageFS}; use rspack_error::Result; use tokio::sync::oneshot::Receiver; diff --git a/crates/rspack_storage/src/pack/data/scope.rs b/crates/rspack_storage/src/pack/data/scope.rs index 4ca9181e7f8d..df0376405694 100644 --- a/crates/rspack_storage/src/pack/data/scope.rs +++ b/crates/rspack_storage/src/pack/data/scope.rs @@ -95,6 +95,7 @@ impl ScopePacksState { #[derive(Debug)] pub struct PackScope { + pub name: &'static str, pub path: Utf8PathBuf, pub options: Arc, pub meta: ScopeMetaState, @@ -103,8 +104,9 @@ pub struct PackScope { } impl PackScope { - pub fn new(path: Utf8PathBuf, options: Arc) -> Self { + pub fn new(name: &'static str, path: Utf8PathBuf, options: Arc) -> Self { Self { + name, path, options, meta: ScopeMetaState::Pending, @@ -113,8 +115,8 @@ impl PackScope { } } - pub fn empty(path: Utf8PathBuf, options: Arc) -> Self { - let mut scope = Self::new(path, options); + pub fn empty(name: &'static str, path: Utf8PathBuf, options: Arc) -> Self { + let mut scope = Self::new(name, path, options); scope.clear(); scope } 
diff --git a/crates/rspack_storage/src/pack/fs/error.rs b/crates/rspack_storage/src/pack/fs/error.rs
index 4fc5fa52617c..ea963cfe0cab 100644
--- a/crates/rspack_storage/src/pack/fs/error.rs
+++ b/crates/rspack_storage/src/pack/fs/error.rs
@@ -3,13 +3,14 @@ use std::io::ErrorKind;
 use cow_utils::CowUtils;
 use rspack_error::{
   miette::{self},
-  thiserror::{self, Error},
-  Error,
+  thiserror::Error,
+  Result,
 };
 use rspack_paths::Utf8Path;
+use tokio::task::JoinError;
 
 #[derive(Debug)]
-pub enum PackFsErrorOpt {
+pub enum StorageFSOperation {
   Read,
   Write,
   Dir,
@@ -18,7 +19,7 @@ pub enum PackFsErrorOpt {
   Move,
 }
 
-impl std::fmt::Display for PackFsErrorOpt {
+impl std::fmt::Display for StorageFSOperation {
   fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
     match self {
       Self::Read => write!(f, "read"),
@@ -32,28 +33,22 @@ impl std::fmt::Display for PackFsErrorOpt {
 }
 
 #[derive(Debug, Error)]
-#[error(r#"Rspack Storage FS Error: {opt} `{file}` failed with `{inner}`"#)]
-pub struct PackFsError {
+pub struct StorageFSError {
   file: String,
-  inner: Error,
-  opt: PackFsErrorOpt,
-  kind: Option<ErrorKind>,
+  inner: rspack_fs::Error,
+  opt: StorageFSOperation,
 }
 
-impl PackFsError {
-  pub fn from_fs_error(file: &Utf8Path, opt: PackFsErrorOpt, error: rspack_fs::Error) -> Self {
-    let kind = match &error {
-      rspack_fs::Error::Io(e) => Some(e.kind()),
-    };
+impl StorageFSError {
+  pub fn from_fs_error(file: &Utf8Path, opt: StorageFSOperation, error: rspack_fs::Error) -> Self {
     Self {
       file: file.to_string(),
-      inner: error.into(),
+      inner: error,
       opt,
-      kind,
     }
   }
   pub fn is_not_found(&self) -> bool {
-    if self.kind.is_some_and(|k| matches!(k, ErrorKind::NotFound)) {
+    if matches!(self.kind(), ErrorKind::NotFound) {
      return true;
     }
     let error_content = self.inner.to_string();
@@ -61,11 +56,30 @@ impl PackFsError {
     lower_case_error_content.contains("no such file")
       || lower_case_error_content.contains("file not exists")
   }
+  pub fn kind(&self) -> ErrorKind {
+    match &self.inner {
+      rspack_fs::Error::Io(e) => e.kind(),
+    }
+  }
 }
 
-impl miette::Diagnostic for PackFsError {
+impl std::fmt::Display for StorageFSError {
+  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+    write!(
+      f,
+      "{} `{}` failed due to `{}`",
+      self.opt,
+      self.file,
+      match &self.inner {
+        rspack_fs::Error::Io(e) => e,
+      }
+    )
+  }
+}
+
+impl miette::Diagnostic for StorageFSError {
   fn code<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
-    Some(Box::new("PackFsError"))
+    Some(Box::new("StorageFSError"))
   }
   fn severity(&self) -> Option<miette::Severity> {
     Some(miette::Severity::Warning)
@@ -73,7 +87,65 @@ impl miette::Diagnostic for PackFsError {
   fn url<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
     Some(Box::new(self.file.clone()))
   }
-  fn diagnostic_source(&self) -> Option<&dyn miette::Diagnostic> {
-    Some(self.inner.as_ref())
+}
+
+#[derive(Debug, Error)]
+pub struct BatchStorageFSError {
+  message: String,
+  join_error: Option<JoinError>,
+  errors: Vec<rspack_error::Error>,
+}
+
+impl std::fmt::Display for BatchStorageFSError {
+  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+    write!(f, "{}", self.message)?;
+    if let Some(join_error) = &self.join_error {
+      write!(f, " due to `{}`", join_error)?;
+    }
+    for error in &self.errors {
+      write!(f, "\n- {}", error)?;
+    }
+    Ok(())
+  }
+}
+
+impl BatchStorageFSError {
+  pub fn try_from_joined_result(
+    message: &str,
+    res: Result<Vec<Result<()>>, JoinError>,
+  ) -> Option<Self> {
+    match res {
+      Ok(res) => Self::try_from_results(message, res),
+      Err(join_error) => Some(Self {
+        message: message.to_string(),
+        errors: vec![],
+        join_error: Some(join_error),
+      }),
+    }
   }
+
+  pub fn try_from_results(message: &str, results: Vec<Result<()>>) -> Option<Self> {
+    let errors = results
+      .into_iter()
+      .filter_map(|res| res.err())
+      .collect::<Vec<_>>();
+    if errors.is_empty() {
+      None
+    } else {
+      Some(Self {
+        message: message.to_string(),
+        errors,
+        join_error: None,
+      })
+    }
+  }
+}
+
+impl miette::Diagnostic for BatchStorageFSError {
+  fn code<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
+    Some(Box::new("BatchStorageFSError"))
+  }
+  fn severity(&self) -> Option<miette::Severity> {
+    Some(miette::Severity::Warning)
+  }
 }
diff --git a/crates/rspack_storage/src/pack/fs/mod.rs b/crates/rspack_storage/src/pack/fs/mod.rs
index d38eab3c821a..aa98d6a06582 100644
--- a/crates/rspack_storage/src/pack/fs/mod.rs
+++ b/crates/rspack_storage/src/pack/fs/mod.rs
@@ -3,13 +3,13 @@ use std::sync::Arc;
 use rspack_error::Result;
 
 mod error;
-pub use error::{PackFsError, PackFsErrorOpt};
+pub use error::{BatchStorageFSError, StorageFSError, StorageFSOperation};
 use rspack_fs::{FileMetadata, IntermediateFileSystem, ReadStream, WriteStream};
 use rspack_paths::Utf8Path;
 use rustc_hash::FxHashSet as HashSet;
 
 #[async_trait::async_trait]
-pub trait PackFS: std::fmt::Debug + Sync + Send {
+pub trait StorageFS: std::fmt::Debug + Sync + Send {
   async fn exists(&self, path: &Utf8Path) -> Result<bool>;
   async fn remove_dir(&self, path: &Utf8Path) -> Result<()>;
   async fn ensure_dir(&self, path: &Utf8Path) -> Result<()>;
@@ -22,14 +22,14 @@
 }
 
 #[derive(Debug)]
-pub struct PackBridgeFS(pub Arc<dyn IntermediateFileSystem>);
+pub struct StorageBridgeFS(pub Arc<dyn IntermediateFileSystem>);
 
 #[async_trait::async_trait]
-impl PackFS for PackBridgeFS {
+impl StorageFS for StorageBridgeFS {
   async fn exists(&self, path: &Utf8Path) -> Result<bool> {
     match self.metadata(path).await {
       Ok(_) => Ok(true),
-      Err(e) => match e.downcast::<PackFsError>() {
+      Err(e) => match e.downcast::<StorageFSError>() {
         Ok(e) => {
           if e.is_not_found() {
             Ok(false)
@@ -48,7 +48,7 @@ impl PackFS for PackBridgeFS {
         .0
         .remove_dir_all(path)
         .await
-        .map_err(|e| PackFsError::from_fs_error(path, PackFsErrorOpt::Remove, e))?;
+        .map_err(|e| StorageFSError::from_fs_error(path, StorageFSOperation::Remove, e))?;
     }
     Ok(())
   }
@@ -58,7 +58,7 @@ impl PackFS for PackBridgeFS {
       .0
      .create_dir_all(path)
       .await
-      .map_err(|e| PackFsError::from_fs_error(path, PackFsErrorOpt::Dir, e))?;
+      .map_err(|e| StorageFSError::from_fs_error(path, StorageFSOperation::Dir, e))?;
     Ok(())
   }
 
@@ -74,7 +74,7 @@ impl PackFS for PackBridgeFS {
      .0
      .create_write_stream(path)
      .await
-      .map_err(|e| PackFsError::from_fs_error(path, PackFsErrorOpt::Write, e))?;
+      .map_err(|e| StorageFSError::from_fs_error(path, StorageFSOperation::Write, e))?;
     Ok(res)
   }
 
@@ -84,7 +84,7 @@ impl PackFS for PackBridgeFS {
      .0
      .create_read_stream(path)
      .await
-      .map_err(|e| PackFsError::from_fs_error(path, PackFsErrorOpt::Read, e))?;
+      .map_err(|e| StorageFSError::from_fs_error(path, StorageFSOperation::Read, e))?;
     Ok(res)
   }
 
@@ -93,7 +93,7 @@ impl PackFS for PackBridgeFS {
      .0
      .read_dir(path)
      .await
-      .map_err(|e| PackFsError::from_fs_error(path, PackFsErrorOpt::Read, e))?;
+      .map_err(|e| StorageFSError::from_fs_error(path, StorageFSOperation::Read, e))?;
     Ok(files.into_iter().collect::<HashSet<_>>())
   }
 
@@ -102,7 +102,7 @@ impl PackFS for PackBridgeFS {
      .0
      .stat(path)
      .await
-      .map_err(|e| PackFsError::from_fs_error(path, PackFsErrorOpt::Stat, e))?;
+      .map_err(|e| StorageFSError::from_fs_error(path, StorageFSOperation::Stat, e))?;
     Ok(res)
   }
 
@@ -112,7 +112,7 @@ impl PackFS for PackBridgeFS {
        .0
        .remove_file(path)
        .await
-        .map_err(|e| PackFsError::from_fs_error(path, PackFsErrorOpt::Remove, e))?;
+        .map_err(|e|
StorageFSError::from_fs_error(path, StorageFSOperation::Remove, e))?; } Ok(()) } @@ -126,7 +126,7 @@ impl PackFS for PackBridgeFS { .0 .rename(from, to) .await - .map_err(|e| PackFsError::from_fs_error(from, PackFsErrorOpt::Move, e))?; + .map_err(|e| StorageFSError::from_fs_error(from, StorageFSOperation::Move, e))?; } Ok(()) } @@ -140,14 +140,14 @@ mod tests { use rspack_fs::MemoryFileSystem; use rspack_paths::Utf8PathBuf; - use super::PackBridgeFS; - use crate::PackFS; + use super::StorageBridgeFS; + use crate::StorageFS; fn get_path(p: &str) -> Utf8PathBuf { Utf8PathBuf::from(p) } - async fn test_create_dir(fs: &PackBridgeFS) -> Result<()> { + async fn test_create_dir(fs: &StorageBridgeFS) -> Result<()> { fs.ensure_dir(&get_path("/parent/from")).await?; fs.ensure_dir(&get_path("/parent/to")).await?; @@ -160,7 +160,7 @@ mod tests { Ok(()) } - async fn test_write_file(fs: &PackBridgeFS) -> Result<()> { + async fn test_write_file(fs: &StorageBridgeFS) -> Result<()> { let mut writer = fs.write_file(&get_path("/parent/from/file.txt")).await?; writer.write_line("hello").await?; @@ -177,7 +177,7 @@ mod tests { Ok(()) } - async fn test_read_file(fs: &PackBridgeFS) -> Result<()> { + async fn test_read_file(fs: &StorageBridgeFS) -> Result<()> { let mut reader = fs.read_file(&get_path("/parent/from/file.txt")).await?; assert_eq!(reader.read_line().await?, "hello"); @@ -186,7 +186,7 @@ mod tests { Ok(()) } - async fn test_move_file(fs: &PackBridgeFS) -> Result<()> { + async fn test_move_file(fs: &StorageBridgeFS) -> Result<()> { fs.move_file( &get_path("/parent/from/file.txt"), &get_path("/parent/to/file.txt"), @@ -199,13 +199,13 @@ mod tests { Ok(()) } - async fn test_remove_file(fs: &PackBridgeFS) -> Result<()> { + async fn test_remove_file(fs: &StorageBridgeFS) -> Result<()> { fs.remove_file(&get_path("/parent/to/file.txt")).await?; assert!(!fs.exists(&get_path("/parent/to/file.txt")).await?); Ok(()) } - async fn test_remove_dir(fs: &PackBridgeFS) -> Result<()> { + async fn test_remove_dir(fs: &StorageBridgeFS) -> Result<()> { fs.remove_dir(&get_path("/parent/from")).await?; fs.remove_dir(&get_path("/parent/to")).await?; assert!(!fs.exists(&get_path("/parent/from")).await?); @@ -213,19 +213,19 @@ mod tests { Ok(()) } - async fn test_error(fs: &PackBridgeFS) -> Result<()> { + async fn test_error(fs: &StorageBridgeFS) -> Result<()> { match fs.metadata(&get_path("/parent/from/not_exist.txt")).await { Ok(_) => panic!("should error"), Err(e) => assert_eq!( e.to_string(), - r#"Rspack Storage FS Error: stat `/parent/from/not_exist.txt` failed with `Rspack FS Error: file not exist`"# + r#"stat `/parent/from/not_exist.txt` failed due to `file not exist`"# ), }; Ok(()) } - async fn test_memory_fs(fs: &PackBridgeFS) -> Result<()> { + async fn test_memory_fs(fs: &StorageBridgeFS) -> Result<()> { test_create_dir(fs).await?; test_write_file(fs).await?; test_read_file(fs).await?; @@ -239,8 +239,8 @@ mod tests { #[tokio::test] #[cfg_attr(miri, ignore)] - async fn should_pack_bridge_fs_work() { - let fs = PackBridgeFS(Arc::new(MemoryFileSystem::default())); + async fn should_storage_bridge_fs_work() { + let fs = StorageBridgeFS(Arc::new(MemoryFileSystem::default())); let _ = test_memory_fs(&fs).await.map_err(|e| { panic!("{}", e); diff --git a/crates/rspack_storage/src/pack/manager/mod.rs b/crates/rspack_storage/src/pack/manager/mod.rs index b6dae30869fe..67def15ae795 100644 --- a/crates/rspack_storage/src/pack/manager/mod.rs +++ b/crates/rspack_storage/src/pack/manager/mod.rs @@ -7,13 +7,13 @@ use 
itertools::Itertools; use pollster::block_on; use queue::TaskQueue; use rayon::iter::{ParallelBridge, ParallelIterator}; -use rspack_error::{error, Error, Result}; +use rspack_error::{Error, Result}; use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet}; use tokio::sync::oneshot::Receiver; use tokio::sync::{oneshot, Mutex}; use super::data::{PackOptions, PackScope, RootMeta, RootMetaState, RootOptions}; -use super::strategy::{ScopeStrategy, ValidateResult, WriteScopeResult}; +use super::strategy::{ScopeStrategy, StorageValidateError, ValidateResult, WriteScopeResult}; use super::ScopeUpdates; use crate::StorageContent; @@ -116,7 +116,13 @@ impl ScopeManager { .lock() .await .entry(name.to_string()) - .or_insert_with(|| PackScope::new(self.strategy.get_path(name), self.pack_options.clone())); + .or_insert_with(|| { + PackScope::new( + name, + self.strategy.get_path(name), + self.pack_options.clone(), + ) + }); // only check lock file and root meta for the first time if matches!(*self.root_meta.lock().await, RootMetaState::Pending) { @@ -148,15 +154,15 @@ impl ScopeManager { Ok(vec![]) } // clear scope if invalid - ValidateResult::Invalid(_) => { + ValidateResult::Invalid(detail) => { self.clear_scope(name).await; - Err(error!(validated.to_string())) + Err(StorageValidateError::from_detail(Some(name), detail).into()) } }, Err(e) => { // clear scope if error self.clear_scope(name).await; - Err(Error::from(e)) + Err(StorageValidateError::from_error(Some(name), e).into()) } } } @@ -197,9 +203,13 @@ fn update_scopes( strategy: &dyn ScopeStrategy, ) -> Result<()> { for (scope_name, _) in updates.iter() { - scopes - .entry(scope_name.to_string()) - .or_insert_with(|| PackScope::empty(strategy.get_path(scope_name), pack_options.clone())); + scopes.entry(scope_name.to_string()).or_insert_with(|| { + PackScope::empty( + scope_name, + strategy.get_path(scope_name), + pack_options.clone(), + ) + }); } scopes @@ -276,7 +286,7 @@ mod tests { use crate::{ pack::{ data::{PackOptions, RootOptions}, - fs::{PackBridgeFS, PackFS}, + fs::{StorageBridgeFS, StorageFS}, manager::ScopeManager, strategy::SplitPackStrategy, }, @@ -305,7 +315,7 @@ mod tests { ) } - async fn test_cold_start(root: &Utf8Path, temp: &Utf8Path, fs: Arc) -> Result<()> { + async fn test_cold_start(root: &Utf8Path, temp: &Utf8Path, fs: Arc) -> Result<()> { let root_options = Arc::new(RootOptions { expire: 60000, root: root.parent().expect("should get parent").to_path_buf(), @@ -359,7 +369,7 @@ mod tests { Ok(()) } - async fn test_hot_start(root: &Utf8Path, temp: &Utf8Path, fs: Arc) -> Result<()> { + async fn test_hot_start(root: &Utf8Path, temp: &Utf8Path, fs: Arc) -> Result<()> { let root_options = Arc::new(RootOptions { expire: 60000, root: root.parent().expect("should get parent").to_path_buf(), @@ -437,7 +447,11 @@ mod tests { Ok(()) } - async fn test_invalid_start(root: &Utf8Path, temp: &Utf8Path, fs: Arc) -> Result<()> { + async fn test_invalid_start( + root: &Utf8Path, + temp: &Utf8Path, + fs: Arc, + ) -> Result<()> { let root_options = Arc::new(RootOptions { expire: 60000, root: root.parent().expect("should get parent").to_path_buf(), @@ -458,7 +472,7 @@ mod tests { // should report error when invalid failed assert_eq!( manager.load("scope1").await.unwrap_err().to_string(), - "validation failed due to `options.bucketSize` changed" + "validate scope `scope1` failed due to `options.bucketSize` changed" ); // clear after invalid, can be used as a empty scope @@ -484,7 +498,7 @@ mod tests { } async fn test_manager() -> Result<()> { 
-    let fs = Arc::new(PackBridgeFS(Arc::new(MemoryFileSystem::default())));
+    let fs = Arc::new(StorageBridgeFS(Arc::new(MemoryFileSystem::default())));
     let root = Utf8PathBuf::from("/cache/test_manager");
     let temp = Utf8PathBuf::from("/temp/test_manager");
     test_cold_start(&root, &temp, fs.clone()).await?;
diff --git a/crates/rspack_storage/src/pack/mod.rs b/crates/rspack_storage/src/pack/mod.rs
index 1fa676e10295..454d166eb4bc 100644
--- a/crates/rspack_storage/src/pack/mod.rs
+++ b/crates/rspack_storage/src/pack/mod.rs
@@ -9,7 +9,7 @@ use std::{
 };
 
 use data::{PackOptions, RootOptions};
-pub use fs::{PackBridgeFS, PackFS};
+pub use fs::{StorageBridgeFS, StorageFS};
 use manager::ScopeManager;
 use rspack_error::Result;
 use rspack_paths::AssertUtf8;
@@ -29,7 +29,7 @@ pub struct PackStorage {
 pub struct PackStorageOptions {
   pub root: PathBuf,
   pub temp_root: PathBuf,
-  pub fs: Arc<dyn PackFS>,
+  pub fs: Arc<dyn StorageFS>,
   pub bucket_size: usize,
   pub pack_size: usize,
   pub expire: u64,
diff --git a/crates/rspack_storage/src/pack/strategy/mod.rs b/crates/rspack_storage/src/pack/strategy/mod.rs
index 03de839a4bf0..b1f9d61069b4 100644
--- a/crates/rspack_storage/src/pack/strategy/mod.rs
+++ b/crates/rspack_storage/src/pack/strategy/mod.rs
@@ -1,7 +1,7 @@
 mod split;
 
 use async_trait::async_trait;
-use rspack_error::Result;
+use rspack_error::{miette, thiserror::Error, Result};
 use rspack_paths::{Utf8Path, Utf8PathBuf};
 use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet};
 pub use split::SplitPackStrategy;
@@ -70,6 +70,36 @@ pub trait ScopeReadStrategy {
   async fn ensure_contents(&self, scope: &mut PackScope) -> Result<()>;
 }
 
+#[async_trait]
+pub trait ScopeValidateStrategy {
+  async fn validate_meta(&self, scope: &mut PackScope) -> Result<ValidateResult>;
+  async fn validate_packs(&self, scope: &mut PackScope) -> Result<ValidateResult>;
+}
+
+#[derive(Debug, Default, Clone)]
+pub struct WriteScopeResult {
+  pub wrote_files: HashSet<Utf8PathBuf>,
+  pub removed_files: HashSet<Utf8PathBuf>,
+}
+
+impl WriteScopeResult {
+  pub fn extend(&mut self, other: Self) {
+    self.wrote_files.extend(other.wrote_files);
+    self.removed_files.extend(other.removed_files);
+  }
+}
+
+pub type ScopeUpdate = HashMap<String, Option<String>>;
+#[async_trait]
+pub trait ScopeWriteStrategy {
+  fn update_scope(&self, scope: &mut PackScope, updates: ScopeUpdate) -> Result<()>;
+  async fn before_all(&self, scopes: &mut HashMap<String, PackScope>) -> Result<()>;
+  async fn write_packs(&self, scope: &mut PackScope) -> Result<WriteScopeResult>;
+  async fn write_meta(&self, scope: &mut PackScope) -> Result<WriteScopeResult>;
+  async fn merge_changed(&self, changed: WriteScopeResult) -> Result<()>;
+  async fn after_all(&self, scopes: &mut HashMap<String, PackScope>) -> Result<()>;
+}
+
 #[derive(Debug)]
 pub struct InvalidDetail {
   pub reason: String,
@@ -101,13 +131,52 @@ impl ValidateResult {
   }
 }
 
-impl std::fmt::Display for ValidateResult {
+#[derive(Debug)]
+enum ScopeValidateErrorReason {
+  Reason(String),
+  Detail(InvalidDetail),
+  Error(rspack_error::Error),
+}
+
+#[derive(Debug, Error)]
+pub struct StorageValidateError {
+  scope: Option<&'static str>,
+  inner: ScopeValidateErrorReason,
+}
+
+impl StorageValidateError {
+  pub fn from_detail(scope: Option<&'static str>, detail: InvalidDetail) -> Self {
+    Self {
+      scope,
+      inner: ScopeValidateErrorReason::Detail(detail),
+    }
+  }
+  pub fn from_error(scope: Option<&'static str>, error: rspack_error::Error) -> Self {
+    Self {
+      scope,
+      inner: ScopeValidateErrorReason::Error(error),
+    }
+  }
+  pub fn from_reason(scope: Option<&'static str>, reason: String) -> Self {
+    Self {
+      scope,
+      inner: ScopeValidateErrorReason::Reason(reason),
+    }
+  }
+}
+
+impl std::fmt::Display for StorageValidateError {
   fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-    match self {
-      ValidateResult::NotExists => write!(f, "validation failed due to not exists"),
-      ValidateResult::Valid => write!(f, "validation passed"),
-      ValidateResult::Invalid(e) => {
-        let mut pack_info_lines = e
+    write!(f, "validate ")?;
+    if let Some(scope) = self.scope {
+      write!(f, "scope `{}` ", scope)?;
+    }
+    write!(f, "failed due to")?;
+
+    match &self.inner {
+      ScopeValidateErrorReason::Detail(detail) => {
+        write!(f, " {}", detail.reason)?;
+        let mut pack_info_lines = detail
          .packs
          .iter()
          .map(|p| format!("- {}", p))
@@ -116,47 +185,32 @@ impl std::fmt::Display for ValidateResult {
           pack_info_lines.truncate(5);
           pack_info_lines.push("...".to_string());
         }
-        write!(
-          f,
-          "validation failed due to {}{}",
-          e.reason,
-          if pack_info_lines.is_empty() {
-            "".to_string()
-          } else {
-            format!(":\n{}", pack_info_lines.join("\n"))
-          }
-        )
+        if !pack_info_lines.is_empty() {
+          write!(f, ":\n{}", pack_info_lines.join("\n"))?;
+        }
+      }
+      ScopeValidateErrorReason::Error(e) => {
+        write!(f, " {}", e)?;
+      }
+      ScopeValidateErrorReason::Reason(e) => {
+        write!(f, " {}", e)?;
      }
    }
+    Ok(())
   }
 }
 
-#[async_trait]
-pub trait ScopeValidateStrategy {
-  async fn validate_meta(&self, scope: &mut PackScope) -> Result<ValidateResult>;
-  async fn validate_packs(&self, scope: &mut PackScope) -> Result<ValidateResult>;
-}
-
-#[derive(Debug, Default, Clone)]
-pub struct WriteScopeResult {
-  pub wrote_files: HashSet<Utf8PathBuf>,
-  pub removed_files: HashSet<Utf8PathBuf>,
-}
-
-impl WriteScopeResult {
-  pub fn extend(&mut self, other: Self) {
-    self.wrote_files.extend(other.wrote_files);
-    self.removed_files.extend(other.removed_files);
+impl miette::Diagnostic for StorageValidateError {
+  fn code<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
+    Some(Box::new("StorageValidateError"))
+  }
+  fn severity(&self) -> Option<miette::Severity> {
+    Some(miette::Severity::Warning)
+  }
+  fn diagnostic_source(&self) -> Option<&dyn miette::Diagnostic> {
+    match &self.inner {
+      ScopeValidateErrorReason::Error(e) => Some(e.as_ref()),
+      _ => None,
+    }
   }
-}
-
-pub type ScopeUpdate = HashMap<String, Option<String>>;
-#[async_trait]
-pub trait ScopeWriteStrategy {
-  fn update_scope(&self, scope: &mut PackScope, updates: ScopeUpdate) -> Result<()>;
-  async fn before_all(&self, scopes: &mut HashMap<String, PackScope>) -> Result<()>;
-  async fn write_packs(&self, scope: &mut PackScope) -> Result<WriteScopeResult>;
-  async fn write_meta(&self, scope: &mut PackScope) -> Result<WriteScopeResult>;
-  async fn merge_changed(&self, changed: WriteScopeResult) -> Result<()>;
-  async fn after_all(&self, scopes: &mut HashMap<String, PackScope>) -> Result<()>;
 }
diff --git a/crates/rspack_storage/src/pack/strategy/split/handle_file.rs b/crates/rspack_storage/src/pack/strategy/split/handle_file.rs
index ea50952e8a41..08f0077620e9 100644
--- a/crates/rspack_storage/src/pack/strategy/split/handle_file.rs
+++ b/crates/rspack_storage/src/pack/strategy/split/handle_file.rs
@@ -1,20 +1,25 @@
 use std::sync::Arc;
 
-use futures::{future::join_all, TryFutureExt};
+use futures::future::join_all;
 use rspack_error::{error, Result};
 use rspack_paths::{Utf8Path, Utf8PathBuf};
 use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet};
+use tokio::task::JoinError;
 
 use crate::{
-  pack::data::{current_time, PackScope, RootMeta, RootOptions, ScopeMeta},
-  PackFS,
+  pack::{
+    data::{current_time, PackScope, RootMeta, RootOptions, ScopeMeta},
+    fs::BatchStorageFSError,
+    strategy::StorageValidateError,
+  },
+  StorageFS,
 };
 
 pub async fn prepare_scope(
   scope_path: &Utf8Path,
   root: &Utf8Path,
   temp_root: &Utf8Path,
-  fs: Arc<dyn PackFS>,
+  fs: Arc<dyn StorageFS>,
 ) ->
Result<()> { let temp_path = redirect_to_path(scope_path, root, temp_root)?; fs.remove_dir(&temp_path).await?; @@ -27,7 +32,7 @@ pub async fn prepare_scope_dirs( scopes: &HashMap, root: &Utf8Path, temp_root: &Utf8Path, - fs: Arc, + fs: Arc, ) -> Result<()> { let tasks = scopes.values().map(|scope| { let fs = fs.clone(); @@ -35,53 +40,41 @@ pub async fn prepare_scope_dirs( let root_path = root.to_path_buf(); let temp_root_path = temp_root.to_path_buf(); tokio::spawn(async move { prepare_scope(&scope_path, &root_path, &temp_root_path, fs).await }) - .map_err(|e| error!("{e}")) }); - let res = join_all(tasks) - .await - .into_iter() - .collect::>>()?; + let res = BatchStorageFSError::try_from_joined_result( + "prepare scopes directories failed", + join_all(tasks) + .await + .into_iter() + .collect::, JoinError>>(), + ); - let mut errors = vec![]; - for task_result in res { - if let Err(e) = task_result { - errors.push(format!("- {}", e)); - } - } - - if errors.is_empty() { - Ok(()) + if let Some(e) = res { + Err(e.into()) } else { - Err(error!( - "prepare scopes directories failed:\n{}", - errors.join("\n") - )) + Ok(()) } } -pub async fn remove_files(files: HashSet, fs: Arc) -> Result<()> { +pub async fn remove_files(files: HashSet, fs: Arc) -> Result<()> { let tasks = files.into_iter().map(|path| { let fs = fs.clone(); - tokio::spawn(async move { fs.remove_file(&path).await }).map_err(|e| error!("{e}")) + tokio::spawn(async move { fs.remove_file(&path).await }) }); - let res = join_all(tasks) - .await - .into_iter() - .collect::>>()?; + let res = BatchStorageFSError::try_from_joined_result( + "remove files failed", + join_all(tasks) + .await + .into_iter() + .collect::, JoinError>>(), + ); - let mut errors = vec![]; - for task_result in res { - if let Err(e) = task_result { - errors.push(format!("- {}", e)); - } - } - - if errors.is_empty() { - Ok(()) + if let Some(e) = res { + Err(e.into()) } else { - Err(error!("remove files failed:\n{}", errors.join("\n"))) + Ok(()) } } @@ -90,7 +83,7 @@ pub async fn write_lock( files: &HashSet, root: &Utf8Path, temp_root: &Utf8Path, - fs: Arc, + fs: Arc, ) -> Result<()> { let lock_file = root.join(lock_file); let mut lock_writer = fs.write_file(&lock_file).await?; @@ -104,7 +97,7 @@ pub async fn write_lock( Ok(()) } -pub async fn remove_lock(lock_file: &str, root: &Utf8Path, fs: Arc) -> Result<()> { +pub async fn remove_lock(lock_file: &str, root: &Utf8Path, fs: Arc) -> Result<()> { let lock_file = root.join(lock_file); fs.remove_file(&lock_file).await?; Ok(()) @@ -114,7 +107,7 @@ pub async fn move_files( files: HashSet, root: &Utf8Path, temp_root: &Utf8Path, - fs: Arc, + fs: Arc, ) -> Result<()> { let lock_file = root.join("move.lock"); let mut lock_writer = fs.write_file(&lock_file).await?; @@ -134,25 +127,22 @@ pub async fn move_files( let tasks = candidates.into_iter().map(|(from, to)| { let fs = fs.clone(); - tokio::spawn(async move { fs.move_file(&from, &to).await }).map_err(|e| error!("{e}")) + tokio::spawn(async move { fs.move_file(&from, &to).await }) }); - let res = join_all(tasks) - .await - .into_iter() - .collect::>>>()?; + let res = BatchStorageFSError::try_from_joined_result( + "move temp files failed", + join_all(tasks) + .await + .into_iter() + .collect::>, JoinError>>(), + ); - let mut errors = vec![]; - for task_result in res { - if let Err(e) = task_result { - errors.push(format!("- {}", e)); - } - } - - if errors.is_empty() { - Ok(()) + if let Some(e) = res { + Err(e.into()) } else { - Err(error!("move temp files failed:\n{}", 
errors.join("\n"))) + fs.remove_file(&lock_file).await?; + Ok(()) } } @@ -160,7 +150,7 @@ async fn recovery_lock( lock: &str, root: &Utf8Path, temp_root: &Utf8Path, - fs: Arc, + fs: Arc, ) -> Result> { let lock_file = root.join(lock); if !fs.exists(&lock_file).await? { @@ -168,7 +158,7 @@ async fn recovery_lock( } let mut lock_reader = fs.read_file(&lock_file).await?; let lock_file_content = String::from_utf8(lock_reader.read_to_end().await?) - .map_err(|e| error!("parse utf8 failed: {}", e))?; + .map_err(|e| StorageValidateError::from_reason(None, format!("parse utf8 failed: {}", e)))?; let files = lock_file_content .split("\n") .map(|i| i.to_owned()) @@ -176,12 +166,22 @@ async fn recovery_lock( fs.remove_file(&lock_file).await?; if files.is_empty() { - return Err(error!("incomplete storage due to empty `move.lock`")); + return Err( + StorageValidateError::from_reason( + None, + "incomplete storage due to empty `move.lock`".to_string(), + ) + .into(), + ); } if files.first().is_some_and(|root| !root.eq(temp_root)) { - return Err(error!( - "incomplete storage due to `move.lock` from an unexpected directory" - )); + return Err( + StorageValidateError::from_reason( + None, + "incomplete storage due to `move.lock` from an unexpected directory".to_string(), + ) + .into(), + ); } Ok(files[1..].to_vec()) } @@ -189,7 +189,7 @@ async fn recovery_lock( pub async fn recovery_move_lock( root: &Utf8Path, temp_root: &Utf8Path, - fs: Arc, + fs: Arc, ) -> Result<()> { let moving_files = recovery_lock("move.lock", root, temp_root, fs.clone()).await?; if moving_files.is_empty() { @@ -211,7 +211,7 @@ pub async fn recovery_move_lock( pub async fn recovery_remove_lock( root: &Utf8Path, temp_root: &Utf8Path, - fs: Arc, + fs: Arc, ) -> Result<()> { let removing_files = recovery_lock("remove.lock", root, temp_root, fs.clone()).await?; if removing_files.is_empty() { @@ -228,7 +228,7 @@ pub async fn recovery_remove_lock( Ok(()) } -pub async fn walk_dir(root: &Utf8Path, fs: Arc) -> Result> { +pub async fn walk_dir(root: &Utf8Path, fs: Arc) -> Result> { let mut files = HashSet::default(); let mut stack = vec![root.to_owned()]; while let Some(path) = stack.pop() { @@ -262,7 +262,7 @@ pub fn redirect_to_path(path: &Utf8Path, src: &Utf8Path, dist: &Utf8Path) -> Res Ok(dist.join(relative_path)) } -async fn clean_scope(scope: &PackScope, fs: Arc) -> Result<()> { +async fn try_remove_scope_files(scope: &PackScope, fs: Arc) -> Result<()> { let scope_root = &scope.path; let scope_meta_file = ScopeMeta::get_path(scope_root); let mut scope_files = scope @@ -288,26 +288,24 @@ async fn clean_scope(scope: &PackScope, fs: Arc) -> Result<()> { Ok(()) } -pub async fn clean_scopes(scopes: &HashMap, fs: Arc) -> Result<()> { - let clean_scope_tasks = scopes.values().map(|scope| clean_scope(scope, fs.clone())); - - let res = join_all(clean_scope_tasks).await; - - let mut errors = vec![]; - for task_result in res { - if let Err(e) = task_result { - errors.push(format!("- {}", e)); - } - } - - if errors.is_empty() { - Ok(()) +pub async fn remove_unused_scope_files( + scopes: &HashMap, + fs: Arc, +) -> Result<()> { + let clean_scope_tasks = scopes + .values() + .map(|scope| try_remove_scope_files(scope, fs.clone())); + + if let Some(e) = + BatchStorageFSError::try_from_results("clean scopes failed", join_all(clean_scope_tasks).await) + { + Err(e.into()) } else { - Err(error!("clean scopes failed:\n{}", errors.join("\n"))) + Ok(()) } } -async fn remove_unused_scope(name: &str, dir: &Utf8Path, fs: Arc) -> Result<()> { +async fn 
try_remove_scope(name: &str, dir: &Utf8Path, fs: Arc) -> Result<()> { // do not remove hidden dirs if name.starts_with(".") { return Ok(()); @@ -323,39 +321,35 @@ async fn remove_unused_scope(name: &str, dir: &Utf8Path, fs: Arc) -> Ok(()) } -pub async fn clean_root(root: &Utf8Path, root_meta: &RootMeta, fs: Arc) -> Result<()> { +pub async fn remove_unused_scopes( + root: &Utf8Path, + root_meta: &RootMeta, + fs: Arc, +) -> Result<()> { let dirs = fs.read_dir(root).await?; let tasks = dirs.difference(&root_meta.scopes).map(|name| { let fs = fs.clone(); let scope_dir = root.join(name); let scope_name = name.clone(); - tokio::spawn(async move { remove_unused_scope(&scope_name, &scope_dir, fs).await }) - .map_err(|e| error!("{e}")) + tokio::spawn(async move { try_remove_scope(&scope_name, &scope_dir, fs).await }) }); - let res = join_all(tasks) - .await - .into_iter() - .collect::>>()?; + let res = BatchStorageFSError::try_from_joined_result( + "remove unused scopes failed", + join_all(tasks) + .await + .into_iter() + .collect::, JoinError>>(), + ); - let mut errors = vec![]; - for task_result in res { - if let Err(e) = task_result { - errors.push(format!("- {}", e)); - } - } - - if errors.is_empty() { - Ok(()) + if let Some(e) = res { + Err(e.into()) } else { - Err(error!( - "remove unused scopes failed:\n{}", - errors.join("\n") - )) + Ok(()) } } -async fn remove_expired_version(version: &str, dir: &Utf8Path, fs: Arc) -> Result<()> { +async fn try_remove_version(version: &str, dir: &Utf8Path, fs: Arc) -> Result<()> { // do not remove hidden dirs and lock files if version.starts_with(".") || version.contains(".lock") { return Ok(()); @@ -374,11 +368,9 @@ async fn remove_expired_version(version: &str, dir: &Utf8Path, fs: Arc() - .map_err(|e| error!("parse option meta failed: {}", e))?; + let expire_time = reader.read_line().await?.parse::().map_err(|e| { + StorageValidateError::from_reason(None, format!("parse option meta failed: {}", e)) + })?; let current = current_time(); if current > expire_time { @@ -388,10 +380,10 @@ async fn remove_expired_version(version: &str, dir: &Utf8Path, fs: Arc, + fs: Arc, ) -> Result<()> { let dirs = fs.read_dir(&root_options.root).await?; let tasks = dirs.into_iter().filter_map(|version| { @@ -400,31 +392,23 @@ pub async fn clean_versions( None } else { let fs = fs.clone(); - Some( - tokio::spawn(async move { remove_expired_version(&version, &version_dir, fs).await }) - .map_err(|e| error!("{e}")), - ) + Some(tokio::spawn(async move { + try_remove_version(&version, &version_dir, fs).await + })) } }); - let res = join_all(tasks) - .await - .into_iter() - .collect::>>()?; + let res = BatchStorageFSError::try_from_joined_result( + "remove expired versions failed", + join_all(tasks) + .await + .into_iter() + .collect::, JoinError>>(), + ); - let mut errors = vec![]; - for task_result in res { - if let Err(e) = task_result { - errors.push(format!("- {}", e)); - } - } - - if errors.is_empty() { - Ok(()) + if let Some(e) = res { + Err(e.into()) } else { - Err(error!( - "remove expired versions failed:\n{}", - errors.join("\n") - )) + Ok(()) } } diff --git a/crates/rspack_storage/src/pack/strategy/split/mod.rs b/crates/rspack_storage/src/pack/strategy/split/mod.rs index a4a7dd289b3e..e9fb8afe1480 100644 --- a/crates/rspack_storage/src/pack/strategy/split/mod.rs +++ b/crates/rspack_storage/src/pack/strategy/split/mod.rs @@ -9,29 +9,30 @@ mod write_scope; use std::{hash::Hasher, sync::Arc}; use handle_file::{ - clean_root, clean_scopes, clean_versions, 
recovery_move_lock, recovery_remove_lock, + recovery_move_lock, recovery_remove_lock, remove_expired_versions, remove_unused_scope_files, + remove_unused_scopes, }; use itertools::Itertools; -use rspack_error::{error, Result}; +use rspack_error::Result; use rspack_paths::{Utf8Path, Utf8PathBuf}; use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet, FxHasher}; use util::get_name; -use super::{RootStrategy, ScopeStrategy, ValidateResult}; +use super::{RootStrategy, ScopeStrategy, StorageValidateError, ValidateResult}; use crate::pack::{ data::{current_time, PackContents, PackKeys, PackScope, RootMeta, RootMetaFrom, RootOptions}, - fs::PackFS, + fs::StorageFS, }; #[derive(Debug, Clone)] pub struct SplitPackStrategy { - pub fs: Arc, + pub fs: Arc, pub root: Arc, pub temp_root: Arc, } impl SplitPackStrategy { - pub fn new(root: Utf8PathBuf, temp_root: Utf8PathBuf, fs: Arc) -> Self { + pub fn new(root: Utf8PathBuf, temp_root: Utf8PathBuf, fs: Arc) -> Self { Self { fs, root: Arc::new(root), @@ -71,11 +72,9 @@ impl RootStrategy for SplitPackStrategy { } let mut reader = self.fs.read_file(&meta_path).await?; - let expire_time = reader - .read_line() - .await? - .parse::() - .map_err(|e| error!("parse option meta failed: {}", e))?; + let expire_time = reader.read_line().await?.parse::().map_err(|e| { + StorageValidateError::from_reason(None, format!("parse root meta failed: {}", e)) + })?; let scopes = reader .read_line() .await? @@ -111,7 +110,7 @@ impl RootStrategy for SplitPackStrategy { } else { let now = current_time(); if now > root_meta.expire_time { - Ok(ValidateResult::invalid("cache expired")) + Ok(ValidateResult::invalid("expiration")) } else { Ok(ValidateResult::Valid) } @@ -129,9 +128,9 @@ impl RootStrategy for SplitPackStrategy { } let _ = tokio::try_join!( - clean_scopes(scopes, self.fs.clone()), - clean_root(&self.root, root_meta, self.fs.clone()), - clean_versions(&self.root, root_options, self.fs.clone()) + remove_unused_scope_files(scopes, self.fs.clone()), + remove_unused_scopes(&self.root, root_meta, self.fs.clone()), + remove_expired_versions(&self.root, root_options, self.fs.clone()) ); Ok(()) diff --git a/crates/rspack_storage/src/pack/strategy/split/read_scope.rs b/crates/rspack_storage/src/pack/strategy/split/read_scope.rs index 580e220d7a5f..69ff091c81c5 100644 --- a/crates/rspack_storage/src/pack/strategy/split/read_scope.rs +++ b/crates/rspack_storage/src/pack/strategy/split/read_scope.rs @@ -9,8 +9,8 @@ use rspack_paths::{Utf8Path, Utf8PathBuf}; use super::{util::get_indexed_packs, SplitPackStrategy}; use crate::pack::{ data::{Pack, PackContents, PackFileMeta, PackKeys, PackScope, ScopeMeta}, - fs::PackFS, - strategy::{PackReadStrategy, ScopeReadStrategy}, + fs::StorageFS, + strategy::{PackReadStrategy, ScopeReadStrategy, StorageValidateError}, }; #[async_trait] @@ -18,7 +18,7 @@ impl ScopeReadStrategy for SplitPackStrategy { async fn ensure_meta(&self, scope: &mut PackScope) -> Result<()> { if !scope.meta.loaded() { let meta_path = ScopeMeta::get_path(&scope.path); - let meta = read_scope_meta(&meta_path, self.fs.clone()) + let meta = read_scope_meta(scope.name, &meta_path, self.fs.clone()) .await? .unwrap_or_else(|| ScopeMeta::new(&scope.path, &scope.options)); scope.meta.set_value(meta); @@ -87,7 +87,11 @@ impl ScopeReadStrategy for SplitPackStrategy { } } -async fn read_scope_meta(path: &Utf8Path, fs: Arc) -> Result> { +async fn read_scope_meta( + scope: &'static str, + path: &Utf8Path, + fs: Arc, +) -> Result> { if !fs.exists(path).await? 
{ return Ok(None); } @@ -99,14 +103,17 @@ async fn read_scope_meta(path: &Utf8Path, fs: Arc) -> Result() - .map_err(|e| error!("parse option meta failed: {}", e)) + item.parse::().map_err(|e| { + StorageValidateError::from_reason(Some(scope), format!("parse option meta failed: {}", e)) + .into() + }) }) .collect::>>()?; if option_items.len() < 2 { - return Err(error!("option meta not match")); + return Err( + StorageValidateError::from_reason(Some(scope), "option meta not match".to_string()).into(), + ); } let bucket_size = option_items[0]; @@ -123,14 +130,20 @@ async fn read_scope_meta(path: &Utf8Path, fs: Arc) -> Result>()) .map(|i| { if i.len() < 3 { - Err(error!("file meta not match")) + Err( + StorageValidateError::from_reason(Some(scope), "file meta not match".to_string()) + .into(), + ) } else { Ok(PackFileMeta { name: i[0].to_owned(), hash: i[1].to_owned(), - size: i[2] - .parse::() - .map_err(|e| error!("parse file meta failed: {}", e))?, + size: i[2].parse::().map_err(|e| { + StorageValidateError::from_reason( + Some(scope), + format!("parse file meta failed: {}", e), + ) + })?, wrote: true, }) } @@ -140,7 +153,9 @@ async fn read_scope_meta(path: &Utf8Path, fs: Arc) -> Result Result Result<()> { + async fn mock_scope(path: &Utf8Path, fs: &dyn StorageFS, options: &PackOptions) -> Result<()> { mock_scope_meta_file(&ScopeMeta::get_path(path), fs, options, 3).await?; for bucket_id in 0..options.bucket_size { for pack_no in 0..3 { @@ -335,7 +351,11 @@ mod tests { bucket_size: 1, pack_size: 16, }); - let mut scope = PackScope::new(strategy.get_path("scope_name"), options.clone()); + let mut scope = PackScope::new( + "scope_name", + strategy.get_path("scope_name"), + options.clone(), + ); mock_scope(&scope.path, strategy.fs.as_ref(), &scope.options) .await diff --git a/crates/rspack_storage/src/pack/strategy/split/util.rs b/crates/rspack_storage/src/pack/strategy/split/util.rs index a74bbf7069c0..cb791a361c6d 100644 --- a/crates/rspack_storage/src/pack/strategy/split/util.rs +++ b/crates/rspack_storage/src/pack/strategy/split/util.rs @@ -86,16 +86,16 @@ pub mod test_pack_utils { use crate::{ pack::{ data::{current_time, PackOptions, PackScope}, - fs::PackFS, + fs::StorageFS, strategy::{ split::handle_file::prepare_scope, ScopeUpdate, ScopeWriteStrategy, SplitPackStrategy, WriteScopeResult, }, }, - PackBridgeFS, + StorageBridgeFS, }; - pub async fn mock_root_meta_file(path: &Utf8Path, fs: &dyn PackFS) -> Result<()> { + pub async fn mock_root_meta_file(path: &Utf8Path, fs: &dyn StorageFS) -> Result<()> { fs.ensure_dir(path.parent().expect("should have parent")) .await?; let mut writer = fs.write_file(path).await?; @@ -108,7 +108,7 @@ pub mod test_pack_utils { pub async fn mock_scope_meta_file( path: &Utf8Path, - fs: &dyn PackFS, + fs: &dyn StorageFS, options: &PackOptions, pack_count: usize, ) -> Result<()> { @@ -138,7 +138,7 @@ pub mod test_pack_utils { path: &Utf8Path, unique_id: &str, item_count: usize, - fs: &dyn PackFS, + fs: &dyn StorageFS, ) -> Result<()> { fs.ensure_dir(path.parent().expect("should have parent")) .await?; @@ -228,7 +228,7 @@ pub mod test_pack_utils { .expect("should remove dir"); } - pub async fn flush_file_mtime(path: &Utf8Path, fs: Arc) -> Result<()> { + pub async fn flush_file_mtime(path: &Utf8Path, fs: Arc) -> Result<()> { let content = fs.read_file(path).await?.read_to_end().await?; fs.write_file(path).await?.write_all(&content).await?; @@ -270,11 +270,11 @@ pub mod test_pack_utils { pub fn create_strategies(case: &str) -> Vec { let fs = [ ( - 
Arc::new(PackBridgeFS(Arc::new(MemoryFileSystem::default()))), + Arc::new(StorageBridgeFS(Arc::new(MemoryFileSystem::default()))), get_memory_path(case), ), ( - Arc::new(PackBridgeFS(Arc::new(NativeFileSystem {}))), + Arc::new(StorageBridgeFS(Arc::new(NativeFileSystem {}))), get_native_path(case), ), ]; diff --git a/crates/rspack_storage/src/pack/strategy/split/validate_scope.rs b/crates/rspack_storage/src/pack/strategy/split/validate_scope.rs index 14b12ab9b7ab..27e006efa202 100644 --- a/crates/rspack_storage/src/pack/strategy/split/validate_scope.rs +++ b/crates/rspack_storage/src/pack/strategy/split/validate_scope.rs @@ -6,7 +6,7 @@ use rspack_error::{error, Result}; use super::{util::get_indexed_packs, SplitPackStrategy}; use crate::pack::{ data::PackScope, - strategy::{ScopeValidateStrategy, ValidateResult}, + strategy::{ScopeValidateStrategy, StorageValidateError, ValidateResult}, }; #[async_trait] @@ -45,7 +45,7 @@ impl ScopeValidateStrategy for SplitPackStrategy { Err(_) => false, } }) - .map_err(|e| error!("{}", e)) + .map_err(|e| StorageValidateError::from_error(Some(scope.name), error!("{}", e)).into()) }); let validate_results = join_all(tasks) @@ -85,7 +85,7 @@ mod tests { use crate::pack::{ data::{PackOptions, PackScope, RootMeta, ScopeMeta}, - fs::PackFS, + fs::StorageFS, strategy::{ split::{ handle_file::prepare_scope, @@ -98,7 +98,7 @@ mod tests { }, }, ScopeReadStrategy, ScopeValidateStrategy, ScopeWriteStrategy, SplitPackStrategy, - ValidateResult, + StorageValidateError, ValidateResult, }, }; @@ -107,7 +107,7 @@ mod tests { bucket_size: 10, pack_size: 100, }); - let mut scope = PackScope::new(scope_path, same_options); + let mut scope = PackScope::new("scope_name", scope_path, same_options); strategy.ensure_meta(&mut scope).await?; let validated = strategy.validate_meta(&mut scope).await?; assert!(validated.is_valid()); @@ -123,25 +123,41 @@ mod tests { bucket_size: 1, pack_size: 100, }); - let mut scope = PackScope::new(scope_path.clone(), bucket_changed_options.clone()); - strategy.ensure_meta(&mut scope).await?; - let validated: ValidateResult = strategy.validate_meta(&mut scope).await?; - assert_eq!( - validated.to_string(), - "validation failed due to `options.bucketSize` changed" + let mut scope = PackScope::new( + "scope_name", + scope_path.clone(), + bucket_changed_options.clone(), ); + strategy.ensure_meta(&mut scope).await?; + if let ValidateResult::Invalid(detail) = strategy.validate_meta(&mut scope).await? { + let error = StorageValidateError::from_detail(Some("test_scope"), detail); + assert_eq!( + error.to_string(), + "validate scope `test_scope` failed due to `options.bucketSize` changed" + ); + } else { + panic!("should be invalid"); + } let max_size_changed_options = Arc::new(PackOptions { bucket_size: 10, pack_size: 99, }); - let mut scope = PackScope::new(scope_path.clone(), max_size_changed_options.clone()); - strategy.ensure_meta(&mut scope).await?; - let validated: ValidateResult = strategy.validate_meta(&mut scope).await?; - assert_eq!( - validated.to_string(), - "validation failed due to `options.packSize` changed" + let mut scope = PackScope::new( + "scope_name", + scope_path.clone(), + max_size_changed_options.clone(), ); + strategy.ensure_meta(&mut scope).await?; + if let ValidateResult::Invalid(detail) = strategy.validate_meta(&mut scope).await? 
{ + let error = StorageValidateError::from_detail(Some("test_scope"), detail); + assert_eq!( + error.to_string(), + "validate scope `test_scope` failed due to `options.packSize` changed" + ); + } else { + panic!("should be invalid"); + } Ok(()) } @@ -151,7 +167,7 @@ mod tests { strategy: &SplitPackStrategy, options: Arc, ) -> Result<()> { - let mut scope = PackScope::new(scope_path, options); + let mut scope = PackScope::new("scope_name", scope_path, options); strategy.ensure_keys(&mut scope).await?; let validated = strategy.validate_packs(&mut scope).await?; assert!(validated.is_valid()); @@ -162,11 +178,11 @@ mod tests { async fn test_invalid_packs_changed( scope_path: Utf8PathBuf, strategy: &SplitPackStrategy, - fs: Arc, + fs: Arc, options: Arc, files: HashSet, ) -> Result<()> { - let mut scope = PackScope::new(scope_path, options); + let mut scope = PackScope::new("scope_name", scope_path, options); for file in files { if !file.to_string().contains("scope_meta") { flush_file_mtime(&file, fs.clone()).await?; @@ -174,11 +190,13 @@ mod tests { } strategy.ensure_keys(&mut scope).await?; - let validated = strategy.validate_packs(&mut scope).await?; - assert!(validated - .to_string() - .starts_with("validation failed due to some packs are modified:")); - assert_eq!(validated.to_string().split("\n").count(), 7); + if let ValidateResult::Invalid(detail) = strategy.validate_packs(&mut scope).await? { + let error = StorageValidateError::from_detail(Some("scope_name"), detail).to_string(); + assert!(error.contains("validate scope `scope_name` failed due to some packs are modified")); + assert_eq!(error.split("\n").count(), 7); + } else { + panic!("should be invalid"); + } Ok(()) } @@ -236,7 +254,7 @@ mod tests { bucket_size: 10, pack_size: 100, }); - let mut mock_scope = PackScope::empty(scope_path.clone(), pack_options.clone()); + let mut mock_scope = PackScope::empty("scope_name", scope_path.clone(), pack_options.clone()); let updates = mock_updates(0, 100, 30, UpdateVal::Value("val".to_string())); strategy .update_scope(&mut mock_scope, updates) diff --git a/crates/rspack_storage/src/pack/strategy/split/write_pack.rs b/crates/rspack_storage/src/pack/strategy/split/write_pack.rs index e7c6aba024c8..c25bdf82a35c 100644 --- a/crates/rspack_storage/src/pack/strategy/split/write_pack.rs +++ b/crates/rspack_storage/src/pack/strategy/split/write_pack.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use async_trait::async_trait; use itertools::Itertools; -use rspack_error::{error, Result}; +use rspack_error::Result; use rspack_paths::{Utf8Path, Utf8PathBuf}; use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet}; @@ -137,7 +137,7 @@ impl PackWriteStrategy for SplitPackStrategy { let keys = pack.keys.expect_value(); let contents = pack.contents.expect_value(); if keys.len() != contents.len() { - return Err(error!("pack keys and contents length not match")); + panic!("pack keys and contents length not match"); } let mut writer = self.fs.write_file(&path).await?; diff --git a/crates/rspack_storage/src/pack/strategy/split/write_scope.rs b/crates/rspack_storage/src/pack/strategy/split/write_scope.rs index 33e4f68c25e3..58719090c09b 100644 --- a/crates/rspack_storage/src/pack/strategy/split/write_scope.rs +++ b/crates/rspack_storage/src/pack/strategy/split/write_scope.rs @@ -1,10 +1,10 @@ use async_trait::async_trait; use futures::future::join_all; -use futures::TryFutureExt; use itertools::Itertools; use rayon::iter::{IntoParallelIterator, ParallelBridge, ParallelIterator}; use rspack_error::{error, Result}; 
use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet}; +use tokio::task::JoinError; use super::{ handle_file::{ @@ -70,7 +70,7 @@ impl ScopeWriteStrategy for SplitPackStrategy { fn update_scope(&self, scope: &mut PackScope, updates: ScopeUpdate) -> Result<()> { if !scope.loaded() { - return Err(error!("scope not loaded, run `load` first")); + panic!("scope not loaded, run `load` first"); } let mut scope_meta = scope.meta.take_value().expect("should have scope meta"); let mut scope_packs = scope.packs.take_value().expect("should have scope packs"); @@ -241,7 +241,7 @@ async fn save_pack(pack: &Pack, strategy: &SplitPackStrategy) -> Result let keys = pack.keys.expect_value(); let contents = pack.contents.expect_value(); if keys.len() != contents.len() { - return Err(error!("pack keys and contents length not match")); + panic!("pack keys and contents length not match"); } strategy.write_pack(pack).await?; let hash = strategy @@ -261,13 +261,13 @@ async fn batch_write_packs( let tasks = packs.into_iter().map(|pack| { let strategy = strategy.to_owned(); tokio::spawn(async move { (save_pack(&pack, &strategy).await, pack) }) - .map_err(|e| error!("{}", e)) }); let task_result = join_all(tasks) .await .into_iter() - .collect::, Pack)>>>()?; + .collect::, Pack)>, JoinError>>() + .map_err(|e| error!("{}", e))?; let mut res = vec![]; for (hash, pack) in task_result { @@ -489,7 +489,11 @@ mod tests { bucket_size: 1, pack_size: 32, }); - let mut scope = PackScope::empty(strategy.get_path("scope_name"), options.clone()); + let mut scope = PackScope::empty( + "scope_name", + strategy.get_path("scope_name"), + options.clone(), + ); clean_strategy(&strategy).await; let _ = test_single_bucket(&mut scope, &strategy) @@ -508,7 +512,11 @@ mod tests { bucket_size: 10, pack_size: 32, }); - let mut scope = PackScope::empty(strategy.get_path("scope_name"), options.clone()); + let mut scope = PackScope::empty( + "scope_name", + strategy.get_path("scope_name"), + options.clone(), + ); clean_strategy(&strategy).await; let _ = test_multi_bucket(&mut scope, &strategy).await.map_err(|e| { @@ -525,7 +533,11 @@ mod tests { bucket_size: 1, pack_size: 2000, }); - let mut scope = PackScope::empty(strategy.get_path("scope_name"), options.clone()); + let mut scope = PackScope::empty( + "scope_name", + strategy.get_path("scope_name"), + options.clone(), + ); clean_strategy(&strategy).await; let _ = test_big_bucket(&mut scope, &strategy).await.map_err(|e| { diff --git a/crates/rspack_storage/tests/build.rs b/crates/rspack_storage/tests/build.rs index 6b262afc3e84..588051787f3e 100644 --- a/crates/rspack_storage/tests/build.rs +++ b/crates/rspack_storage/tests/build.rs @@ -5,7 +5,7 @@ mod test_storage_build { use rspack_error::Result; use rspack_fs::{MemoryFileSystem, NativeFileSystem}; use rspack_paths::{AssertUtf8, Utf8PathBuf}; - use rspack_storage::{PackBridgeFS, PackFS, PackStorage, PackStorageOptions, Storage}; + use rspack_storage::{PackStorage, PackStorageOptions, Storage, StorageBridgeFS, StorageFS}; pub fn get_native_path(p: &str) -> (PathBuf, PathBuf) { let base = std::env::temp_dir() @@ -23,7 +23,7 @@ mod test_storage_build { root: &Utf8PathBuf, temp_root: &Utf8PathBuf, version: &str, - fs: Arc, + fs: Arc, ) -> PackStorageOptions { PackStorageOptions { version: version.to_string(), @@ -39,7 +39,7 @@ mod test_storage_build { async fn test_initial_build( root: &Utf8PathBuf, - fs: Arc, + fs: Arc, options: PackStorageOptions, ) -> Result<()> { let storage = PackStorage::new(options); @@ -60,7 +60,7 @@ mod 
test_storage_build { async fn test_recovery_modify( root: &Utf8PathBuf, - fs: Arc, + fs: Arc, options: PackStorageOptions, ) -> Result<()> { let storage = PackStorage::new(options); @@ -80,7 +80,7 @@ mod test_storage_build { async fn test_recovery_final( _root: &Utf8PathBuf, - _fs: Arc, + _fs: Arc, options: PackStorageOptions, ) -> Result<()> { let storage = PackStorage::new(options); @@ -111,11 +111,11 @@ mod test_storage_build { let cases = [ ( get_native_path("test_build_native"), - Arc::new(PackBridgeFS(Arc::new(NativeFileSystem {}))), + Arc::new(StorageBridgeFS(Arc::new(NativeFileSystem {}))), ), ( get_memory_path("test_build_memory"), - Arc::new(PackBridgeFS(Arc::new(MemoryFileSystem::default()))), + Arc::new(StorageBridgeFS(Arc::new(MemoryFileSystem::default()))), ), ]; let version = "xxx".to_string(); diff --git a/crates/rspack_storage/tests/dev.rs b/crates/rspack_storage/tests/dev.rs index 8e3fca96631c..4047b3af85d5 100644 --- a/crates/rspack_storage/tests/dev.rs +++ b/crates/rspack_storage/tests/dev.rs @@ -5,7 +5,7 @@ mod test_storage_dev { use rspack_error::Result; use rspack_fs::{MemoryFileSystem, NativeFileSystem}; use rspack_paths::{AssertUtf8, Utf8PathBuf}; - use rspack_storage::{PackBridgeFS, PackFS, PackStorage, PackStorageOptions, Storage}; + use rspack_storage::{PackStorage, PackStorageOptions, Storage, StorageBridgeFS, StorageFS}; pub fn get_native_path(p: &str) -> (PathBuf, PathBuf) { let base = std::env::temp_dir() @@ -23,7 +23,7 @@ mod test_storage_dev { root: &Utf8PathBuf, temp_root: &Utf8PathBuf, version: &str, - fs: Arc, + fs: Arc, ) -> PackStorageOptions { PackStorageOptions { version: version.to_string(), @@ -39,7 +39,7 @@ mod test_storage_dev { async fn test_initial_dev( root: &Utf8PathBuf, - fs: Arc, + fs: Arc, options: PackStorageOptions, ) -> Result<()> { let storage = PackStorage::new(options); @@ -83,7 +83,7 @@ mod test_storage_dev { async fn test_recovery_modify( root: &Utf8PathBuf, - fs: Arc, + fs: Arc, options: PackStorageOptions, ) -> Result<()> { let storage = PackStorage::new(options); @@ -114,7 +114,7 @@ mod test_storage_dev { async fn test_recovery_final( _root: &Utf8PathBuf, - _fs: Arc, + _fs: Arc, options: PackStorageOptions, ) -> Result<()> { let storage = PackStorage::new(options); @@ -146,11 +146,11 @@ mod test_storage_dev { let cases = [ ( get_native_path("test_dev_native"), - Arc::new(PackBridgeFS(Arc::new(NativeFileSystem {}))), + Arc::new(StorageBridgeFS(Arc::new(NativeFileSystem {}))), ), ( get_memory_path("test_dev_memory"), - Arc::new(PackBridgeFS(Arc::new(MemoryFileSystem::default()))), + Arc::new(StorageBridgeFS(Arc::new(MemoryFileSystem::default()))), ), ]; let version = "xxx".to_string(); diff --git a/crates/rspack_storage/tests/error.rs b/crates/rspack_storage/tests/error.rs index 1af967e1149c..c49bcfb4ae8a 100644 --- a/crates/rspack_storage/tests/error.rs +++ b/crates/rspack_storage/tests/error.rs @@ -5,7 +5,7 @@ mod test_storage_error { use rspack_error::Result; use rspack_fs::{MemoryFileSystem, NativeFileSystem}; use rspack_paths::{AssertUtf8, Utf8Path, Utf8PathBuf}; - use rspack_storage::{PackBridgeFS, PackFS, PackStorage, PackStorageOptions, Storage}; + use rspack_storage::{PackStorage, PackStorageOptions, Storage, StorageBridgeFS, StorageFS}; pub fn get_native_path(p: &str) -> (PathBuf, PathBuf) { let base = std::env::temp_dir() @@ -23,7 +23,7 @@ mod test_storage_error { root: &Utf8PathBuf, temp_root: &Utf8PathBuf, version: &str, - fs: Arc, + fs: Arc, ) -> PackStorageOptions { PackStorageOptions { version: 
version.to_string(), @@ -39,7 +39,7 @@ mod test_storage_error { async fn test_initial_error( root: &Utf8PathBuf, - fs: Arc, + fs: Arc, options: PackStorageOptions, ) -> Result<()> { let storage = PackStorage::new(options); @@ -62,7 +62,7 @@ mod test_storage_error { async fn test_recovery_invalid_meta( root: &Utf8PathBuf, - fs: Arc, + fs: Arc, options: PackStorageOptions, ) -> Result<()> { let storage = PackStorage::new(options); @@ -92,7 +92,7 @@ mod test_storage_error { async fn get_first_pack( scope_name: &str, meta_path: &Utf8Path, - fs: &dyn PackFS, + fs: &dyn StorageFS, ) -> Result { let mut reader = fs.read_file(meta_path).await?; reader.read_line().await?; @@ -112,7 +112,7 @@ mod test_storage_error { async fn test_recovery_remove_pack( root: &Utf8PathBuf, - fs: Arc, + fs: Arc, options: PackStorageOptions, ) -> Result<()> { let storage = PackStorage::new(options); @@ -126,7 +126,7 @@ mod test_storage_error { // test assert!(storage.load("test_scope").await.is_err_and(|e| { e.to_string() - .contains("validation failed due to some packs are modified") + .contains("validate scope `test_scope` failed due to some packs are modified") })); // resume @@ -139,7 +139,7 @@ mod test_storage_error { async fn test_recovery_modified_pack( _root: &Utf8PathBuf, - _fs: Arc, + _fs: Arc, options: PackStorageOptions, ) -> Result<()> { let storage = PackStorage::new(options); @@ -147,7 +147,7 @@ mod test_storage_error { // test assert!(storage.load("test_scope").await.is_err_and(|e| { e.to_string() - .contains("validation failed due to some packs are modified") + .contains("validate scope `test_scope` failed due to some packs are modified") })); Ok(()) @@ -159,11 +159,11 @@ mod test_storage_error { let cases = [ ( get_native_path("test_error_native"), - Arc::new(PackBridgeFS(Arc::new(NativeFileSystem {}))), + Arc::new(StorageBridgeFS(Arc::new(NativeFileSystem {}))), ), ( get_memory_path("test_error_memory"), - Arc::new(PackBridgeFS(Arc::new(MemoryFileSystem::default()))), + Arc::new(StorageBridgeFS(Arc::new(MemoryFileSystem::default()))), ), ]; let version = "xxx".to_string(); diff --git a/crates/rspack_storage/tests/expire.rs b/crates/rspack_storage/tests/expire.rs index 7bc659175002..dea27bfdd733 100644 --- a/crates/rspack_storage/tests/expire.rs +++ b/crates/rspack_storage/tests/expire.rs @@ -5,7 +5,7 @@ mod test_storage_expire { use rspack_error::Result; use rspack_fs::{MemoryFileSystem, NativeFileSystem}; use rspack_paths::{AssertUtf8, Utf8PathBuf}; - use rspack_storage::{PackBridgeFS, PackFS, PackStorage, PackStorageOptions, Storage}; + use rspack_storage::{PackStorage, PackStorageOptions, Storage, StorageBridgeFS, StorageFS}; pub fn get_native_path(p: &str) -> (PathBuf, PathBuf) { let base = std::env::temp_dir() @@ -23,7 +23,7 @@ mod test_storage_expire { version: &str, root: &Utf8PathBuf, temp_root: &Utf8PathBuf, - fs: Arc, + fs: Arc, ) -> Result<()> { let storage = PackStorage::new(PackStorageOptions { version: version.to_string(), @@ -59,7 +59,7 @@ mod test_storage_expire { version: &str, root: &Utf8PathBuf, temp_root: &Utf8PathBuf, - fs: Arc, + fs: Arc, ) -> Result<()> { let storage = PackStorage::new(PackStorageOptions { version: version.to_string(), @@ -73,7 +73,7 @@ mod test_storage_expire { }); assert!(storage.load("test_scope").await.is_err_and(|e| { e.to_string() - .contains("validation failed due to cache expired") + .contains("validate scope `test_scope` failed due to expiration") })); Ok(()) @@ -84,7 +84,7 @@ mod test_storage_expire { version: &str, root: &Utf8PathBuf, temp_root: 
-    fs: Arc<dyn PackFS>,
+    fs: Arc<dyn StorageFS>,
   ) -> Result<()> {
     let storage = PackStorage::new(PackStorageOptions {
       version: version.to_string(),
@@ -124,11 +124,11 @@ mod test_storage_expire {
     let cases = [
       (
         get_native_path("test_expire_native"),
-        Arc::new(PackBridgeFS(Arc::new(NativeFileSystem {}))),
+        Arc::new(StorageBridgeFS(Arc::new(NativeFileSystem {}))),
       ),
       (
         get_memory_path("test_expire_memory"),
-        Arc::new(PackBridgeFS(Arc::new(MemoryFileSystem::default()))),
+        Arc::new(StorageBridgeFS(Arc::new(MemoryFileSystem::default()))),
       ),
     ];
 
diff --git a/crates/rspack_storage/tests/lock.rs b/crates/rspack_storage/tests/lock.rs
index 113e5f1b689d..60f0c8ea88c9 100644
--- a/crates/rspack_storage/tests/lock.rs
+++ b/crates/rspack_storage/tests/lock.rs
@@ -8,18 +8,18 @@ mod test_storage_lock {
   use rspack_error::{error, Result};
   use rspack_fs::{FileMetadata, MemoryFileSystem, NativeFileSystem, ReadStream, WriteStream};
   use rspack_paths::{AssertUtf8, Utf8Path, Utf8PathBuf};
-  use rspack_storage::{PackBridgeFS, PackFS, PackStorage, PackStorageOptions, Storage};
+  use rspack_storage::{PackStorage, PackStorageOptions, Storage, StorageBridgeFS, StorageFS};
   use rustc_hash::FxHashSet as HashSet;
 
   #[derive(Debug)]
-  pub struct MockPackFS {
-    pub fs: Arc<dyn PackFS>,
+  pub struct MockStorageFS {
+    pub fs: Arc<dyn StorageFS>,
     pub moved: AtomicUsize,
     pub break_on: usize,
   }
 
   #[async_trait::async_trait]
-  impl PackFS for MockPackFS {
+  impl StorageFS for MockStorageFS {
     async fn exists(&self, path: &Utf8Path) -> Result<bool> {
       self.fs.exists(path).await
     }
@@ -81,7 +81,7 @@ mod test_storage_lock {
     version: &str,
     root: &Utf8PathBuf,
     temp_root: &Utf8PathBuf,
-    fs: Arc<dyn PackFS>,
+    fs: Arc<dyn StorageFS>,
   ) -> Result<()> {
     let storage = PackStorage::new(PackStorageOptions {
       version: version.to_string(),
@@ -117,7 +117,7 @@ mod test_storage_lock {
     version: &str,
     root: &Utf8PathBuf,
     temp_root: &Utf8PathBuf,
-    fs: Arc<dyn PackFS>,
+    fs: Arc<dyn StorageFS>,
   ) -> Result<()> {
     let storage = PackStorage::new(PackStorageOptions {
       version: version.to_string(),
@@ -137,7 +137,7 @@ mod test_storage_lock {
     version: &str,
     root: &Utf8PathBuf,
     temp_root: &Utf8PathBuf,
-    fs: Arc<dyn PackFS>,
+    fs: Arc<dyn StorageFS>,
   ) -> Result<()> {
     let storage = PackStorage::new(PackStorageOptions {
       version: version.to_string(),
@@ -162,11 +162,11 @@ mod test_storage_lock {
     let cases = [
       (
         get_native_path("test_lock_native"),
-        Arc::new(PackBridgeFS(Arc::new(NativeFileSystem {}))),
+        Arc::new(StorageBridgeFS(Arc::new(NativeFileSystem {}))),
       ),
       (
         get_memory_path("test_lock_memory"),
-        Arc::new(PackBridgeFS(Arc::new(MemoryFileSystem::default()))),
+        Arc::new(StorageBridgeFS(Arc::new(MemoryFileSystem::default()))),
       ),
     ];
 
@@ -182,7 +182,7 @@ mod test_storage_lock {
       "xxx",
       &root,
       &temp_root,
-      Arc::new(MockPackFS {
+      Arc::new(MockStorageFS {
        fs: fs.clone(),
        moved: AtomicUsize::new(0),
        break_on: 3,
@@ -195,7 +195,7 @@ mod test_storage_lock {
       "xxx",
       &root,
       &temp_root,
-      Arc::new(MockPackFS {
+      Arc::new(MockStorageFS {
        fs: fs.clone(),
        moved: AtomicUsize::new(0),
        break_on: 9999,
@@ -212,11 +212,11 @@ mod test_storage_lock {
     let cases = [
       (
         get_native_path("test_lock_fail_native"),
-        Arc::new(PackBridgeFS(Arc::new(NativeFileSystem {}))),
+        Arc::new(StorageBridgeFS(Arc::new(NativeFileSystem {}))),
       ),
       (
         get_memory_path("test_lock_fail_memory"),
-        Arc::new(PackBridgeFS(Arc::new(MemoryFileSystem::default()))),
+        Arc::new(StorageBridgeFS(Arc::new(MemoryFileSystem::default()))),
       ),
     ];
 
@@ -232,7 +232,7 @@ mod test_storage_lock {
       "xxx",
       &root,
       &temp_root,
-      Arc::new(MockPackFS {
+      Arc::new(MockStorageFS {
        fs: fs.clone(),
        moved: AtomicUsize::new(0),
        break_on: 3,
@@ -245,7 +245,7 @@ mod test_storage_lock {
       "xxx",
       &root,
       &temp_root.join("other"),
-      Arc::new(MockPackFS {
+      Arc::new(MockStorageFS {
        fs: fs.clone(),
        moved: AtomicUsize::new(0),
        break_on: 9999,
diff --git a/crates/rspack_storage/tests/multi.rs b/crates/rspack_storage/tests/multi.rs
index 498a857f990c..a33234eb4a15 100644
--- a/crates/rspack_storage/tests/multi.rs
+++ b/crates/rspack_storage/tests/multi.rs
@@ -5,7 +5,7 @@ mod test_storage_multi {
   use rspack_error::Result;
   use rspack_fs::{MemoryFileSystem, NativeFileSystem};
   use rspack_paths::{AssertUtf8, Utf8PathBuf};
-  use rspack_storage::{PackBridgeFS, PackFS, PackStorage, PackStorageOptions, Storage};
+  use rspack_storage::{PackStorage, PackStorageOptions, Storage, StorageBridgeFS, StorageFS};
 
   pub fn get_native_path(p: &str) -> (PathBuf, PathBuf) {
     let base = std::env::temp_dir()
@@ -23,7 +23,7 @@ mod test_storage_multi {
     root: &Utf8PathBuf,
     temp_root: &Utf8PathBuf,
     version: &str,
-    fs: Arc<dyn PackFS>,
+    fs: Arc<dyn StorageFS>,
   ) -> PackStorageOptions {
     PackStorageOptions {
       version: version.to_string(),
@@ -39,7 +39,7 @@ mod test_storage_multi {
 
   async fn test_initial_build(
     root: &Utf8PathBuf,
-    fs: Arc<dyn PackFS>,
+    fs: Arc<dyn StorageFS>,
     options: PackStorageOptions,
   ) -> Result<()> {
     let storage = PackStorage::new(options);
@@ -68,7 +68,7 @@ mod test_storage_multi {
 
   async fn test_recovery_modify(
     root: &Utf8PathBuf,
-    fs: Arc<dyn PackFS>,
+    fs: Arc<dyn StorageFS>,
     options: PackStorageOptions,
   ) -> Result<()> {
     let storage = PackStorage::new(options);
@@ -104,7 +104,7 @@ mod test_storage_multi {
 
   async fn test_recovery_final(
     _root: &Utf8PathBuf,
-    _fs: Arc<dyn PackFS>,
+    _fs: Arc<dyn StorageFS>,
     options: PackStorageOptions,
   ) -> Result<()> {
     let storage = PackStorage::new(options);
@@ -156,11 +156,11 @@ mod test_storage_multi {
     let cases = [
      (
        get_native_path("test_multi_native"),
-        Arc::new(PackBridgeFS(Arc::new(NativeFileSystem {}))),
+        Arc::new(StorageBridgeFS(Arc::new(NativeFileSystem {}))),
      ),
      (
        get_memory_path("test_multi_memory"),
-        Arc::new(PackBridgeFS(Arc::new(MemoryFileSystem::default()))),
+        Arc::new(StorageBridgeFS(Arc::new(MemoryFileSystem::default()))),
      ),
    ];
    let version = "xxx".to_string();
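
Usage note (illustrative sketch, not part of the applied diff): the updated tests obtain an `Arc<dyn StorageFS>` by wrapping an `rspack_fs` backend in `StorageBridgeFS`. The snippet below shows that setup in isolation, assuming `StorageBridgeFS` implements `StorageFS` as the test cases above imply; the `memory_test_fs` helper name is hypothetical.

    use std::sync::Arc;

    use rspack_fs::MemoryFileSystem;
    use rspack_storage::{StorageBridgeFS, StorageFS};

    // Hypothetical helper: bridge an in-memory rspack_fs backend into the
    // storage layer, yielding the `Arc<dyn StorageFS>` that the test
    // functions above take as their `fs` argument.
    fn memory_test_fs() -> Arc<dyn StorageFS> {
      Arc::new(StorageBridgeFS(Arc::new(MemoryFileSystem::default())))
    }

A test case would pass `memory_test_fs()` wherever the helpers above expect `fs: Arc<dyn StorageFS>`, or swap in `NativeFileSystem {}` for the on-disk variants, mirroring the native/memory case pairs in each test file.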