diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d91578c4996a..17071bfcf09e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -159,7 +159,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: crate-ci/typos@v1.27.3 + - uses: crate-ci/typos@v1.28.1 with: files: . @@ -288,7 +288,7 @@ jobs: - uses: ./.github/actions/rustup with: shared-key: check - - uses: cargo-bins/cargo-binstall@v1.10.13 + - uses: cargo-bins/cargo-binstall@v1.10.14 - run: cargo binstall --no-confirm cargo-shear - run: cargo shear diff --git a/Cargo.lock b/Cargo.lock index 3d7024d51db5..43f92a258c9d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3669,6 +3669,7 @@ dependencies = [ "once_cell", "rspack_cacheable", "rspack_resolver", + "rspack_sources", "rustc-hash 1.1.0", "serde_json", "swc_core", @@ -3775,6 +3776,7 @@ dependencies = [ name = "rspack_fs" version = "0.1.0" dependencies = [ + "async-trait", "dunce", "futures", "rspack_error", @@ -3786,6 +3788,7 @@ dependencies = [ name = "rspack_fs_node" version = "0.1.0" dependencies = [ + "async-trait", "futures", "napi", "napi-build", diff --git a/crates/node_binding/binding.d.ts b/crates/node_binding/binding.d.ts index 0fd15cd05604..85d56f7f798f 100644 --- a/crates/node_binding/binding.d.ts +++ b/crates/node_binding/binding.d.ts @@ -1335,6 +1335,7 @@ export interface RawEntryDynamicResult { export interface RawEnvironment { const?: boolean arrowFunction?: boolean + nodePrefixForCoreModules?: boolean } export interface RawEvalDevToolModulePluginOptions { @@ -1722,6 +1723,7 @@ export interface RawOptions { experiments: RawExperiments node?: RawNodeOption profile: boolean + amd?: string bail: boolean __references: Record } diff --git a/crates/node_binding/package.json b/crates/node_binding/package.json index 7fe825ce5797..34d2722e72c1 100644 --- a/crates/node_binding/package.json +++ b/crates/node_binding/package.json @@ -1,6 +1,6 @@ { "name": "@rspack/binding", - "version": "1.1.3", + 
"version": "1.1.4", "license": "MIT", "description": "Node binding for rspack", "main": "binding.js", diff --git a/crates/node_binding/src/lib.rs b/crates/node_binding/src/lib.rs index 6cc3aec145c7..e3b177059cc9 100644 --- a/crates/node_binding/src/lib.rs +++ b/crates/node_binding/src/lib.rs @@ -13,7 +13,7 @@ use napi::bindgen_prelude::*; use rspack_binding_options::BuiltinPlugin; use rspack_core::{Compilation, PluginExt}; use rspack_error::Diagnostic; -use rspack_fs_node::{AsyncNodeWritableFileSystem, ThreadsafeNodeFS}; +use rspack_fs_node::{NodeFileSystem, ThreadsafeNodeFS}; mod compiler; mod diagnostic; @@ -70,10 +70,9 @@ impl Rspack { compiler_options, plugins, rspack_binding_options::buildtime_plugins::buildtime_plugins(), - Some(Box::new( - AsyncNodeWritableFileSystem::new(output_filesystem) - .map_err(|e| Error::from_reason(format!("Failed to create writable filesystem: {e}",)))?, - )), + Some(Box::new(NodeFileSystem::new(output_filesystem).map_err( + |e| Error::from_reason(format!("Failed to create writable filesystem: {e}",)), + )?)), None, Some(resolver_factory), Some(loader_resolver_factory), diff --git a/crates/node_binding/src/resolver_factory.rs b/crates/node_binding/src/resolver_factory.rs index b6f140a66ec1..e9eeb78cb24a 100644 --- a/crates/node_binding/src/resolver_factory.rs +++ b/crates/node_binding/src/resolver_factory.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use napi_derive::napi; use rspack_core::{Resolve, ResolverFactory}; -use rspack_fs::{NativeFileSystem, SyncReadableFileSystem}; +use rspack_fs::{FileSystem, NativeFileSystem}; use crate::{ raw_resolve::{ @@ -15,7 +15,7 @@ use crate::{ pub struct JsResolverFactory { pub(crate) resolver_factory: Option>, pub(crate) loader_resolver_factory: Option>, - pub(crate) input_filesystem: Arc, + pub(crate) input_filesystem: Arc, } #[napi] diff --git a/crates/rspack_binding_options/src/options/mod.rs b/crates/rspack_binding_options/src/options/mod.rs index a82b3f393639..e2fb75b51e4d 100644 --- 
a/crates/rspack_binding_options/src/options/mod.rs +++ b/crates/rspack_binding_options/src/options/mod.rs @@ -54,6 +54,7 @@ pub struct RawOptions { pub experiments: RawExperiments, pub node: Option, pub profile: bool, + pub amd: Option, pub bail: bool, #[napi(js_name = "__references", ts_type = "Record")] pub __references: References, @@ -93,6 +94,7 @@ impl TryFrom for CompilerOptions { optimization, node, profile: value.profile, + amd: value.amd, bail: value.bail, __references: value.__references, }) diff --git a/crates/rspack_binding_options/src/options/raw_output.rs b/crates/rspack_binding_options/src/options/raw_output.rs index 61a8410f2140..b9c11400b335 100644 --- a/crates/rspack_binding_options/src/options/raw_output.rs +++ b/crates/rspack_binding_options/src/options/raw_output.rs @@ -47,6 +47,7 @@ impl From for CrossOriginLoading { pub struct RawEnvironment { pub r#const: Option, pub arrow_function: Option, + pub node_prefix_for_core_modules: Option, } impl From for Environment { @@ -54,6 +55,7 @@ impl From for Environment { Self { r#const: value.r#const, arrow_function: value.arrow_function, + node_prefix_for_core_modules: value.node_prefix_for_core_modules, } } } diff --git a/crates/rspack_binding_values/src/compilation/mod.rs b/crates/rspack_binding_values/src/compilation/mod.rs index 1a230087a38c..cf53a60c1b17 100644 --- a/crates/rspack_binding_values/src/compilation/mod.rs +++ b/crates/rspack_binding_values/src/compilation/mod.rs @@ -533,9 +533,10 @@ impl JsCompilation { ) -> napi::Result { let compilation = self.as_ref()?; - let path_and_asset_info = - compilation.get_path_with_info(&filename.into(), data.to_path_data())?; - Ok(path_and_asset_info.into()) + let mut asset_info = AssetInfo::default(); + let path = + compilation.get_path_with_info(&filename.into(), data.to_path_data(), &mut asset_info)?; + Ok((path, asset_info).into()) } #[napi] diff --git a/crates/rspack_cacheable/src/context.rs b/crates/rspack_cacheable/src/context.rs index 
6a37e24e36b3..9a33663a20a8 100644 --- a/crates/rspack_cacheable/src/context.rs +++ b/crates/rspack_cacheable/src/context.rs @@ -20,7 +20,7 @@ impl<'a> ContextGuard<'a> { Self { context } } - pub fn add_to_sharing>( + pub fn add_to_sharing + ?Sized>( &self, sharing: &mut S, ) -> Result<(), SerializeError> { @@ -28,7 +28,7 @@ impl<'a> ContextGuard<'a> { sharing.finish_sharing(CONTEXT_ADDR, self as *const _ as usize) } - pub fn sharing_context>( + pub fn sharing_context + ?Sized>( sharing: &'a mut S, ) -> Result<&'a dyn Any, SerializeError> { match sharing.start_sharing(CONTEXT_ADDR) { @@ -40,7 +40,7 @@ impl<'a> ContextGuard<'a> { } } - pub fn add_to_pooling>( + pub fn add_to_pooling + ?Sized>( &self, pooling: &mut P, ) -> Result<(), DeserializeError> { @@ -51,7 +51,7 @@ impl<'a> ContextGuard<'a> { } } - pub fn pooling_context>( + pub fn pooling_context + ?Sized>( pooling: &'a mut P, ) -> Result<&'a dyn Any, DeserializeError> { match pooling.start_pooling(CONTEXT_ADDR) { diff --git a/crates/rspack_cacheable/src/lib.rs b/crates/rspack_cacheable/src/lib.rs index aaf1941ed65e..9d09d29b261c 100644 --- a/crates/rspack_cacheable/src/lib.rs +++ b/crates/rspack_cacheable/src/lib.rs @@ -3,7 +3,6 @@ pub use rspack_macros::{cacheable, cacheable_dyn}; #[cfg(feature = "noop")] pub use rspack_macros::{disable_cacheable as cacheable, disable_cacheable_dyn as cacheable_dyn}; pub mod r#dyn; -pub mod utils; pub mod with; mod context; diff --git a/crates/rspack_cacheable/src/utils/mod.rs b/crates/rspack_cacheable/src/utils/mod.rs deleted file mode 100644 index 9e149f6f4cd3..000000000000 --- a/crates/rspack_cacheable/src/utils/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod type_wrapper; - -pub use type_wrapper::{TypeWrapper, TypeWrapperRef}; diff --git a/crates/rspack_cacheable/src/utils/type_wrapper.rs b/crates/rspack_cacheable/src/utils/type_wrapper.rs deleted file mode 100644 index 150e9599ad5e..000000000000 --- a/crates/rspack_cacheable/src/utils/type_wrapper.rs +++ /dev/null @@ -1,108 
+0,0 @@ -use rkyv::{ - bytecheck::{CheckBytes, StructCheckContext}, - rancor::{Fallible, Source, Trace}, - ser::{Allocator, Writer}, - string::{ArchivedString, StringResolver}, - vec::{ArchivedVec, VecResolver}, - Archive, Deserialize, Place, Portable, Serialize, -}; - -pub struct TypeWrapperRef<'a> { - pub type_name: &'a str, - pub bytes: &'a [u8], -} - -pub struct ArchivedTypeWrapper { - type_name: ArchivedString, - bytes: ArchivedVec, -} - -unsafe impl Portable for ArchivedTypeWrapper {} - -pub struct TypeWrapper { - pub type_name: String, - pub bytes: Vec, -} - -impl<'a> Archive for TypeWrapperRef<'a> { - type Archived = ArchivedTypeWrapper; - type Resolver = (StringResolver, VecResolver); - - #[inline] - fn resolve(&self, resolver: Self::Resolver, out: Place) { - let field_ptr = unsafe { &raw mut (*out.ptr()).type_name }; - let field_out = unsafe { Place::from_field_unchecked(out, field_ptr) }; - ArchivedString::resolve_from_str(self.type_name, resolver.0, field_out); - let field_ptr = unsafe { &raw mut (*out.ptr()).bytes }; - let field_out = unsafe { Place::from_field_unchecked(out, field_ptr) }; - ArchivedVec::resolve_from_len(self.bytes.len(), resolver.1, field_out); - } -} - -impl Archive for TypeWrapper { - type Archived = ArchivedTypeWrapper; - type Resolver = (StringResolver, VecResolver); - - #[inline] - fn resolve(&self, _resolver: Self::Resolver, _out: Place) { - unreachable!() - } -} - -impl<'a, S> Serialize for TypeWrapperRef<'a> -where - S: ?Sized + Fallible + Writer + Allocator, - S::Error: Source, -{ - #[inline] - fn serialize(&self, serializer: &mut S) -> Result { - Ok(( - ArchivedString::serialize_from_str(self.type_name, serializer)?, - ArchivedVec::serialize_from_slice(self.bytes, serializer)?, - )) - } -} - -unsafe impl CheckBytes for ArchivedTypeWrapper -where - ArchivedString: CheckBytes, - ArchivedVec: CheckBytes, - C: Fallible + ?Sized, - C::Error: Trace, -{ - unsafe fn check_bytes(bytes: *const Self, context: &mut C) -> Result<(), 
C::Error> { - ArchivedString::check_bytes(core::ptr::addr_of!((*bytes).type_name), context).map_err(|e| { - ::trace( - e, - StructCheckContext { - struct_name: "ArchivedTypeWrapper", - field_name: "type_name", - }, - ) - })?; - ArchivedVec::::check_bytes(core::ptr::addr_of!((*bytes).bytes), context).map_err(|e| { - ::trace( - e, - StructCheckContext { - struct_name: "ArchivedTypeWrapper", - field_name: "bytes", - }, - ) - })?; - Ok(()) - } -} - -impl Deserialize for ArchivedTypeWrapper -where - D: Fallible + ?Sized, - D::Error: Source, -{ - #[inline] - fn deserialize(&self, deserializer: &mut D) -> Result { - Ok(TypeWrapper { - type_name: Deserialize::::deserialize(&self.type_name, deserializer)?, - bytes: Deserialize::, D>::deserialize(&self.bytes, deserializer)?, - }) - } -} diff --git a/crates/rspack_cacheable/src/with/as.rs b/crates/rspack_cacheable/src/with/as.rs index 6167960f68cb..6610a11ac740 100644 --- a/crates/rspack_cacheable/src/with/as.rs +++ b/crates/rspack_cacheable/src/with/as.rs @@ -43,7 +43,7 @@ where impl SerializeWith for As where A: AsConverter + Archive + Serialize, - S: Fallible + Sharing, + S: Fallible + Sharing + ?Sized, { #[inline] fn serialize_with(field: &T, serializer: &mut S) -> Result { @@ -60,7 +60,7 @@ impl DeserializeWith, T, D> for As where A: AsConverter + Archive, A::Archived: Deserialize, - D: Fallible + Pooling, + D: Fallible + Pooling + ?Sized, { #[inline] fn deserialize_with(field: &Archived, de: &mut D) -> Result { diff --git a/crates/rspack_cacheable/src/with/as_cacheable.rs b/crates/rspack_cacheable/src/with/as_cacheable.rs index 8e5585c612bb..2532ce6e18a0 100644 --- a/crates/rspack_cacheable/src/with/as_cacheable.rs +++ b/crates/rspack_cacheable/src/with/as_cacheable.rs @@ -20,7 +20,7 @@ impl ArchiveWith for AsCacheable { impl SerializeWith for AsCacheable where T: Archive + Serialize, - S: ?Sized + Fallible, + S: Fallible + ?Sized, { #[inline] fn serialize_with(field: &T, serializer: &mut S) -> Result { @@ -32,7 +32,7 
@@ impl DeserializeWith, T, D> for AsCacheable where T: Archive, T::Archived: Deserialize, - D: ?Sized + Fallible, + D: Fallible + ?Sized, { #[inline] fn deserialize_with(field: &Archived, de: &mut D) -> Result { diff --git a/crates/rspack_cacheable/src/with/as_inner.rs b/crates/rspack_cacheable/src/with/as_inner.rs index 5d30ff326142..8b78295e2887 100644 --- a/crates/rspack_cacheable/src/with/as_inner.rs +++ b/crates/rspack_cacheable/src/with/as_inner.rs @@ -48,7 +48,7 @@ impl DeserializeWith for AsInner where T: AsInnerConverter, A: ArchiveWith + DeserializeWith, - D: ?Sized + Fallible, + D: Fallible + ?Sized, { fn deserialize_with(field: &A::Archived, d: &mut D) -> Result { Ok(T::from_inner(A::deserialize_with(field, d)?)) diff --git a/crates/rspack_cacheable/src/with/as_map.rs b/crates/rspack_cacheable/src/with/as_map.rs index 904e494a8816..538154741533 100644 --- a/crates/rspack_cacheable/src/with/as_map.rs +++ b/crates/rspack_cacheable/src/with/as_map.rs @@ -57,10 +57,11 @@ where } } -impl Serialize for Entry<&'_ K, &'_ V, WK, WV> +impl Serialize for Entry<&'_ K, &'_ V, WK, WV> where WK: SerializeWith, WV: SerializeWith, + S: Fallible + ?Sized, { #[inline] fn serialize(&self, serializer: &mut S) -> Result { @@ -90,7 +91,7 @@ where T: AsMapConverter, WK: ArchiveWith, WV: ArchiveWith, - S: Fallible + ?Sized + Allocator + Writer, + S: Fallible + Allocator + Writer + ?Sized, for<'a> Entry<&'a K, &'a V, WK, WV>: Serialize, { fn serialize_with(field: &T, s: &mut S) -> Result { diff --git a/crates/rspack_cacheable/src/with/as_preset/mod.rs b/crates/rspack_cacheable/src/with/as_preset/mod.rs index 6fd04954a6f0..c26358bd9414 100644 --- a/crates/rspack_cacheable/src/with/as_preset/mod.rs +++ b/crates/rspack_cacheable/src/with/as_preset/mod.rs @@ -2,7 +2,7 @@ mod camino; mod json; mod lightningcss; mod rspack_resolver; -mod rspack_source; +mod rspack_sources; mod serde_json; mod swc; mod ustr; diff --git a/crates/rspack_cacheable/src/with/as_preset/rspack_source/mod.rs 
b/crates/rspack_cacheable/src/with/as_preset/rspack_source/mod.rs deleted file mode 100644 index 89bafbe7774d..000000000000 --- a/crates/rspack_cacheable/src/with/as_preset/rspack_source/mod.rs +++ /dev/null @@ -1,103 +0,0 @@ -use std::sync::Arc; - -use rkyv::{ - rancor::Fallible, - ser::{Allocator, Writer}, - vec::{ArchivedVec, VecResolver}, - with::{ArchiveWith, DeserializeWith, SerializeWith}, - Place, -}; -use rspack_sources::{ - OriginalSource, RawSource, Source, SourceMap, SourceMapSource, SourceMapSourceOptions, -}; - -use super::AsPreset; -use crate::{ - utils::{TypeWrapper, TypeWrapperRef}, - DeserializeError, SerializeError, -}; - -pub struct SourceResolver { - inner: VecResolver, - len: usize, -} - -impl ArchiveWith> for AsPreset { - type Archived = ArchivedVec; - type Resolver = SourceResolver; - - #[inline] - fn resolve_with(_field: &Arc, resolver: Self::Resolver, out: Place) { - ArchivedVec::resolve_from_len(resolver.len, resolver.inner, out) - } -} - -// TODO add cacheable to rspack-sources -impl SerializeWith, S> for AsPreset -where - S: Fallible + Writer + Allocator, -{ - fn serialize_with( - field: &Arc, - serializer: &mut S, - ) -> Result { - let inner = field.as_ref().as_any(); - let bytes = if let Some(raw_source) = inner.downcast_ref::() { - let data = TypeWrapperRef { - type_name: "RawSource", - bytes: &raw_source.buffer(), - }; - crate::to_bytes(&data, &())? - } else if let Some(original_source) = inner.downcast_ref::() { - let source = original_source.source(); - let data = Some(TypeWrapperRef { - type_name: "OriginalSource", - bytes: source.as_bytes(), - }); - crate::to_bytes(&data, &())? - } else if let Some(source_map_source) = inner.downcast_ref::() { - let source = source_map_source.source(); - let data = Some(TypeWrapperRef { - type_name: "SourceMapSource", - bytes: source.as_bytes(), - }); - crate::to_bytes(&data, &())? 
- } else { - return Err(SerializeError::MessageError("unsupported rspack source")); - }; - Ok(SourceResolver { - inner: ArchivedVec::serialize_from_slice(&bytes, serializer)?, - len: bytes.len(), - }) - } -} - -impl DeserializeWith, Arc, D> for AsPreset -where - D: Fallible, -{ - fn deserialize_with( - field: &ArchivedVec, - _de: &mut D, - ) -> Result, DeserializeError> { - let TypeWrapper { type_name, bytes } = crate::from_bytes(field, &())?; - match type_name.as_str() { - // TODO change to enum - "RawSource" => Ok(Arc::new(RawSource::from(bytes))), - // TODO save original source name - "OriginalSource" => Ok(Arc::new(OriginalSource::new( - "a", - String::from_utf8(bytes).expect("unexpected bytes"), - ))), - "SourceMapSource" => Ok(Arc::new(SourceMapSource::new(SourceMapSourceOptions { - value: String::from_utf8(bytes).expect("unexpected bytes"), - name: String::from("a"), - source_map: SourceMap::default(), - original_source: None, - inner_source_map: None, - remove_original_source: true, - }))), - _ => Err(DeserializeError::MessageError("unsupported box source")), - } - } -} diff --git a/crates/rspack_cacheable/src/with/as_preset/rspack_sources/mod.rs b/crates/rspack_cacheable/src/with/as_preset/rspack_sources/mod.rs new file mode 100644 index 000000000000..c91cea71193e --- /dev/null +++ b/crates/rspack_cacheable/src/with/as_preset/rspack_sources/mod.rs @@ -0,0 +1,86 @@ +use rkyv::{ + rancor::Fallible, + ser::{Allocator, Writer}, + with::{ArchiveWith, DeserializeWith, SerializeWith}, + Archive, Archived, Deserialize, Place, Resolver, Serialize, +}; +use rspack_sources::{ + BoxSource, RawSource, Source, SourceExt, SourceMap, SourceMapSource, WithoutOriginalOptions, +}; + +use super::AsPreset; +use crate::{cacheable, DeserializeError, SerializeError}; + +#[cacheable(crate=crate)] +pub struct CacheableSource { + buffer: Vec, + map: Option, +} + +pub struct InnerResolver { + source: CacheableSource, + resolver: Resolver, +} + +impl ArchiveWith for AsPreset { + type 
Archived = Archived; + type Resolver = InnerResolver; + + #[inline] + fn resolve_with(_field: &BoxSource, resolver: Self::Resolver, out: Place) { + let InnerResolver { source, resolver } = resolver; + source.resolve(resolver, out) + } +} + +impl SerializeWith for AsPreset +where + S: Fallible + Allocator + Writer, +{ + fn serialize_with( + field: &BoxSource, + serializer: &mut S, + ) -> Result { + let map = match field.map(&Default::default()) { + Some(map) => Some( + map + .to_json() + .map_err(|_| SerializeError::MessageError("source map to json failed"))?, + ), + None => None, + }; + let source = CacheableSource { + buffer: field.buffer().to_vec(), + map, + }; + Ok(InnerResolver { + resolver: source.serialize(serializer)?, + source, + }) + } +} + +impl DeserializeWith, BoxSource, D> for AsPreset +where + D: Fallible, +{ + fn deserialize_with( + field: &Archived, + deserializer: &mut D, + ) -> Result { + let CacheableSource { buffer, map } = field.deserialize(deserializer)?; + if let Some(map) = &map { + if let Ok(source_map) = SourceMap::from_json(map) { + return Ok( + SourceMapSource::new(WithoutOriginalOptions { + value: String::from_utf8_lossy(&buffer), + name: "persistent-cache", + source_map, + }) + .boxed(), + ); + } + } + Ok(RawSource::from(buffer).boxed()) + } +} diff --git a/crates/rspack_cacheable/src/with/as_ref_str.rs b/crates/rspack_cacheable/src/with/as_ref_str.rs index 8d528d1f554d..ec1d1abaf784 100644 --- a/crates/rspack_cacheable/src/with/as_ref_str.rs +++ b/crates/rspack_cacheable/src/with/as_ref_str.rs @@ -31,7 +31,7 @@ where impl SerializeWith for AsRefStr where T: AsRefStrConverter, - S: ?Sized + Fallible + Writer, + S: Fallible + Writer + ?Sized, S::Error: Source, { #[inline] @@ -43,7 +43,7 @@ where impl DeserializeWith for AsRefStr where T: AsRefStrConverter, - D: ?Sized + Fallible, + D: Fallible + ?Sized, { #[inline] fn deserialize_with(field: &ArchivedString, _: &mut D) -> Result { diff --git 
a/crates/rspack_cacheable/src/with/as_string.rs b/crates/rspack_cacheable/src/with/as_string.rs index 82edfa18567b..a70037589664 100644 --- a/crates/rspack_cacheable/src/with/as_string.rs +++ b/crates/rspack_cacheable/src/with/as_string.rs @@ -39,7 +39,7 @@ where impl SerializeWith for AsString where T: AsStringConverter, - S: Fallible + Writer, + S: Fallible + Writer + ?Sized, { #[inline] fn serialize_with(field: &T, serializer: &mut S) -> Result { @@ -52,7 +52,7 @@ where impl DeserializeWith for AsString where T: AsStringConverter, - D: Fallible, + D: Fallible + ?Sized, { #[inline] fn deserialize_with(field: &ArchivedString, _: &mut D) -> Result { diff --git a/crates/rspack_cacheable/src/with/as_tuple2.rs b/crates/rspack_cacheable/src/with/as_tuple2.rs index 28c6c2105ee3..52e23de9b1cf 100644 --- a/crates/rspack_cacheable/src/with/as_tuple2.rs +++ b/crates/rspack_cacheable/src/with/as_tuple2.rs @@ -30,10 +30,11 @@ where } } -impl SerializeWith<(K, V), S> for AsTuple2 +impl SerializeWith<(K, V), S> for AsTuple2 where A: SerializeWith, B: SerializeWith, + S: Fallible + ?Sized, { #[inline] fn serialize_with(field: &(K, V), serializer: &mut S) -> Result { @@ -49,7 +50,7 @@ impl DeserializeWith, (K where A: ArchiveWith + DeserializeWith, B: ArchiveWith + DeserializeWith, - D: ?Sized + Fallible, + D: Fallible + ?Sized, { fn deserialize_with( field: &ArchivedTuple2, diff --git a/crates/rspack_cacheable/src/with/as_tuple3.rs b/crates/rspack_cacheable/src/with/as_tuple3.rs index 7ade245128a8..464ea248dcdb 100644 --- a/crates/rspack_cacheable/src/with/as_tuple3.rs +++ b/crates/rspack_cacheable/src/with/as_tuple3.rs @@ -34,11 +34,12 @@ where } } -impl SerializeWith<(K, V, H), S> for AsTuple3 +impl SerializeWith<(K, V, H), S> for AsTuple3 where A: SerializeWith, B: SerializeWith, C: SerializeWith, + S: Fallible + ?Sized, { #[inline] fn serialize_with(field: &(K, V, H), serializer: &mut S) -> Result { @@ -57,7 +58,7 @@ where A: ArchiveWith + DeserializeWith, B: ArchiveWith + 
DeserializeWith, C: ArchiveWith + DeserializeWith, - D: ?Sized + Fallible, + D: Fallible + ?Sized, { fn deserialize_with( field: &ArchivedTuple3, diff --git a/crates/rspack_cacheable/src/with/as_vec.rs b/crates/rspack_cacheable/src/with/as_vec.rs index fe92f29ca1e6..626055f5f3a3 100644 --- a/crates/rspack_cacheable/src/with/as_vec.rs +++ b/crates/rspack_cacheable/src/with/as_vec.rs @@ -79,7 +79,7 @@ where impl DeserializeWith, T, D> for AsVec where T: AsVecConverter, - D: Fallible, + D: Fallible + ?Sized, A: ArchiveWith + DeserializeWith, { fn deserialize_with(field: &ArchivedVec, d: &mut D) -> Result { diff --git a/crates/rspack_cacheable/src/with/inline.rs b/crates/rspack_cacheable/src/with/inline.rs new file mode 100644 index 000000000000..07a87fea12e0 --- /dev/null +++ b/crates/rspack_cacheable/src/with/inline.rs @@ -0,0 +1,35 @@ +use rkyv::{ + rancor::Fallible, + with::{ArchiveWith, SerializeWith}, + Place, +}; + +use crate::with::AsCacheable; + +pub struct Inline { + _inner: T, +} + +impl<'a, T, F> ArchiveWith<&'a F> for Inline +where + T: ArchiveWith, +{ + type Archived = T::Archived; + type Resolver = T::Resolver; + + #[inline] + fn resolve_with(field: &&F, resolver: Self::Resolver, out: Place) { + T::resolve_with(field, resolver, out) + } +} + +impl<'a, T, F, S> SerializeWith<&'a F, S> for Inline +where + T: SerializeWith, + S: Fallible + ?Sized, +{ + #[inline] + fn serialize_with(field: &&F, serializer: &mut S) -> Result { + T::serialize_with(field, serializer) + } +} diff --git a/crates/rspack_cacheable/src/with/mod.rs b/crates/rspack_cacheable/src/with/mod.rs index c0ee2335d9b4..0f33b4363345 100644 --- a/crates/rspack_cacheable/src/with/mod.rs +++ b/crates/rspack_cacheable/src/with/mod.rs @@ -9,6 +9,7 @@ mod as_string; mod as_tuple2; mod as_tuple3; mod as_vec; +mod inline; mod unsupported; pub use as_cacheable::AsCacheable; @@ -21,6 +22,7 @@ pub use as_string::{AsString, AsStringConverter}; pub use as_tuple2::AsTuple2; pub use as_tuple3::AsTuple3; pub 
use as_vec::{AsVec, AsVecConverter}; +pub use inline::Inline; pub use r#as::{As, AsConverter}; pub use rkyv::with::Map as AsOption; pub use rkyv::with::Skip; diff --git a/crates/rspack_cacheable/src/with/unsupported.rs b/crates/rspack_cacheable/src/with/unsupported.rs index 66841ffabcb6..7846a96516ee 100644 --- a/crates/rspack_cacheable/src/with/unsupported.rs +++ b/crates/rspack_cacheable/src/with/unsupported.rs @@ -17,7 +17,7 @@ impl ArchiveWith for Unsupported { impl SerializeWith for Unsupported where - S: Fallible, + S: Fallible + ?Sized, { fn serialize_with(_: &F, _: &mut S) -> Result<(), SerializeError> { Err(SerializeError::UnsupportedField) @@ -26,7 +26,7 @@ where impl DeserializeWith<(), F, D> for Unsupported where - D: Fallible, + D: Fallible + ?Sized, { fn deserialize_with(_: &(), _: &mut D) -> Result { Err(DeserializeError::UnsupportedField) diff --git a/crates/rspack_cacheable_test/Cargo.toml b/crates/rspack_cacheable_test/Cargo.toml index 16ea7992afa6..d99cdf48a24e 100644 --- a/crates/rspack_cacheable_test/Cargo.toml +++ b/crates/rspack_cacheable_test/Cargo.toml @@ -15,6 +15,7 @@ lightningcss = { workspace = true } once_cell = { workspace = true } rspack_cacheable = { path = "../rspack_cacheable" } rspack_resolver = { workspace = true } +rspack_sources = { workspace = true } rustc-hash = { workspace = true } serde_json = { workspace = true } swc_core = { workspace = true, features = ["ecma_ast"] } diff --git a/crates/rspack_cacheable_test/tests/macro/cacheable.rs b/crates/rspack_cacheable_test/tests/macro/cacheable.rs deleted file mode 100644 index 9120931c5fe6..000000000000 --- a/crates/rspack_cacheable_test/tests/macro/cacheable.rs +++ /dev/null @@ -1,158 +0,0 @@ -use rspack_cacheable::{ - cacheable, from_bytes, to_bytes, - with::{AsMap, AsRefStr, AsRefStrConverter}, -}; - -#[test] -fn basic_macro_feature() { - #[cacheable] - #[derive(Debug, PartialEq, Eq)] - struct Person { - name: String, - } - - let a = Person { - name: String::from("a"), - }; - 
let bytes = to_bytes(&a, &()).unwrap(); - let deserialize_a = from_bytes(&bytes, &()).unwrap(); - assert_eq!(a, deserialize_a); -} - -#[test] -fn hashable_attr() { - use rustc_hash::FxHashSet as HashSet; - #[cacheable(hashable)] - #[derive(Debug, Hash, PartialEq, Eq)] - struct Person { - name: String, - } - - let mut a = HashSet::default(); - a.insert(Person { - name: String::from("a"), - }); - let bytes = to_bytes(&a, &()).unwrap(); - let deserialize_a: HashSet = from_bytes(&bytes, &()).unwrap(); - assert_eq!(a, deserialize_a); -} - -#[test] -fn with_attr() { - #[derive(Debug, PartialEq, Eq)] - struct UnCacheable; - - #[cacheable(with=AsRefStr)] - #[derive(Debug, PartialEq, Eq)] - struct Person { - name: String, - uncacheable: UnCacheable, - } - impl AsRefStrConverter for Person { - fn as_str(&self) -> &str { - &self.name - } - fn from_str(s: &str) -> Self { - Self { - name: String::from(s), - uncacheable: UnCacheable, - } - } - } - - let a = Person { - name: String::from("a"), - uncacheable: UnCacheable, - }; - let bytes = to_bytes(&a, &()).unwrap(); - let deserialize_a = from_bytes(&bytes, &()).unwrap(); - assert_eq!(a, deserialize_a); -} - -#[test] -fn with_attr_with_generics() { - #[derive(Debug, Default, PartialEq, Eq)] - struct UnCacheable; - - #[cacheable(with=AsRefStr)] - #[derive(Debug, PartialEq, Eq)] - struct Person - where - T: Default, - { - name: String, - uncacheable: T, - } - impl AsRefStrConverter for Person { - fn as_str(&self) -> &str { - &self.name - } - fn from_str(s: &str) -> Self { - Self { - name: String::from(s), - uncacheable: Default::default(), - } - } - } - - let a = Person { - name: String::from("a"), - uncacheable: UnCacheable, - }; - let bytes = to_bytes(&a, &()).unwrap(); - let deserialize_a = from_bytes(&bytes, &()).unwrap(); - assert_eq!(a, deserialize_a); -} - -#[test] -fn omit_bounds_attr() { - use dashmap::DashMap; - - #[cacheable] - #[derive(Debug, Clone)] - struct Value { - id: String, - #[cacheable(omit_bounds, with=AsMap)] 
- map: DashMap, - #[cacheable(omit_bounds)] - children: Vec, - } - - let map = DashMap::default(); - map.insert( - String::from("a"), - Value { - id: String::from("a"), - map: DashMap::default(), - children: vec![], - }, - ); - map.insert( - String::from("b"), - Value { - id: String::from("b"), - map: DashMap::default(), - children: vec![], - }, - ); - let value = Value { - id: String::from("root"), - children: map.iter().map(|item| item.value().clone()).collect(), - map, - }; - let bytes = to_bytes(&value, &()).unwrap(); - let new_value: Value = from_bytes(&bytes, &()).unwrap(); - - assert_eq!(value.id, new_value.id); - for (key, value) in new_value.map { - assert!(key == "a" || key == "b"); - assert!(value.id == "a" || value.id == "b"); - assert_eq!(value.map.len(), 0); - assert_eq!(value.children.len(), 0); - } - for value in new_value.children { - assert!(value.id == "a" || value.id == "b"); - assert_eq!(value.map.len(), 0); - assert_eq!(value.children.len(), 0); - } -} diff --git a/crates/rspack_cacheable_test/tests/macro/cacheable/as_attr.rs b/crates/rspack_cacheable_test/tests/macro/cacheable/as_attr.rs new file mode 100644 index 000000000000..d1e950764431 --- /dev/null +++ b/crates/rspack_cacheable_test/tests/macro/cacheable/as_attr.rs @@ -0,0 +1,62 @@ +use rspack_cacheable::{ + cacheable, cacheable_dyn, from_bytes, to_bytes, + with::{AsOption, AsTuple2, AsVec, Inline}, +}; + +#[cacheable_dyn] +trait Module {} + +#[cacheable] +struct NormalModule { + inner: String, +} + +#[cacheable_dyn] +impl Module for NormalModule {} + +#[cacheable] +struct Data { + block1: String, + block2: Vec<(String, Option)>, + block3: Box, +} + +#[cacheable(as=Data)] +struct DataRef<'a> { + #[cacheable(with=Inline)] + block1: &'a String, + #[cacheable(with=AsVec>>)] + block2: Vec<(&'a String, Option<&'a String>)>, + #[allow(clippy::borrowed_box)] + #[cacheable(with=Inline)] + block3: &'a Box, +} + +#[test] +#[cfg_attr(miri, ignore)] +fn as_attr() { + let a = Data { + block1: 
"abc".into(), + block2: vec![ + ("key1".into(), None), + ("key2".into(), Some("value2".into())), + ("key3".into(), Some("value3".into())), + ], + block3: Box::new(NormalModule { + inner: "inner".into(), + }), + }; + let a_ref = DataRef { + block1: &a.block1, + block2: a + .block2 + .iter() + .map(|(key, value)| (key, value.as_ref())) + .collect(), + block3: &a.block3, + }; + let bytes = to_bytes(&a, &()).unwrap(); + let bytes_ref = to_bytes(&a_ref, &()).unwrap(); + assert_eq!(bytes, bytes_ref); + from_bytes::(&bytes, &()).unwrap(); +} diff --git a/crates/rspack_cacheable_test/tests/macro/cacheable/basic.rs b/crates/rspack_cacheable_test/tests/macro/cacheable/basic.rs new file mode 100644 index 000000000000..701eebdc7726 --- /dev/null +++ b/crates/rspack_cacheable_test/tests/macro/cacheable/basic.rs @@ -0,0 +1,17 @@ +use rspack_cacheable::{cacheable, from_bytes, to_bytes}; + +#[cacheable] +#[derive(Debug, PartialEq, Eq)] +struct Person { + name: String, +} + +#[test] +fn basic_macro_feature() { + let a = Person { + name: String::from("a"), + }; + let bytes = to_bytes(&a, &()).unwrap(); + let deserialize_a = from_bytes(&bytes, &()).unwrap(); + assert_eq!(a, deserialize_a); +} diff --git a/crates/rspack_cacheable_test/tests/macro/cacheable/hashable.rs b/crates/rspack_cacheable_test/tests/macro/cacheable/hashable.rs new file mode 100644 index 000000000000..b95e6fb051e8 --- /dev/null +++ b/crates/rspack_cacheable_test/tests/macro/cacheable/hashable.rs @@ -0,0 +1,19 @@ +use rspack_cacheable::{cacheable, from_bytes, to_bytes}; +use rustc_hash::FxHashSet as HashSet; + +#[cacheable(hashable)] +#[derive(Debug, Hash, PartialEq, Eq)] +struct Person { + name: String, +} + +#[test] +fn hashable_attr() { + let mut a = HashSet::default(); + a.insert(Person { + name: String::from("a"), + }); + let bytes = to_bytes(&a, &()).unwrap(); + let deserialize_a: HashSet = from_bytes(&bytes, &()).unwrap(); + assert_eq!(a, deserialize_a); +} diff --git 
a/crates/rspack_cacheable_test/tests/macro/cacheable/mod.rs b/crates/rspack_cacheable_test/tests/macro/cacheable/mod.rs new file mode 100644 index 000000000000..3aae63615e73 --- /dev/null +++ b/crates/rspack_cacheable_test/tests/macro/cacheable/mod.rs @@ -0,0 +1,6 @@ +mod as_attr; +mod basic; +mod hashable; +mod omit_bounds; +mod with_attr; +mod with_attr_and_generics; diff --git a/crates/rspack_cacheable_test/tests/macro/cacheable/omit_bounds.rs b/crates/rspack_cacheable_test/tests/macro/cacheable/omit_bounds.rs new file mode 100644 index 000000000000..7efcd160a93a --- /dev/null +++ b/crates/rspack_cacheable_test/tests/macro/cacheable/omit_bounds.rs @@ -0,0 +1,53 @@ +use dashmap::DashMap; +use rspack_cacheable::{cacheable, from_bytes, to_bytes, with::AsMap}; + +#[cacheable] +#[derive(Debug, Clone)] +struct Value { + id: String, + #[cacheable(omit_bounds, with=AsMap)] + map: DashMap, + #[cacheable(omit_bounds)] + children: Vec, +} + +#[test] +fn omit_bounds_attr() { + let map = DashMap::default(); + map.insert( + String::from("a"), + Value { + id: String::from("a"), + map: DashMap::default(), + children: vec![], + }, + ); + map.insert( + String::from("b"), + Value { + id: String::from("b"), + map: DashMap::default(), + children: vec![], + }, + ); + let value = Value { + id: String::from("root"), + children: map.iter().map(|item| item.value().clone()).collect(), + map, + }; + let bytes = to_bytes(&value, &()).unwrap(); + let new_value: Value = from_bytes(&bytes, &()).unwrap(); + + assert_eq!(value.id, new_value.id); + for (key, value) in new_value.map { + assert!(key == "a" || key == "b"); + assert!(value.id == "a" || value.id == "b"); + assert_eq!(value.map.len(), 0); + assert_eq!(value.children.len(), 0); + } + for value in new_value.children { + assert!(value.id == "a" || value.id == "b"); + assert_eq!(value.map.len(), 0); + assert_eq!(value.children.len(), 0); + } +} diff --git a/crates/rspack_cacheable_test/tests/macro/cacheable/with_attr.rs 
b/crates/rspack_cacheable_test/tests/macro/cacheable/with_attr.rs new file mode 100644 index 000000000000..4ce701922ce6 --- /dev/null +++ b/crates/rspack_cacheable_test/tests/macro/cacheable/with_attr.rs @@ -0,0 +1,37 @@ +use rspack_cacheable::{ + cacheable, from_bytes, to_bytes, + with::{AsRefStr, AsRefStrConverter}, +}; + +#[derive(Debug, PartialEq, Eq)] +struct UnCacheable; + +#[cacheable(with=AsRefStr)] +#[derive(Debug, PartialEq, Eq)] +struct Person { + name: String, + uncacheable: UnCacheable, +} + +impl AsRefStrConverter for Person { + fn as_str(&self) -> &str { + &self.name + } + fn from_str(s: &str) -> Self { + Self { + name: String::from(s), + uncacheable: UnCacheable, + } + } +} + +#[test] +fn with_attr() { + let a = Person { + name: String::from("a"), + uncacheable: UnCacheable, + }; + let bytes = to_bytes(&a, &()).unwrap(); + let deserialize_a = from_bytes(&bytes, &()).unwrap(); + assert_eq!(a, deserialize_a); +} diff --git a/crates/rspack_cacheable_test/tests/macro/cacheable/with_attr_and_generics.rs b/crates/rspack_cacheable_test/tests/macro/cacheable/with_attr_and_generics.rs new file mode 100644 index 000000000000..6acaf14ad956 --- /dev/null +++ b/crates/rspack_cacheable_test/tests/macro/cacheable/with_attr_and_generics.rs @@ -0,0 +1,39 @@ +use rspack_cacheable::{ + cacheable, from_bytes, to_bytes, + with::{AsRefStr, AsRefStrConverter}, +}; + +#[derive(Debug, Default, PartialEq, Eq)] +struct UnCacheable; + +#[cacheable(with=AsRefStr)] +#[derive(Debug, PartialEq, Eq)] +struct Person +where + T: Default, +{ + name: String, + uncacheable: T, +} +impl AsRefStrConverter for Person { + fn as_str(&self) -> &str { + &self.name + } + fn from_str(s: &str) -> Self { + Self { + name: String::from(s), + uncacheable: Default::default(), + } + } +} + +#[test] +fn with_attr_and_generics() { + let a = Person { + name: String::from("a"), + uncacheable: UnCacheable, + }; + let bytes = to_bytes(&a, &()).unwrap(); + let deserialize_a = from_bytes(&bytes, &()).unwrap(); 
+ assert_eq!(a, deserialize_a); +} diff --git a/crates/rspack_cacheable_test/tests/macro/manual_cacheable.rs b/crates/rspack_cacheable_test/tests/macro/manual_cacheable.rs deleted file mode 100644 index 9833026614c4..000000000000 --- a/crates/rspack_cacheable_test/tests/macro/manual_cacheable.rs +++ /dev/null @@ -1,258 +0,0 @@ -use rspack_cacheable::{ - from_bytes, to_bytes, - with::{AsMap, AsRefStr, AsRefStrConverter}, -}; - -#[test] -fn basic_macro_feature() { - #[derive( - rspack_cacheable::__private::rkyv::Archive, - rspack_cacheable::__private::rkyv::Deserialize, - rspack_cacheable::__private::rkyv::Serialize, - )] - #[rkyv(crate=rspack_cacheable::__private::rkyv)] - #[derive(Debug, PartialEq, Eq)] - struct Person { - name: String, - } - - let a = Person { - name: String::from("a"), - }; - let bytes = to_bytes(&a, &()).unwrap(); - let deserialize_a = from_bytes(&bytes, &()).unwrap(); - assert_eq!(a, deserialize_a); -} - -#[test] -fn hashable_attr() { - use rustc_hash::FxHashSet as HashSet; - #[derive( - rspack_cacheable::__private::rkyv::Archive, - rspack_cacheable::__private::rkyv::Deserialize, - rspack_cacheable::__private::rkyv::Serialize, - )] - #[rkyv(crate=rspack_cacheable::__private::rkyv)] - #[rkyv(derive(Hash, PartialEq, Eq))] - #[derive(Debug, Hash, PartialEq, Eq)] - struct Person { - name: String, - } - - let mut a = HashSet::default(); - a.insert(Person { - name: String::from("a"), - }); - let bytes = to_bytes(&a, &()).unwrap(); - let deserialize_a: HashSet = from_bytes(&bytes, &()).unwrap(); - assert_eq!(a, deserialize_a); -} - -#[test] -fn with_attr() { - #[derive(Debug, PartialEq, Eq)] - struct UnCacheable; - - #[derive(Debug, PartialEq, Eq)] - struct Person { - name: String, - uncacheable: UnCacheable, - } - impl AsRefStrConverter for Person { - fn as_str(&self) -> &str { - &self.name - } - fn from_str(s: &str) -> Self { - Self { - name: String::from(s), - uncacheable: UnCacheable, - } - } - } - - #[allow(non_upper_case_globals)] - const _: () 
= { - use rkyv::{ - rancor::Fallible, - with::{ArchiveWith, DeserializeWith, SerializeWith}, - Archive, Deserialize, Place, Serialize, - }; - use rspack_cacheable::__private::rkyv; - impl Archive for Person { - type Archived = >::Archived; - type Resolver = >::Resolver; - #[inline] - fn resolve(&self, resolver: Self::Resolver, out: Place) { - >::resolve_with(self, resolver, out) - } - } - impl Serialize for Person - where - S: Fallible + ?Sized, - AsRefStr: SerializeWith, - { - #[inline] - fn serialize(&self, serializer: &mut S) -> Result { - AsRefStr::serialize_with(self, serializer) - } - } - impl Deserialize for >::Archived - where - D: Fallible + ?Sized, - AsRefStr: DeserializeWith<>::Archived, Person, D>, - { - #[inline] - fn deserialize(&self, deserializer: &mut D) -> Result { - AsRefStr::deserialize_with(self, deserializer) - } - } - }; - - let a = Person { - name: String::from("a"), - uncacheable: UnCacheable, - }; - let bytes = to_bytes(&a, &()).unwrap(); - let deserialize_a = from_bytes(&bytes, &()).unwrap(); - assert_eq!(a, deserialize_a); -} - -#[test] -fn with_attr_with_generics() { - #[derive(Debug, Default, PartialEq, Eq)] - struct UnCacheable; - - #[derive(Debug, PartialEq, Eq)] - struct Person { - name: String, - uncacheable: T, - } - impl AsRefStrConverter for Person { - fn as_str(&self) -> &str { - &self.name - } - fn from_str(s: &str) -> Self { - Self { - name: String::from(s), - uncacheable: Default::default(), - } - } - } - - #[allow(non_upper_case_globals)] - const _: () = { - use rkyv::{ - rancor::Fallible, - with::{ArchiveWith, DeserializeWith, SerializeWith}, - Archive, Deserialize, Place, Serialize, - }; - use rspack_cacheable::__private::rkyv; - impl Archive for Person { - type Archived = >>::Archived; - type Resolver = >>::Resolver; - #[inline] - fn resolve(&self, resolver: Self::Resolver, out: Place) { - >>::resolve_with(self, resolver, out) - } - } - impl Serialize for Person - where - S: Fallible + ?Sized, - AsRefStr: SerializeWith, 
S>, - { - #[inline] - fn serialize(&self, serializer: &mut S) -> Result { - AsRefStr::serialize_with(self, serializer) - } - } - impl Deserialize, D> for >>::Archived - where - D: Fallible + ?Sized, - AsRefStr: DeserializeWith<>>::Archived, Person, D>, - { - #[inline] - fn deserialize(&self, deserializer: &mut D) -> Result, D::Error> { - AsRefStr::deserialize_with(self, deserializer) - } - } - }; - - let a = Person { - name: String::from("a"), - uncacheable: UnCacheable, - }; - let bytes = to_bytes(&a, &()).unwrap(); - let deserialize_a = from_bytes(&bytes, &()).unwrap(); - assert_eq!(a, deserialize_a); -} - -#[test] -fn omit_bounds_attr() { - use dashmap::DashMap; - - // reference: https://github.com/rkyv/rkyv/blob/739f53928d7c9c870b1d2072a9b73c80466f2a87/rkyv/examples/json_like_schema.rs#L45 - #[derive( - rspack_cacheable::__private::rkyv::Archive, - rspack_cacheable::__private::rkyv::Deserialize, - rspack_cacheable::__private::rkyv::Serialize, - )] - #[rkyv(crate=rspack_cacheable::__private::rkyv)] - #[rkyv(serialize_bounds( - __S: rspack_cacheable::__private::rkyv::ser::Writer + rspack_cacheable::__private::rkyv::ser::Allocator + rspack_cacheable::__private::rkyv::rancor::Fallible, - ))] - #[rkyv(deserialize_bounds( - __D: rspack_cacheable::__private::rkyv::rancor::Fallible - ))] - #[rkyv(bytecheck( - bounds( - __C: rspack_cacheable::__private::rkyv::validation::ArchiveContext + rspack_cacheable::__private::rkyv::rancor::Fallible, - ) - ))] - #[derive(Debug, Clone)] - struct Value { - id: String, - #[rkyv(omit_bounds)] - #[rkyv(with=AsMap)] - map: DashMap, - #[rkyv(omit_bounds)] - children: Vec, - } - - let map = DashMap::default(); - map.insert( - String::from("a"), - Value { - id: String::from("a"), - map: DashMap::default(), - children: vec![], - }, - ); - map.insert( - String::from("b"), - Value { - id: String::from("b"), - map: DashMap::default(), - children: vec![], - }, - ); - let value = Value { - id: String::from("root"), - children: 
map.iter().map(|item| item.value().clone()).collect(), - map, - }; - let bytes = to_bytes(&value, &()).unwrap(); - let new_value: Value = from_bytes(&bytes, &()).unwrap(); - - assert_eq!(value.id, new_value.id); - for (key, value) in new_value.map { - assert!(key == "a" || key == "b"); - assert!(value.id == "a" || value.id == "b"); - assert_eq!(value.map.len(), 0); - assert_eq!(value.children.len(), 0); - } - for value in new_value.children { - assert!(value.id == "a" || value.id == "b"); - assert_eq!(value.map.len(), 0); - assert_eq!(value.children.len(), 0); - } -} diff --git a/crates/rspack_cacheable_test/tests/macro/manual_cacheable/as_attr.rs b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/as_attr.rs new file mode 100644 index 000000000000..8fdccb1dca49 --- /dev/null +++ b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/as_attr.rs @@ -0,0 +1,85 @@ +use rspack_cacheable::{ + cacheable_dyn, from_bytes, to_bytes, + with::{AsOption, AsTuple2, AsVec, Inline}, +}; + +#[cacheable_dyn] +trait Module {} + +#[derive( + rspack_cacheable::__private::rkyv::Archive, + rspack_cacheable::__private::rkyv::Deserialize, + rspack_cacheable::__private::rkyv::Serialize, +)] +#[rkyv(crate=rspack_cacheable::__private::rkyv)] +struct NormalModule { + inner: String, +} + +#[cacheable_dyn] +impl Module for NormalModule {} + +#[derive( + rspack_cacheable::__private::rkyv::Archive, + rspack_cacheable::__private::rkyv::Deserialize, + rspack_cacheable::__private::rkyv::Serialize, +)] +#[rkyv(crate=rspack_cacheable::__private::rkyv)] +struct Data { + block1: String, + block2: Vec<(String, Option)>, + block3: Box, +} + +#[derive( + rspack_cacheable::__private::rkyv::Archive, rspack_cacheable::__private::rkyv::Serialize, +)] +#[rkyv(crate=rspack_cacheable::__private::rkyv, + as=rspack_cacheable::__private::rkyv::Archived)] +#[rkyv(serialize_bounds( + __S: rspack_cacheable::__private::rkyv::ser::Writer + rspack_cacheable::__private::rkyv::ser::Allocator + 
rspack_cacheable::__private::rkyv::rancor::Fallible, + Inline: rspack_cacheable::__private::rkyv::with::SerializeWith<&'a String, __S>, + AsVec>>: rspack_cacheable::__private::rkyv::with::SerializeWith)>, __S>, + Inline: rspack_cacheable::__private::rkyv::with::SerializeWith<&'a Box, __S> + ))] +struct DataRef<'a> { + #[rkyv(omit_bounds)] + #[rkyv(with=Inline)] + block1: &'a String, + #[rkyv(omit_bounds)] + #[rkyv(with=AsVec>>)] + block2: Vec<(&'a String, Option<&'a String>)>, + #[allow(clippy::borrowed_box)] + #[rkyv(omit_bounds)] + #[rkyv(with=Inline)] + block3: &'a Box, +} + +#[test] +#[cfg_attr(miri, ignore)] +fn as_attr() { + let a = Data { + block1: "abc".into(), + block2: vec![ + ("key1".into(), None), + ("key2".into(), Some("value2".into())), + ("key3".into(), Some("value3".into())), + ], + block3: Box::new(NormalModule { + inner: "inner".into(), + }), + }; + let a_ref = DataRef { + block1: &a.block1, + block2: a + .block2 + .iter() + .map(|(key, value)| (key, value.as_ref())) + .collect(), + block3: &a.block3, + }; + let bytes = to_bytes(&a, &()).unwrap(); + let bytes_ref = to_bytes(&a_ref, &()).unwrap(); + assert_eq!(bytes, bytes_ref); + from_bytes::(&bytes, &()).unwrap(); +} diff --git a/crates/rspack_cacheable_test/tests/macro/manual_cacheable/basic.rs b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/basic.rs new file mode 100644 index 000000000000..adc10ff04423 --- /dev/null +++ b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/basic.rs @@ -0,0 +1,22 @@ +use rspack_cacheable::{from_bytes, to_bytes}; + +#[derive( + rspack_cacheable::__private::rkyv::Archive, + rspack_cacheable::__private::rkyv::Deserialize, + rspack_cacheable::__private::rkyv::Serialize, +)] +#[rkyv(crate=rspack_cacheable::__private::rkyv)] +#[derive(Debug, PartialEq, Eq)] +struct Person { + name: String, +} + +#[test] +fn basic_macro_feature() { + let a = Person { + name: String::from("a"), + }; + let bytes = to_bytes(&a, &()).unwrap(); + let deserialize_a = 
from_bytes(&bytes, &()).unwrap(); + assert_eq!(a, deserialize_a); +} diff --git a/crates/rspack_cacheable_test/tests/macro/manual_cacheable/hashable.rs b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/hashable.rs new file mode 100644 index 000000000000..b8c07ee605c6 --- /dev/null +++ b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/hashable.rs @@ -0,0 +1,25 @@ +use rspack_cacheable::{from_bytes, to_bytes}; +use rustc_hash::FxHashSet as HashSet; + +#[derive( + rspack_cacheable::__private::rkyv::Archive, + rspack_cacheable::__private::rkyv::Deserialize, + rspack_cacheable::__private::rkyv::Serialize, +)] +#[rkyv(crate=rspack_cacheable::__private::rkyv)] +#[rkyv(derive(Hash, PartialEq, Eq))] +#[derive(Debug, Hash, PartialEq, Eq)] +struct Person { + name: String, +} + +#[test] +fn hashable_attr() { + let mut a = HashSet::default(); + a.insert(Person { + name: String::from("a"), + }); + let bytes = to_bytes(&a, &()).unwrap(); + let deserialize_a: HashSet = from_bytes(&bytes, &()).unwrap(); + assert_eq!(a, deserialize_a); +} diff --git a/crates/rspack_cacheable_test/tests/macro/manual_cacheable/mod.rs b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/mod.rs new file mode 100644 index 000000000000..3aae63615e73 --- /dev/null +++ b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/mod.rs @@ -0,0 +1,6 @@ +mod as_attr; +mod basic; +mod hashable; +mod omit_bounds; +mod with_attr; +mod with_attr_and_generics; diff --git a/crates/rspack_cacheable_test/tests/macro/manual_cacheable/omit_bounds.rs b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/omit_bounds.rs new file mode 100644 index 000000000000..76ac027918b1 --- /dev/null +++ b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/omit_bounds.rs @@ -0,0 +1,71 @@ +use dashmap::DashMap; +use rspack_cacheable::{from_bytes, to_bytes, with::AsMap}; + +// reference: https://github.com/rkyv/rkyv/blob/739f53928d7c9c870b1d2072a9b73c80466f2a87/rkyv/examples/json_like_schema.rs#L45 
+#[derive( + rspack_cacheable::__private::rkyv::Archive, + rspack_cacheable::__private::rkyv::Deserialize, + rspack_cacheable::__private::rkyv::Serialize, +)] +#[rkyv(crate=rspack_cacheable::__private::rkyv)] +#[rkyv(serialize_bounds( + __S: rspack_cacheable::__private::rkyv::ser::Writer + rspack_cacheable::__private::rkyv::ser::Allocator + rspack_cacheable::__private::rkyv::rancor::Fallible, + ))] +#[rkyv(deserialize_bounds( + __D: rspack_cacheable::__private::rkyv::rancor::Fallible + ))] +#[rkyv(bytecheck( + bounds( + __C: rspack_cacheable::__private::rkyv::validation::ArchiveContext + rspack_cacheable::__private::rkyv::rancor::Fallible, + ) + ))] +#[derive(Debug, Clone)] +struct Value { + id: String, + #[rkyv(omit_bounds)] + #[rkyv(with=AsMap)] + map: DashMap, + #[rkyv(omit_bounds)] + children: Vec, +} + +#[test] +fn omit_bounds_attr() { + let map = DashMap::default(); + map.insert( + String::from("a"), + Value { + id: String::from("a"), + map: DashMap::default(), + children: vec![], + }, + ); + map.insert( + String::from("b"), + Value { + id: String::from("b"), + map: DashMap::default(), + children: vec![], + }, + ); + let value = Value { + id: String::from("root"), + children: map.iter().map(|item| item.value().clone()).collect(), + map, + }; + let bytes = to_bytes(&value, &()).unwrap(); + let new_value: Value = from_bytes(&bytes, &()).unwrap(); + + assert_eq!(value.id, new_value.id); + for (key, value) in new_value.map { + assert!(key == "a" || key == "b"); + assert!(value.id == "a" || value.id == "b"); + assert_eq!(value.map.len(), 0); + assert_eq!(value.children.len(), 0); + } + for value in new_value.children { + assert!(value.id == "a" || value.id == "b"); + assert_eq!(value.map.len(), 0); + assert_eq!(value.children.len(), 0); + } +} diff --git a/crates/rspack_cacheable_test/tests/macro/manual_cacheable/with_attr.rs b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/with_attr.rs new file mode 100644 index 000000000000..77b337686a90 --- /dev/null 
+++ b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/with_attr.rs @@ -0,0 +1,73 @@ +use rspack_cacheable::{ + from_bytes, to_bytes, + with::{AsRefStr, AsRefStrConverter}, +}; + +#[derive(Debug, PartialEq, Eq)] +struct UnCacheable; + +#[derive(Debug, PartialEq, Eq)] +struct Person { + name: String, + uncacheable: UnCacheable, +} +impl AsRefStrConverter for Person { + fn as_str(&self) -> &str { + &self.name + } + fn from_str(s: &str) -> Self { + Self { + name: String::from(s), + uncacheable: UnCacheable, + } + } +} + +#[allow(non_upper_case_globals)] +const _: () = { + use rkyv::{ + rancor::Fallible, + with::{ArchiveWith, DeserializeWith, SerializeWith}, + Archive, Deserialize, Place, Serialize, + }; + use rspack_cacheable::__private::rkyv; + impl Archive for Person { + type Archived = >::Archived; + type Resolver = >::Resolver; + #[inline] + fn resolve(&self, resolver: Self::Resolver, out: Place) { + >::resolve_with(self, resolver, out) + } + } + impl Serialize for Person + where + S: Fallible + ?Sized, + AsRefStr: SerializeWith, + { + #[inline] + fn serialize(&self, serializer: &mut S) -> Result { + AsRefStr::serialize_with(self, serializer) + } + } + impl Deserialize for >::Archived + where + D: Fallible + ?Sized, + AsRefStr: DeserializeWith<>::Archived, Person, D>, + { + #[inline] + fn deserialize(&self, deserializer: &mut D) -> Result { + AsRefStr::deserialize_with(self, deserializer) + } + } +}; + +#[test] +fn with_attr() { + let a = Person { + name: String::from("a"), + uncacheable: UnCacheable, + }; + let bytes = to_bytes(&a, &()).unwrap(); + let deserialize_a = from_bytes(&bytes, &()).unwrap(); + assert_eq!(a, deserialize_a); +} diff --git a/crates/rspack_cacheable_test/tests/macro/manual_cacheable/with_attr_and_generics.rs b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/with_attr_and_generics.rs new file mode 100644 index 000000000000..b01c0fa17a39 --- /dev/null +++ 
b/crates/rspack_cacheable_test/tests/macro/manual_cacheable/with_attr_and_generics.rs @@ -0,0 +1,73 @@ +use rspack_cacheable::{ + from_bytes, to_bytes, + with::{AsRefStr, AsRefStrConverter}, +}; + +#[derive(Debug, Default, PartialEq, Eq)] +struct UnCacheable; + +#[derive(Debug, PartialEq, Eq)] +struct Person { + name: String, + uncacheable: T, +} +impl AsRefStrConverter for Person { + fn as_str(&self) -> &str { + &self.name + } + fn from_str(s: &str) -> Self { + Self { + name: String::from(s), + uncacheable: Default::default(), + } + } +} + +#[allow(non_upper_case_globals)] +const _: () = { + use rkyv::{ + rancor::Fallible, + with::{ArchiveWith, DeserializeWith, SerializeWith}, + Archive, Deserialize, Place, Serialize, + }; + use rspack_cacheable::__private::rkyv; + impl Archive for Person { + type Archived = >>::Archived; + type Resolver = >>::Resolver; + #[inline] + fn resolve(&self, resolver: Self::Resolver, out: Place) { + >>::resolve_with(self, resolver, out) + } + } + impl Serialize for Person + where + S: Fallible + ?Sized, + AsRefStr: SerializeWith, S>, + { + #[inline] + fn serialize(&self, serializer: &mut S) -> Result { + AsRefStr::serialize_with(self, serializer) + } + } + impl Deserialize, D> for >>::Archived + where + D: Fallible + ?Sized, + AsRefStr: DeserializeWith<>>::Archived, Person, D>, + { + #[inline] + fn deserialize(&self, deserializer: &mut D) -> Result, D::Error> { + AsRefStr::deserialize_with(self, deserializer) + } + } +}; + +#[test] +fn with_attr_with_generics() { + let a = Person { + name: String::from("a"), + uncacheable: UnCacheable, + }; + let bytes = to_bytes(&a, &()).unwrap(); + let deserialize_a = from_bytes(&bytes, &()).unwrap(); + assert_eq!(a, deserialize_a); +} diff --git a/crates/rspack_cacheable_test/tests/with/as_preset/mod.rs b/crates/rspack_cacheable_test/tests/with/as_preset/mod.rs index 4fa36e170918..6b17ac2fa69a 100644 --- a/crates/rspack_cacheable_test/tests/with/as_preset/mod.rs +++ 
b/crates/rspack_cacheable_test/tests/with/as_preset/mod.rs @@ -2,6 +2,7 @@ mod camino; mod json; mod lightningcss; mod rspack_resolver; +mod rspack_sources; mod serde_json; mod swc; mod ustr; diff --git a/crates/rspack_cacheable_test/tests/with/as_preset/rspack_sources.rs b/crates/rspack_cacheable_test/tests/with/as_preset/rspack_sources.rs new file mode 100644 index 000000000000..7f1c1a538ae5 --- /dev/null +++ b/crates/rspack_cacheable_test/tests/with/as_preset/rspack_sources.rs @@ -0,0 +1,22 @@ +use rspack_cacheable::{cacheable, from_bytes, to_bytes, with::AsPreset}; +use rspack_sources::{BoxSource, RawSource, SourceExt}; + +#[cacheable] +#[derive(Debug)] +struct Data(#[cacheable(with=AsPreset)] BoxSource); + +#[test] +fn test_rspack_source() { + fn test_data(data: Data) { + let bytes = to_bytes(&data, &()).unwrap(); + let new_data: Data = from_bytes(&bytes, &()).unwrap(); + assert_eq!(data.0.buffer(), new_data.0.buffer()); + assert_eq!( + data.0.map(&Default::default()), + new_data.0.map(&Default::default()) + ); + } + + test_data(Data(RawSource::from("123".as_bytes()).boxed())); + test_data(Data(RawSource::from("123").boxed())); +} diff --git a/crates/rspack_cacheable_test/tests/with/inline.rs b/crates/rspack_cacheable_test/tests/with/inline.rs new file mode 100644 index 000000000000..8aa83042dd8b --- /dev/null +++ b/crates/rspack_cacheable_test/tests/with/inline.rs @@ -0,0 +1,38 @@ +use rspack_cacheable::{ + cacheable, from_bytes, + with::{AsCacheable, AsTuple2, Inline}, +}; + +#[cacheable] +#[derive(Debug, PartialEq, Eq)] +struct Data { + block1: String, + block2: (String, String), +} + +#[cacheable] +struct DataRef<'a> { + #[cacheable(with=Inline)] + block1: &'a String, + #[cacheable(with=AsTuple2)] + block2: (String, &'a String), +} + +#[test] +fn test_inline() { + let data = Data { + block1: "block1".into(), + block2: ("block2_key".into(), "block2_value".into()), + }; + let bytes = rspack_cacheable::to_bytes(&data, &()).unwrap(); + + let data_ref = DataRef 
{ + block1: &data.block1, + block2: (data.block2.0.clone(), &data.block2.1), + }; + let bytes_ref = rspack_cacheable::to_bytes(&data_ref, &()).unwrap(); + assert_eq!(bytes, bytes_ref); + + let new_data: Data = from_bytes(&bytes, &()).unwrap(); + assert_eq!(data, new_data); +} diff --git a/crates/rspack_cacheable_test/tests/with/mod.rs b/crates/rspack_cacheable_test/tests/with/mod.rs index 51ae754ddf5d..667d07dc29bc 100644 --- a/crates/rspack_cacheable_test/tests/with/mod.rs +++ b/crates/rspack_cacheable_test/tests/with/mod.rs @@ -9,4 +9,5 @@ mod as_string; mod as_tuple2; mod as_tuple3; mod as_vec; +mod inline; mod unsupported; diff --git a/crates/rspack_core/src/cache/mod.rs b/crates/rspack_core/src/cache/mod.rs index aeaafa1669e2..8a36d571faa0 100644 --- a/crates/rspack_core/src/cache/mod.rs +++ b/crates/rspack_core/src/cache/mod.rs @@ -4,7 +4,7 @@ pub mod persistent; use std::{fmt::Debug, sync::Arc}; -use rspack_fs::SyncReadableFileSystem; +use rspack_fs::FileSystem; use self::{disable::DisableCache, memory::MemoryCache, persistent::PersistentCache}; use crate::{Compilation, CompilerOptions, ExperimentCacheOptions}; @@ -26,10 +26,7 @@ pub trait Cache: Debug + Send + Sync { fn after_compile(&self, _compilation: &Compilation) {} } -pub fn new_cache( - compiler_option: Arc, - fs: Arc, -) -> Arc { +pub fn new_cache(compiler_option: Arc, fs: Arc) -> Arc { match &compiler_option.experiments.cache { ExperimentCacheOptions::Disabled => Arc::new(DisableCache), ExperimentCacheOptions::Memory => Arc::new(MemoryCache), diff --git a/crates/rspack_core/src/cache/persistent/mod.rs b/crates/rspack_core/src/cache/persistent/mod.rs index d237b8c4780f..e27015182b10 100644 --- a/crates/rspack_core/src/cache/persistent/mod.rs +++ b/crates/rspack_core/src/cache/persistent/mod.rs @@ -3,7 +3,7 @@ pub mod storage; use std::sync::Arc; -use rspack_fs::SyncReadableFileSystem; +use rspack_fs::FileSystem; use rspack_paths::{AssertUtf8, Utf8PathBuf}; use rustc_hash::FxHashSet as HashSet; @@ 
-28,7 +28,7 @@ pub struct PersistentCache { } impl PersistentCache { - pub fn new(option: &PersistentCacheOptions, fs: Arc) -> Self { + pub fn new(option: &PersistentCacheOptions, fs: Arc) -> Self { let storage = Arc::new(MemoryStorage::default()); Self { snapshot: Snapshot::new(option.snapshot.clone(), fs, storage.clone()), diff --git a/crates/rspack_core/src/cache/persistent/snapshot/mod.rs b/crates/rspack_core/src/cache/persistent/snapshot/mod.rs index 78235b1fac2b..63a41aae9d7f 100644 --- a/crates/rspack_core/src/cache/persistent/snapshot/mod.rs +++ b/crates/rspack_core/src/cache/persistent/snapshot/mod.rs @@ -4,7 +4,7 @@ mod strategy; use std::sync::Arc; use rspack_cacheable::{from_bytes, to_bytes}; -use rspack_fs::SyncReadableFileSystem; +use rspack_fs::FileSystem; use rspack_paths::Utf8PathBuf; use rustc_hash::FxHashSet as HashSet; @@ -25,16 +25,12 @@ pub struct Snapshot { // 1. update compiler.input_file_system to async file system // 2. update this fs to AsyncReadableFileSystem // 3. 
update add/calc_modified_files to async fn - fs: Arc, + fs: Arc, storage: Arc, } impl Snapshot { - pub fn new( - options: SnapshotOptions, - fs: Arc, - storage: Arc, - ) -> Self { + pub fn new(options: SnapshotOptions, fs: Arc, storage: Arc) -> Self { Self { options, fs, @@ -107,14 +103,14 @@ impl Snapshot { mod tests { use std::sync::Arc; - use rspack_fs::{MemoryFileSystem, SyncWritableFileSystem}; + use rspack_fs::{MemoryFileSystem, WritableFileSystem}; use rspack_paths::Utf8PathBuf; use super::super::MemoryStorage; use super::{PathMatcher, Snapshot, SnapshotOptions}; - #[test] - fn should_snapshot_work() { + #[tokio::test] + async fn should_snapshot_work() { let fs = Arc::new(MemoryFileSystem::default()); let storage = Arc::new(MemoryStorage::default()); let options = SnapshotOptions::new( @@ -123,23 +119,31 @@ mod tests { vec![PathMatcher::String("node_modules".into())], ); - fs.create_dir_all("/node_modules/project".into()).unwrap(); - fs.create_dir_all("/node_modules/lib".into()).unwrap(); - fs.write("/file1".into(), "abc".as_bytes()).unwrap(); - fs.write("/constant".into(), "abc".as_bytes()).unwrap(); + fs.create_dir_all("/node_modules/project".into()) + .await + .unwrap(); + fs.create_dir_all("/node_modules/lib".into()).await.unwrap(); + fs.write("/file1".into(), "abc".as_bytes()).await.unwrap(); + fs.write("/constant".into(), "abc".as_bytes()) + .await + .unwrap(); fs.write( "/node_modules/project/package.json".into(), r#"{"version":"1.0.0"}"#.as_bytes(), ) + .await .unwrap(); fs.write("/node_modules/project/file1".into(), "abc".as_bytes()) + .await .unwrap(); fs.write( "/node_modules/lib/package.json".into(), r#"{"version":"1.1.0"}"#.as_bytes(), ) + .await .unwrap(); fs.write("/node_modules/lib/file1".into(), "abc".as_bytes()) + .await .unwrap(); let snapshot = Snapshot::new(options, fs.clone(), storage); @@ -153,11 +157,15 @@ mod tests { .iter(), ); std::thread::sleep(std::time::Duration::from_millis(100)); - fs.write("/file1".into(), 
"abcd".as_bytes()).unwrap(); - fs.write("/constant".into(), "abcd".as_bytes()).unwrap(); + fs.write("/file1".into(), "abcd".as_bytes()).await.unwrap(); + fs.write("/constant".into(), "abcd".as_bytes()) + .await + .unwrap(); fs.write("/node_modules/project/file1".into(), "abcd".as_bytes()) + .await .unwrap(); fs.write("/node_modules/lib/file1".into(), "abcd".as_bytes()) + .await .unwrap(); let (modified_paths, deleted_paths) = snapshot.calc_modified_paths(); @@ -171,6 +179,7 @@ mod tests { "/node_modules/lib/package.json".into(), r#"{"version":"1.3.0"}"#.as_bytes(), ) + .await .unwrap(); snapshot.add(["/file1".into()].iter()); let (modified_paths, deleted_paths) = snapshot.calc_modified_paths(); diff --git a/crates/rspack_core/src/cache/persistent/snapshot/strategy.rs b/crates/rspack_core/src/cache/persistent/snapshot/strategy.rs index c5cabb863f88..299cab2ed65b 100644 --- a/crates/rspack_core/src/cache/persistent/snapshot/strategy.rs +++ b/crates/rspack_core/src/cache/persistent/snapshot/strategy.rs @@ -4,7 +4,7 @@ use std::{ }; use rspack_cacheable::cacheable; -use rspack_fs::SyncReadableFileSystem; +use rspack_fs::FileSystem; use rspack_paths::Utf8PathBuf; use rustc_hash::FxHashMap as HashMap; @@ -36,12 +36,12 @@ pub enum ValidateResult { } pub struct StrategyHelper { - fs: Arc, + fs: Arc, package_version_cache: HashMap>, } impl StrategyHelper { - pub fn new(fs: Arc) -> Self { + pub fn new(fs: Arc) -> Self { Self { fs, package_version_cache: Default::default(), @@ -132,26 +132,28 @@ impl StrategyHelper { mod tests { use std::sync::Arc; - use rspack_fs::{MemoryFileSystem, SyncReadableFileSystem, SyncWritableFileSystem}; + use rspack_fs::{MemoryFileSystem, ReadableFileSystem, WritableFileSystem}; use super::{Strategy, StrategyHelper, ValidateResult}; - #[test] - fn should_strategy_works() { + #[tokio::test] + async fn should_strategy_works() { let fs = Arc::new(MemoryFileSystem::default()); - fs.create_dir_all("/packages/p1".into()).unwrap(); - 
fs.create_dir_all("/packages/p2".into()).unwrap(); + fs.create_dir_all("/packages/p1".into()).await.unwrap(); + fs.create_dir_all("/packages/p2".into()).await.unwrap(); fs.write( "/packages/p1/package.json".into(), r#"{"version": "1.0.0"}"#.as_bytes(), ) + .await .unwrap(); fs.write( "/packages/p2/package.json".into(), r#"{"version": "1.1.0"}"#.as_bytes(), ) + .await .unwrap(); - fs.write("/file1".into(), "abc".as_bytes()).unwrap(); + fs.write("/file1".into(), "abc".as_bytes()).await.unwrap(); // compile_time let Strategy::CompileTime(time1) = StrategyHelper::compile_time() else { @@ -222,7 +224,7 @@ mod tests { ValidateResult::NoChanged )); std::thread::sleep(std::time::Duration::from_millis(100)); - fs.write("/file1".into(), "abcd".as_bytes()).unwrap(); + fs.write("/file1".into(), "abcd".as_bytes()).await.unwrap(); assert!(matches!( helper.validate(&"/file1".into(), &now), ValidateResult::Modified diff --git a/crates/rspack_core/src/chunk.rs b/crates/rspack_core/src/chunk.rs index 92b8c92a695d..04c87f5dfc8f 100644 --- a/crates/rspack_core/src/chunk.rs +++ b/crates/rspack_core/src/chunk.rs @@ -5,11 +5,13 @@ use std::{fmt::Debug, hash::Hash}; use indexmap::IndexMap; use itertools::Itertools; use rspack_collections::{DatabaseItem, UkeyIndexMap, UkeyIndexSet, UkeyMap, UkeySet}; +use rspack_error::Diagnostic; use rspack_hash::{RspackHash, RspackHashDigest}; use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet, FxHasher}; use crate::{ compare_chunk_group, merge_runtime, sort_group_by_index, ChunkGraph, ChunkGroupOrderKey, + RenderManifestEntry, }; use crate::{ChunkGroupByUkey, ChunkGroupUkey, ChunkUkey, SourceType}; use crate::{Compilation, EntryOptions, Filename, ModuleGraph, RuntimeSpec}; @@ -42,6 +44,12 @@ impl ChunkHashesResult { } } +#[derive(Debug, Clone)] +pub struct ChunkRenderResult { + pub manifests: Vec, + pub diagnostics: Vec, +} + #[derive(Debug, Clone)] pub struct Chunk { ukey: ChunkUkey, diff --git a/crates/rspack_core/src/compiler/compilation.rs 
b/crates/rspack_core/src/compiler/compilation.rs index 318490a8fe09..478e28d1852f 100644 --- a/crates/rspack_core/src/compiler/compilation.rs +++ b/crates/rspack_core/src/compiler/compilation.rs @@ -14,7 +14,7 @@ use rspack_collections::{ DatabaseItem, Identifiable, IdentifierDashMap, IdentifierMap, IdentifierSet, UkeyMap, UkeySet, }; use rspack_error::{error, miette::diagnostic, Diagnostic, DiagnosticExt, Result, Severity}; -use rspack_fs::SyncReadableFileSystem; +use rspack_fs::FileSystem; use rspack_futures::FuturesResults; use rspack_hash::{RspackHash, RspackHashDigest}; use rspack_hook::define_hook; @@ -39,12 +39,12 @@ use crate::{ old_cache::{use_code_splitting_cache, Cache as OldCache, CodeSplittingCache}, to_identifier, BoxDependency, BoxModule, CacheCount, CacheOptions, Chunk, ChunkByUkey, ChunkContentHash, ChunkGraph, ChunkGroupByUkey, ChunkGroupUkey, ChunkHashesResult, ChunkKind, - ChunkUkey, CodeGenerationJob, CodeGenerationResult, CodeGenerationResults, CompilationLogger, - CompilationLogging, CompilerOptions, DependencyId, DependencyType, Entry, EntryData, - EntryOptions, EntryRuntime, Entrypoint, ExecuteModuleId, Filename, ImportVarMap, LocalFilenameFn, - Logger, ModuleFactory, ModuleGraph, ModuleGraphPartial, ModuleIdentifier, PathData, - ResolverFactory, RuntimeGlobals, RuntimeModule, RuntimeSpecMap, SharedPluginDriver, SourceType, - Stats, + ChunkRenderResult, ChunkUkey, CodeGenerationJob, CodeGenerationResult, CodeGenerationResults, + CompilationLogger, CompilationLogging, CompilerOptions, DependencyId, DependencyType, Entry, + EntryData, EntryOptions, EntryRuntime, Entrypoint, ExecuteModuleId, Filename, ImportVarMap, + LocalFilenameFn, Logger, ModuleFactory, ModuleGraph, ModuleGraphPartial, ModuleIdentifier, + PathData, ResolverFactory, RuntimeGlobals, RuntimeModule, RuntimeSpecMap, SharedPluginDriver, + SourceType, Stats, }; pub type BuildDependency = ( @@ -175,7 +175,7 @@ pub struct Compilation { pub cgm_runtime_requirements_results: 
CgmRuntimeRequirementsResults, pub cgc_runtime_requirements_results: UkeyMap, pub chunk_hashes_results: UkeyMap, - pub chunk_render_results: UkeyMap, Vec)>, + pub chunk_render_results: UkeyMap, pub built_modules: IdentifierSet, pub code_generated_modules: IdentifierSet, pub build_time_executed_modules: IdentifierSet, @@ -201,7 +201,7 @@ pub struct Compilation { pub modified_files: HashSet, pub removed_files: HashSet, make_artifact: MakeArtifact, - pub input_filesystem: Arc, + pub input_filesystem: Arc, } impl Compilation { @@ -238,7 +238,7 @@ impl Compilation { module_executor: Option, modified_files: HashSet, removed_files: HashSet, - input_filesystem: Arc, + input_filesystem: Arc, ) -> Self { let incremental = Incremental::new(options.experiments.incremental); Self { @@ -972,15 +972,21 @@ impl Compilation { let chunk_render_results = chunks .iter() .map(|chunk| async { - let mut manifest = Vec::new(); + let mut manifests = Vec::new(); let mut diagnostics = Vec::new(); plugin_driver .compilation_hooks .render_manifest - .call(self, chunk, &mut manifest, &mut diagnostics) + .call(self, chunk, &mut manifests, &mut diagnostics) .await?; - Ok((*chunk, (manifest, diagnostics))) + Ok(( + *chunk, + ChunkRenderResult { + manifests, + diagnostics, + }, + )) }) .collect::>>(); let chunk_render_results = chunk_render_results @@ -995,11 +1001,18 @@ impl Compilation { chunk_render_results }; - for (chunk_ukey, (manifest, diagnostics)) in chunk_ukey_and_manifest { + for ( + chunk_ukey, + ChunkRenderResult { + manifests, + diagnostics, + }, + ) in chunk_ukey_and_manifest + { self.extend_diagnostics(diagnostics); - for file_manifest in manifest { - let filename = file_manifest.filename().to_string(); + for file_manifest in manifests { + let filename = file_manifest.filename; let current_chunk = self.chunk_by_ukey.expect_get_mut(&chunk_ukey); current_chunk.set_rendered(true); @@ -1996,13 +2009,13 @@ impl Compilation { &'a self, filename: &Filename, mut data: PathData<'b>, - ) -> 
Result<(String, AssetInfo), F::Error> { - let mut info = AssetInfo::default(); + info: &mut AssetInfo, + ) -> Result { if data.hash.is_none() { data.hash = self.get_hash(); } - let path = filename.render(data, Some(&mut info))?; - Ok((path, info)) + let path = filename.render(data, Some(info))?; + Ok(path) } pub fn get_asset_path( @@ -2324,42 +2337,10 @@ pub fn set_depth_if_lower( #[derive(Debug, Clone)] pub struct RenderManifestEntry { pub source: BoxSource, - filename: String, + pub filename: String, + pub has_filename: bool, /* webpack only asset has filename, js/css/wasm has filename template */ pub info: AssetInfo, - // pub identifier: String, - // hash?: string; - pub(crate) auxiliary: bool, - has_filename: bool, /* webpack only asset has filename, js/css/wasm has filename template */ -} - -impl RenderManifestEntry { - pub fn new( - source: BoxSource, - filename: String, - info: AssetInfo, - auxiliary: bool, - has_filename: bool, - ) -> Self { - Self { - source, - filename, - info, - auxiliary, - has_filename, - } - } - - pub fn source(&self) -> &BoxSource { - &self.source - } - - pub fn filename(&self) -> &str { - &self.filename - } - - pub fn has_filename(&self) -> bool { - self.has_filename - } + pub auxiliary: bool, } fn process_runtime_requirement_hook( diff --git a/crates/rspack_core/src/compiler/make/repair/build.rs b/crates/rspack_core/src/compiler/make/repair/build.rs index 0f8bd1f30b42..7ab85f2b7d5a 100644 --- a/crates/rspack_core/src/compiler/make/repair/build.rs +++ b/crates/rspack_core/src/compiler/make/repair/build.rs @@ -1,7 +1,7 @@ use std::{collections::VecDeque, sync::Arc}; use rspack_error::{Diagnostic, IntoTWithDiagnosticArray}; -use rspack_fs::SyncReadableFileSystem; +use rspack_fs::FileSystem; use super::{process_dependencies::ProcessDependenciesTask, MakeTaskContext}; use crate::{ @@ -18,7 +18,7 @@ pub struct BuildTask { pub resolver_factory: Arc, pub compiler_options: Arc, pub plugin_driver: SharedPluginDriver, - pub fs: Arc, + pub fs: 
Arc, } #[async_trait::async_trait] diff --git a/crates/rspack_core/src/compiler/make/repair/mod.rs b/crates/rspack_core/src/compiler/make/repair/mod.rs index 38737c36eb63..46598357b8eb 100644 --- a/crates/rspack_core/src/compiler/make/repair/mod.rs +++ b/crates/rspack_core/src/compiler/make/repair/mod.rs @@ -6,7 +6,7 @@ pub mod process_dependencies; use std::sync::Arc; use rspack_error::Result; -use rspack_fs::SyncReadableFileSystem; +use rspack_fs::FileSystem; use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet}; use super::MakeArtifact; @@ -24,7 +24,7 @@ pub struct MakeTaskContext { pub compilation_id: CompilationId, pub plugin_driver: SharedPluginDriver, pub buildtime_plugin_driver: SharedPluginDriver, - pub fs: Arc, + pub fs: Arc, pub compiler_options: Arc, pub resolver_factory: Arc, pub loader_resolver_factory: Arc, diff --git a/crates/rspack_core/src/compiler/mod.rs b/crates/rspack_core/src/compiler/mod.rs index b9c0d38c3889..7bc43df7914f 100644 --- a/crates/rspack_core/src/compiler/mod.rs +++ b/crates/rspack_core/src/compiler/mod.rs @@ -5,7 +5,7 @@ mod module_executor; use std::sync::Arc; use rspack_error::Result; -use rspack_fs::{AsyncWritableFileSystem, NativeFileSystem, SyncReadableFileSystem}; +use rspack_fs::{FileSystem, NativeFileSystem, WritableFileSystem}; use rspack_futures::FuturesResults; use rspack_hook::define_hook; use rspack_paths::{Utf8Path, Utf8PathBuf}; @@ -53,8 +53,8 @@ pub struct CompilerHooks { #[derive(Debug)] pub struct Compiler { pub options: Arc, - pub output_filesystem: Box, - pub input_filesystem: Arc, + pub output_filesystem: Box, + pub input_filesystem: Arc, pub compilation: Compilation, pub plugin_driver: SharedPluginDriver, pub buildtime_plugin_driver: SharedPluginDriver, @@ -73,9 +73,9 @@ impl Compiler { options: CompilerOptions, plugins: Vec, buildtime_plugins: Vec, - output_filesystem: Option>, + output_filesystem: Option>, // only supports passing input_filesystem in rust api, no support for js api - 
input_filesystem: Option>, + input_filesystem: Option>, // no need to pass resolve_factory in rust api resolver_factory: Option>, loader_resolver_factory: Option>, diff --git a/crates/rspack_core/src/dependency/dependency_category.rs b/crates/rspack_core/src/dependency/dependency_category.rs index 94baaf42cf3d..5f0de90bcdac 100644 --- a/crates/rspack_core/src/dependency/dependency_category.rs +++ b/crates/rspack_core/src/dependency/dependency_category.rs @@ -7,6 +7,7 @@ pub enum DependencyCategory { Unknown, Esm, CommonJS, + Amd, Url, CssImport, CssCompose, @@ -41,6 +42,7 @@ impl DependencyCategory { DependencyCategory::Unknown => "unknown", DependencyCategory::Esm => "esm", DependencyCategory::CommonJS => "commonjs", + DependencyCategory::Amd => "amd", DependencyCategory::Url => "url", DependencyCategory::CssImport => "css-import", DependencyCategory::CssCompose => "css-compose", diff --git a/crates/rspack_core/src/dependency/dependency_type.rs b/crates/rspack_core/src/dependency/dependency_type.rs index 94ca7c898467..7787f12de8dc 100644 --- a/crates/rspack_core/src/dependency/dependency_type.rs +++ b/crates/rspack_core/src/dependency/dependency_type.rs @@ -33,6 +33,12 @@ pub enum DependencyType { CjsExportRequire, // cjs self reference CjsSelfReference, + // AMD + AmdDefine, + AmdRequireArray, + AmdRequireContext, + AmdRequire, + AmdRequireItem, // new URL("./foo", import.meta.url) NewUrl, // new Worker() @@ -126,6 +132,11 @@ impl DependencyType { DependencyType::CjsExports => "cjs exports", DependencyType::CjsExportRequire => "cjs export require", DependencyType::CjsSelfReference => "cjs self exports reference", + DependencyType::AmdDefine => "amd define", + DependencyType::AmdRequireArray => "amd require array", + DependencyType::AmdRequireContext => "amd require context", + DependencyType::AmdRequire => "amd", + DependencyType::AmdRequireItem => "amd require", DependencyType::NewUrl => "new URL()", DependencyType::NewWorker => "new Worker()", 
DependencyType::CreateScriptUrl => "create script url", diff --git a/crates/rspack_core/src/external_module.rs b/crates/rspack_core/src/external_module.rs index cc072a59edf0..b54ce0b208a0 100644 --- a/crates/rspack_core/src/external_module.rs +++ b/crates/rspack_core/src/external_module.rs @@ -272,11 +272,19 @@ impl ExternalModule { ) } "node-commonjs" if let Some(request) = request => { + let need_prefix = compilation + .options + .output + .environment + .supports_node_prefix_for_core_modules(); + if compilation.options.output.module { chunk_init_fragments.push( NormalInitFragment::new( - "import { createRequire as __WEBPACK_EXTERNAL_createRequire } from \"module\";\n" - .to_string(), + format!( + "import {{ createRequire as __WEBPACK_EXTERNAL_createRequire }} from \"{}\";\n", + if need_prefix { "node:module" } else { "module" } + ), InitFragmentStage::StageESMImports, 0, InitFragmentKey::ModuleExternal("node-commonjs".to_string()), diff --git a/crates/rspack_core/src/module.rs b/crates/rspack_core/src/module.rs index 7179bc54198c..3a0c7d007a98 100644 --- a/crates/rspack_core/src/module.rs +++ b/crates/rspack_core/src/module.rs @@ -8,7 +8,7 @@ use async_trait::async_trait; use json::JsonValue; use rspack_collections::{Identifiable, Identifier, IdentifierSet}; use rspack_error::{Diagnosable, Diagnostic, Result}; -use rspack_fs::SyncReadableFileSystem; +use rspack_fs::FileSystem; use rspack_hash::RspackHashDigest; use rspack_sources::Source; use rspack_util::atom::Atom; @@ -32,7 +32,7 @@ pub struct BuildContext { pub compiler_options: Arc, pub resolver_factory: Arc, pub plugin_driver: SharedPluginDriver, - pub fs: Arc, + pub fs: Arc, } #[derive(Debug, Clone, Hash, PartialEq, Eq)] diff --git a/crates/rspack_core/src/old_cache/mod.rs b/crates/rspack_core/src/old_cache/mod.rs index a9876f1be260..f617cc5ce0ab 100644 --- a/crates/rspack_core/src/old_cache/mod.rs +++ b/crates/rspack_core/src/old_cache/mod.rs @@ -12,9 +12,7 @@ mod local; mod occasion; mod storage; pub use 
local::*; -use occasion::{ - CodeGenerateOccasion, CreateChunkAssetsOccasion, ProcessRuntimeRequirementsOccasion, -}; +use occasion::{ChunkRenderOccasion, CodeGenerateOccasion, ProcessRuntimeRequirementsOccasion}; use storage::new_storage; #[derive(Debug)] @@ -22,7 +20,7 @@ pub struct Cache { is_idle: AtomicBool, pub code_generate_occasion: CodeGenerateOccasion, pub process_runtime_requirements_occasion: ProcessRuntimeRequirementsOccasion, - pub create_chunk_assets_occasion: CreateChunkAssetsOccasion, + pub chunk_render_occasion: ChunkRenderOccasion, } impl Cache { @@ -33,7 +31,7 @@ impl Cache { process_runtime_requirements_occasion: ProcessRuntimeRequirementsOccasion::new(new_storage( &options.cache, )), - create_chunk_assets_occasion: CreateChunkAssetsOccasion::new(new_storage(&options.cache)), + chunk_render_occasion: ChunkRenderOccasion::new(new_storage(&options.cache)), } } diff --git a/crates/rspack_core/src/old_cache/occasion/chunk_render.rs b/crates/rspack_core/src/old_cache/occasion/chunk_render.rs new file mode 100644 index 000000000000..81803c7c96a8 --- /dev/null +++ b/crates/rspack_core/src/old_cache/occasion/chunk_render.rs @@ -0,0 +1,51 @@ +use futures::Future; +use rspack_collections::Identifier; +use rspack_error::{Diagnostic, Result}; +use rspack_sources::BoxSource; + +use crate::{old_cache::storage, Chunk, Compilation, SourceType}; + +type Storage = dyn storage::Storage<(BoxSource, Vec)>; + +#[derive(Debug)] +pub struct ChunkRenderOccasion { + storage: Option>, +} + +impl ChunkRenderOccasion { + pub fn new(storage: Option>) -> Self { + Self { storage } + } + + pub async fn use_cache( + &self, + compilation: &Compilation, + chunk: &Chunk, + source_type: &SourceType, + generator: G, + ) -> Result<(BoxSource, Vec)> + where + G: FnOnce() -> F, + F: Future)>>, + { + let storage = match &self.storage { + Some(s) => s, + // no cache return directly + None => return generator().await, + }; + + let Some(content_hash) = + 
chunk.content_hash_by_source_type(&compilation.chunk_hashes_results, source_type) + else { + return generator().await; + }; + let cache_key = Identifier::from(content_hash.encoded()); + if let Some(value) = storage.get(&cache_key) { + Ok(value) + } else { + let res = generator().await?; + storage.set(cache_key, res.clone()); + Ok(res) + } + } +} diff --git a/crates/rspack_core/src/old_cache/occasion/create_chunk_assets.rs b/crates/rspack_core/src/old_cache/occasion/create_chunk_assets.rs deleted file mode 100644 index 9c3bd4bf5208..000000000000 --- a/crates/rspack_core/src/old_cache/occasion/create_chunk_assets.rs +++ /dev/null @@ -1,62 +0,0 @@ -use futures::Future; -use rspack_collections::{DatabaseItem, Identifier}; -use rspack_error::Result; - -use crate::{old_cache::storage, Chunk, Compilation, NormalModuleSource, RenderManifestEntry}; - -type Storage = dyn storage::Storage>; - -#[derive(Debug)] -pub struct CreateChunkAssetsOccasion { - storage: Option>, -} - -impl CreateChunkAssetsOccasion { - pub fn new(storage: Option>) -> Self { - Self { storage } - } - - pub async fn use_cache( - &self, - compilation: &Compilation, - chunk: &Chunk, - generator: G, - ) -> Result> - where - G: Fn() -> F, - F: Future>>, - { - let storage = match &self.storage { - Some(s) => s, - // no cache return directly - None => return generator().await, - }; - - let chunk_id = Identifier::from(chunk.expect_id()); - let modules = &compilation - .chunk_graph - .get_chunk_modules_identifier(&chunk.ukey()); - let is_cache_valid = modules.iter().all(|module_id| { - matches!( - compilation - .get_module_graph() - .module_by_identifier(module_id) - .and_then(|m| m.as_normal_module()) - .map(|m| matches!(m.source(), NormalModuleSource::Unbuild)), - Some(true) - ) - }); - - if is_cache_valid { - // read - if let Some(data) = storage.get(&chunk_id) { - return Ok(data); - } - } - // run generator and save to cache - let data = generator().await?; - // TODO sometime may not run save - 
storage.set(chunk_id, data.clone()); - Ok(data) - } -} diff --git a/crates/rspack_core/src/old_cache/occasion/mod.rs b/crates/rspack_core/src/old_cache/occasion/mod.rs index fa093479f953..8d8bf482d627 100644 --- a/crates/rspack_core/src/old_cache/occasion/mod.rs +++ b/crates/rspack_core/src/old_cache/occasion/mod.rs @@ -2,5 +2,5 @@ mod code_generate; pub use code_generate::*; mod process_runtime_requirements; pub use process_runtime_requirements::*; -mod create_chunk_assets; -pub use create_chunk_assets::*; +mod chunk_render; +pub use chunk_render::*; diff --git a/crates/rspack_core/src/options/compiler_options.rs b/crates/rspack_core/src/options/compiler_options.rs index e960928154da..96d95e704b68 100644 --- a/crates/rspack_core/src/options/compiler_options.rs +++ b/crates/rspack_core/src/options/compiler_options.rs @@ -18,6 +18,7 @@ pub struct CompilerOptions { pub node: Option, pub optimization: Optimization, pub profile: bool, + pub amd: Option, pub bail: bool, pub __references: References, } diff --git a/crates/rspack_core/src/options/output.rs b/crates/rspack_core/src/options/output.rs index 20004463b009..4cedae6b2e32 100644 --- a/crates/rspack_core/src/options/output.rs +++ b/crates/rspack_core/src/options/output.rs @@ -379,7 +379,6 @@ impl From for PublicPath { } } -#[allow(clippy::if_same_then_else)] pub fn get_css_chunk_filename_template<'filename>( chunk: &'filename Chunk, output_options: &'filename OutputOptions, @@ -395,23 +394,20 @@ pub fn get_css_chunk_filename_template<'filename>( } } -#[allow(clippy::if_same_then_else)] -pub fn get_js_chunk_filename_template<'filename>( - chunk: &'filename Chunk, - output_options: &'filename OutputOptions, +pub fn get_js_chunk_filename_template( + chunk: &Chunk, + output_options: &OutputOptions, chunk_group_by_ukey: &ChunkGroupByUkey, -) -> &'filename Filename { +) -> Filename { // Align with 
https://github.com/webpack/webpack/blob/8241da7f1e75c5581ba535d127fa66aeb9eb2ac8/lib/javascript/JavascriptModulesPlugin.js#L480 if let Some(filename_template) = chunk.filename_template() { - filename_template - } else if chunk.can_be_initial(chunk_group_by_ukey) { - &output_options.filename + filename_template.clone() } else if matches!(chunk.kind(), ChunkKind::HotUpdate) { - // TODO: Should return output_options.hotUpdateChunkFilename - // See https://github.com/webpack/webpack/blob/8241da7f1e75c5581ba535d127fa66aeb9eb2ac8/lib/javascript/JavascriptModulesPlugin.js#L484 - &output_options.chunk_filename + output_options.hot_update_chunk_filename.clone().into() + } else if chunk.can_be_initial(chunk_group_by_ukey) { + output_options.filename.clone() } else { - &output_options.chunk_filename + output_options.chunk_filename.clone() } } @@ -461,6 +457,7 @@ pub struct LibraryCustomUmdObject { pub struct Environment { pub r#const: Option, pub arrow_function: Option, + pub node_prefix_for_core_modules: Option, } impl Environment { @@ -471,4 +468,8 @@ impl Environment { pub fn supports_arrow_function(&self) -> bool { self.arrow_function.unwrap_or_default() } + + pub fn supports_node_prefix_for_core_modules(&self) -> bool { + self.node_prefix_for_core_modules.unwrap_or_default() + } } diff --git a/crates/rspack_core/src/resolver/boxfs.rs b/crates/rspack_core/src/resolver/boxfs.rs index 9ab9be1dbb64..e985ff2ed773 100644 --- a/crates/rspack_core/src/resolver/boxfs.rs +++ b/crates/rspack_core/src/resolver/boxfs.rs @@ -1,14 +1,14 @@ use std::{io, sync::Arc}; -use rspack_fs::{Error, SyncReadableFileSystem}; +use rspack_fs::{Error, FileSystem}; use rspack_paths::AssertUtf8; use rspack_resolver::{FileMetadata, FileSystem as ResolverFileSystem}; #[derive(Clone)] -pub struct BoxFS(Arc); +pub struct BoxFS(Arc); impl BoxFS { - pub fn new(fs: Arc) -> Self { + pub fn new(fs: Arc) -> Self { Self(fs) } } diff --git a/crates/rspack_core/src/resolver/factory.rs 
b/crates/rspack_core/src/resolver/factory.rs index 0cf898520dc5..270bc2d1ec0c 100644 --- a/crates/rspack_core/src/resolver/factory.rs +++ b/crates/rspack_core/src/resolver/factory.rs @@ -1,7 +1,7 @@ use std::{hash::BuildHasherDefault, sync::Arc}; use dashmap::DashMap; -use rspack_fs::SyncReadableFileSystem; +use rspack_fs::FileSystem; use rustc_hash::FxHasher; use super::resolver_impl::Resolver; @@ -29,7 +29,7 @@ impl ResolverFactory { self.resolver.clear_cache(); } - pub fn new(options: Resolve, fs: Arc) -> Self { + pub fn new(options: Resolve, fs: Arc) -> Self { Self { base_options: options.clone(), resolver: Resolver::new(options, fs), diff --git a/crates/rspack_core/src/resolver/resolver_impl.rs b/crates/rspack_core/src/resolver/resolver_impl.rs index 798782b56e0b..2f9a2e3f7576 100644 --- a/crates/rspack_core/src/resolver/resolver_impl.rs +++ b/crates/rspack_core/src/resolver/resolver_impl.rs @@ -8,7 +8,7 @@ use rspack_error::{ miette::{diagnostic, Diagnostic}, DiagnosticExt, Severity, TraceableError, }; -use rspack_fs::SyncReadableFileSystem; +use rspack_fs::FileSystem; use rspack_loader_runner::DescriptionData; use rspack_paths::AssertUtf8; use rustc_hash::FxHashSet as HashSet; @@ -83,11 +83,11 @@ pub struct Resolver { } impl Resolver { - pub fn new(options: Resolve, fs: Arc) -> Self { + pub fn new(options: Resolve, fs: Arc) -> Self { Self::new_rspack_resolver(options, fs) } - fn new_rspack_resolver(options: Resolve, fs: Arc) -> Self { + fn new_rspack_resolver(options: Resolve, fs: Arc) -> Self { let options = to_rspack_resolver_options(options, false, DependencyCategory::Unknown); let boxfs = BoxFS::new(fs); let resolver = rspack_resolver::ResolverGeneric::new_with_file_system(boxfs, options); diff --git a/crates/rspack_core/src/runtime_globals.rs b/crates/rspack_core/src/runtime_globals.rs index e6927fe847c8..1266caa6c99b 100644 --- a/crates/rspack_core/src/runtime_globals.rs +++ b/crates/rspack_core/src/runtime_globals.rs @@ -250,6 +250,10 @@ bitflags! 
{ const RSPACK_UNIQUE_ID = 1 << 65; const HAS_FETCH_PRIORITY = 1 << 66; + + // amd module support + const AMD_DEFINE = 1 << 67; + const AMD_OPTIONS = 1 << 68; } } @@ -295,6 +299,8 @@ impl RuntimeGlobals { R::GET_CHUNK_UPDATE_CSS_FILENAME => "__webpack_require__.hk", R::HMR_MODULE_DATA => "__webpack_require__.hmrD", R::HMR_RUNTIME_STATE_PREFIX => "__webpack_require__.hmrS", + R::AMD_DEFINE => "__webpack_require__.amdD", + R::AMD_OPTIONS => "__webpack_require__.amdO", R::EXTERNAL_INSTALL_CHUNK => "__webpack_require__.C", R::GET_FULL_HASH => "__webpack_require__.h", R::GLOBAL => "__webpack_require__.g", diff --git a/crates/rspack_core/src/utils/task_loop.rs b/crates/rspack_core/src/utils/task_loop.rs index cc962827cc3e..8b9e9d04f86f 100644 --- a/crates/rspack_core/src/utils/task_loop.rs +++ b/crates/rspack_core/src/utils/task_loop.rs @@ -87,7 +87,7 @@ pub async fn run_task_loop_with_event( tokio::spawn(task::unconstrained(async move { let r = task.background_run().await; if !is_expected_shutdown.load(Ordering::Relaxed) { - tx.send(r).expect("failed to send error message"); + tx.send(r).expect("failed to send task result"); } })); } diff --git a/crates/rspack_fs/Cargo.toml b/crates/rspack_fs/Cargo.toml index 92da930785fd..5282468a4fe7 100644 --- a/crates/rspack_fs/Cargo.toml +++ b/crates/rspack_fs/Cargo.toml @@ -7,6 +7,7 @@ repository = "https://github.com/web-infra-dev/rspack" version = "0.1.0" [dependencies] +async-trait = { workspace = true } dunce = { version = "1.0.5" } futures = { workspace = true } rspack_error = { path = "../rspack_error", version = "0.1.0" } diff --git a/crates/rspack_fs/src/fs.rs b/crates/rspack_fs/src/fs.rs new file mode 100644 index 000000000000..eedff4853f7e --- /dev/null +++ b/crates/rspack_fs/src/fs.rs @@ -0,0 +1,5 @@ +use std::fmt::Debug; + +use crate::{ReadableFileSystem, WritableFileSystem}; + +pub trait FileSystem: ReadableFileSystem + WritableFileSystem + Debug + Sync + Send {} diff --git a/crates/rspack_fs/src/lib.rs 
b/crates/rspack_fs/src/lib.rs index d9f4ca4a5cb3..45ae4ad91cb8 100644 --- a/crates/rspack_fs/src/lib.rs +++ b/crates/rspack_fs/src/lib.rs @@ -1,8 +1,10 @@ -mod r#async; -pub use r#async::{AsyncFileSystem, AsyncReadableFileSystem, AsyncWritableFileSystem}; +mod fs; +pub use fs::FileSystem; +mod read; +pub use read::ReadableFileSystem; -mod sync; -pub use sync::{SyncFileSystem, SyncReadableFileSystem, SyncWritableFileSystem}; +mod write; +pub use write::WritableFileSystem; mod file_metadata; pub use file_metadata::FileMetadata; diff --git a/crates/rspack_fs/src/memory_fs.rs b/crates/rspack_fs/src/memory_fs.rs index f089ef0f118a..574447484e30 100644 --- a/crates/rspack_fs/src/memory_fs.rs +++ b/crates/rspack_fs/src/memory_fs.rs @@ -7,10 +7,7 @@ use std::{ use futures::future::BoxFuture; use rspack_paths::{AssertUtf8, Utf8Path, Utf8PathBuf}; -use crate::{ - AsyncReadableFileSystem, AsyncWritableFileSystem, Error, FileMetadata, Result, - SyncReadableFileSystem, SyncWritableFileSystem, -}; +use crate::{Error, FileMetadata, FileSystem, ReadableFileSystem, Result, WritableFileSystem}; fn current_time() -> u64 { SystemTime::now() @@ -74,6 +71,7 @@ impl FileType { pub struct MemoryFileSystem { files: Mutex>, } +impl FileSystem for MemoryFileSystem {} impl MemoryFileSystem { pub fn clear(&self) { @@ -138,9 +136,9 @@ impl MemoryFileSystem { Ok(res.into_iter().collect()) } } - -impl SyncWritableFileSystem for MemoryFileSystem { - fn create_dir(&self, dir: &Utf8Path) -> Result<()> { +#[async_trait::async_trait] +impl WritableFileSystem for MemoryFileSystem { + async fn create_dir(&self, dir: &Utf8Path) -> Result<()> { if self.contains_dir(dir)? { return Ok(()); } @@ -156,20 +154,20 @@ impl SyncWritableFileSystem for MemoryFileSystem { Ok(()) } - fn create_dir_all(&self, dir: &Utf8Path) -> Result<()> { + async fn create_dir_all(&self, dir: &Utf8Path) -> Result<()> { if self.contains_dir(dir)? 
{ return Ok(()); } if let Some(p) = dir.parent() { - SyncWritableFileSystem::create_dir_all(self, p)?; + WritableFileSystem::create_dir_all(self, p).await?; } let mut files = self.files.lock().expect("should get lock"); files.insert(dir.to_path_buf(), FileType::new_dir()); Ok(()) } - fn write(&self, file: &Utf8Path, data: &[u8]) -> Result<()> { + async fn write(&self, file: &Utf8Path, data: &[u8]) -> Result<()> { { // check file exist and update it let mut files = self.files.lock().expect("should get lock"); @@ -197,9 +195,34 @@ impl SyncWritableFileSystem for MemoryFileSystem { files.insert(file.to_path_buf(), FileType::new_file(data.to_vec())); Ok(()) } + + fn remove_file<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result<()>> { + let fut = async move { self._remove_file(file) }; + Box::pin(fut) + } + + fn remove_dir_all<'a>(&'a self, dir: &'a Utf8Path) -> BoxFuture<'a, Result<()>> { + let fut = async move { self._remove_dir_all(dir) }; + Box::pin(fut) + } + + fn read_dir<'a>(&'a self, dir: &'a Utf8Path) -> BoxFuture<'a, Result>> { + let fut = async move { self._read_dir(dir) }; + Box::pin(fut) + } + + fn read_file<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result>> { + let fut = async move { ReadableFileSystem::read(self, file) }; + Box::pin(fut) + } + + fn stat<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result> { + let fut = async move { ReadableFileSystem::metadata(self, file) }; + Box::pin(fut) + } } -impl SyncReadableFileSystem for MemoryFileSystem { +impl ReadableFileSystem for MemoryFileSystem { fn read(&self, path: &Utf8Path) -> Result> { let files = self.files.lock().expect("should get lock"); match files.get(path) { @@ -224,53 +247,8 @@ impl SyncReadableFileSystem for MemoryFileSystem { let path = dunce::canonicalize(path)?; Ok(path.assert_utf8()) } -} - -impl AsyncWritableFileSystem for MemoryFileSystem { - fn create_dir<'a>(&'a self, dir: &'a Utf8Path) -> BoxFuture<'a, Result<()>> { - let fut = async move { 
SyncWritableFileSystem::create_dir(self, dir) }; - Box::pin(fut) - } - - fn create_dir_all<'a>(&'a self, dir: &'a Utf8Path) -> BoxFuture<'a, Result<()>> { - let fut = async move { SyncWritableFileSystem::create_dir_all(self, dir) }; - Box::pin(fut) - } - - fn write<'a>(&'a self, file: &'a Utf8Path, data: &'a [u8]) -> BoxFuture<'a, Result<()>> { - let fut = async move { SyncWritableFileSystem::write(self, file, data) }; - Box::pin(fut) - } - - fn remove_file<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result<()>> { - let fut = async move { self._remove_file(file) }; - Box::pin(fut) - } - - fn remove_dir_all<'a>(&'a self, dir: &'a Utf8Path) -> BoxFuture<'a, Result<()>> { - let fut = async move { self._remove_dir_all(dir) }; - Box::pin(fut) - } - - fn read_dir<'a>(&'a self, dir: &'a Utf8Path) -> BoxFuture<'a, Result>> { - let fut = async move { self._read_dir(dir) }; - Box::pin(fut) - } - - fn read_file<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result>> { - let fut = async move { SyncReadableFileSystem::read(self, file) }; - Box::pin(fut) - } - - fn stat<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result> { - let fut = async move { SyncReadableFileSystem::metadata(self, file) }; - Box::pin(fut) - } -} - -impl AsyncReadableFileSystem for MemoryFileSystem { - fn read<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result>> { - let fut = async move { SyncReadableFileSystem::read(self, file) }; + fn async_read<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result>> { + let fut = async move { ReadableFileSystem::read(self, file) }; Box::pin(fut) } } @@ -279,183 +257,120 @@ impl AsyncReadableFileSystem for MemoryFileSystem { mod tests { use rspack_paths::Utf8Path; - use super::{ - AsyncReadableFileSystem, AsyncWritableFileSystem, MemoryFileSystem, SyncReadableFileSystem, - SyncWritableFileSystem, - }; - - #[test] - fn sync_fs_test() { - let fs = MemoryFileSystem::default(); - let file_content = "1".as_bytes(); - // init fs - 
SyncWritableFileSystem::create_dir_all(&fs, Utf8Path::new("/a/b/c")).unwrap(); - SyncWritableFileSystem::write(&fs, Utf8Path::new("/a/file1"), file_content).unwrap(); - std::thread::sleep(std::time::Duration::from_millis(100)); - - // test create_dir - assert!(SyncWritableFileSystem::create_dir(&fs, Utf8Path::new("/a/b/c/d/e")).is_err()); - assert!(SyncWritableFileSystem::create_dir(&fs, Utf8Path::new("/a/b/c/d")).is_ok()); - assert!(SyncWritableFileSystem::create_dir(&fs, Utf8Path::new("/a/b/c/d/e")).is_ok()); - assert!(SyncWritableFileSystem::create_dir(&fs, Utf8Path::new("/a/file1/c/d")).is_err()); - assert!(SyncWritableFileSystem::create_dir(&fs, Utf8Path::new("/a/file1/c")).is_err()); - - // test create_dir_all - assert!(SyncWritableFileSystem::create_dir_all(&fs, Utf8Path::new("/a1/b1/c1")).is_ok()); - assert!(SyncWritableFileSystem::create_dir_all(&fs, Utf8Path::new("/a/file1/c/d")).is_err()); - assert!(SyncWritableFileSystem::create_dir_all(&fs, Utf8Path::new("/a/file1/c")).is_err()); - - // test write - assert!( - SyncWritableFileSystem::write(&fs, Utf8Path::new("/a/temp/file2"), file_content).is_err() - ); - assert!(SyncWritableFileSystem::write(&fs, Utf8Path::new("/a/file2"), file_content).is_ok()); - assert!( - SyncWritableFileSystem::write(&fs, Utf8Path::new("/a/file1/file2"), file_content).is_err() - ); - - // read - assert!(SyncReadableFileSystem::read(&fs, Utf8Path::new("/a/temp/file2")).is_err()); - assert!(SyncReadableFileSystem::read(&fs, Utf8Path::new("/a/file1/file2")).is_err()); - assert_eq!( - SyncReadableFileSystem::read(&fs, Utf8Path::new("/a/file1")).unwrap(), - file_content - ); - assert_eq!( - SyncReadableFileSystem::read(&fs, Utf8Path::new("/a/file2")).unwrap(), - file_content - ); - - // metadata - assert!(SyncReadableFileSystem::metadata(&fs, Utf8Path::new("/a/file1/c/d")).is_err()); - let file1_meta = SyncReadableFileSystem::metadata(&fs, Utf8Path::new("/a/file1")).unwrap(); - let file2_meta = SyncReadableFileSystem::metadata(&fs, 
Utf8Path::new("/a/file2")).unwrap(); - assert!(file1_meta.is_file); - assert!(file2_meta.is_file); - assert!(file1_meta.ctime_ms < file2_meta.ctime_ms); - let dir_meta = SyncReadableFileSystem::metadata(&fs, Utf8Path::new("/a/b")).unwrap(); - assert!(dir_meta.is_directory); - assert!(dir_meta.ctime_ms < file2_meta.ctime_ms); - - // clear - fs.clear(); - assert!(SyncReadableFileSystem::metadata(&fs, Utf8Path::new("/a/file1")).is_err()); - } + use super::{MemoryFileSystem, ReadableFileSystem, WritableFileSystem}; #[tokio::test] async fn async_fs_test() { let fs = MemoryFileSystem::default(); let file_content = "1".as_bytes(); // init fs - AsyncWritableFileSystem::create_dir_all(&fs, Utf8Path::new("/a/b/c")) + WritableFileSystem::create_dir_all(&fs, Utf8Path::new("/a/b/c")) .await .unwrap(); - AsyncWritableFileSystem::write(&fs, Utf8Path::new("/a/file1"), file_content) + WritableFileSystem::write(&fs, Utf8Path::new("/a/file1"), file_content) .await .unwrap(); tokio::time::sleep(std::time::Duration::from_millis(100)).await; // test create_dir assert!( - AsyncWritableFileSystem::create_dir(&fs, Utf8Path::new("/a/b/c/d/e")) + WritableFileSystem::create_dir(&fs, Utf8Path::new("/a/b/c/d/e")) .await .is_err() ); assert!( - AsyncWritableFileSystem::create_dir(&fs, Utf8Path::new("/a/b/c/d")) + WritableFileSystem::create_dir(&fs, Utf8Path::new("/a/b/c/d")) .await .is_ok() ); assert!( - AsyncWritableFileSystem::create_dir(&fs, Utf8Path::new("/a/b/c/d/e")) + WritableFileSystem::create_dir(&fs, Utf8Path::new("/a/b/c/d/e")) .await .is_ok() ); assert!( - AsyncWritableFileSystem::create_dir(&fs, Utf8Path::new("/a/file1/c/d")) + WritableFileSystem::create_dir(&fs, Utf8Path::new("/a/file1/c/d")) .await .is_err() ); assert!( - AsyncWritableFileSystem::create_dir(&fs, Utf8Path::new("/a/file1/c")) + WritableFileSystem::create_dir(&fs, Utf8Path::new("/a/file1/c")) .await .is_err() ); // test create_dir_all assert!( - AsyncWritableFileSystem::create_dir_all(&fs, Utf8Path::new("/a1/b1/c1")) 
+ WritableFileSystem::create_dir_all(&fs, Utf8Path::new("/a1/b1/c1")) .await .is_ok() ); assert!( - AsyncWritableFileSystem::create_dir_all(&fs, Utf8Path::new("/a/file1/c/d")) + WritableFileSystem::create_dir_all(&fs, Utf8Path::new("/a/file1/c/d")) .await .is_err() ); assert!( - AsyncWritableFileSystem::create_dir_all(&fs, Utf8Path::new("/a/file1/c")) + WritableFileSystem::create_dir_all(&fs, Utf8Path::new("/a/file1/c")) .await .is_err() ); // test write assert!( - AsyncWritableFileSystem::write(&fs, Utf8Path::new("/a/temp/file2"), file_content) + WritableFileSystem::write(&fs, Utf8Path::new("/a/temp/file2"), file_content) .await .is_err() ); assert!( - AsyncWritableFileSystem::write(&fs, Utf8Path::new("/a/file2"), file_content) + WritableFileSystem::write(&fs, Utf8Path::new("/a/file2"), file_content) .await .is_ok() ); assert!( - AsyncWritableFileSystem::write(&fs, Utf8Path::new("/a/file1/file2"), file_content) + WritableFileSystem::write(&fs, Utf8Path::new("/a/file1/file2"), file_content) .await .is_err() ); // read assert!( - AsyncReadableFileSystem::read(&fs, Utf8Path::new("/a/temp/file2")) + ReadableFileSystem::async_read(&fs, Utf8Path::new("/a/temp/file2")) .await .is_err() ); assert!( - AsyncReadableFileSystem::read(&fs, Utf8Path::new("/a/file1/file2")) + ReadableFileSystem::async_read(&fs, Utf8Path::new("/a/file1/file2")) .await .is_err() ); assert_eq!( - AsyncReadableFileSystem::read(&fs, Utf8Path::new("/a/file1")) + ReadableFileSystem::async_read(&fs, Utf8Path::new("/a/file1")) .await .unwrap(), file_content ); assert_eq!( - AsyncReadableFileSystem::read(&fs, Utf8Path::new("/a/file2")) + ReadableFileSystem::async_read(&fs, Utf8Path::new("/a/file2")) .await .unwrap(), file_content ); // stat - assert!( - AsyncWritableFileSystem::stat(&fs, Utf8Path::new("/a/file1/c/d")) - .await - .is_err() - ); - let file1_meta = AsyncWritableFileSystem::stat(&fs, Utf8Path::new("/a/file1")) + assert!(WritableFileSystem::stat(&fs, Utf8Path::new("/a/file1/c/d")) + .await + 
.is_err()); + let file1_meta = WritableFileSystem::stat(&fs, Utf8Path::new("/a/file1")) .await .unwrap(); - let file2_meta = AsyncWritableFileSystem::stat(&fs, Utf8Path::new("/a/file2")) + let file2_meta = WritableFileSystem::stat(&fs, Utf8Path::new("/a/file2")) .await .unwrap(); assert!(file1_meta.is_file); assert!(file2_meta.is_file); assert!(file1_meta.ctime_ms < file2_meta.ctime_ms); - let dir_meta = AsyncWritableFileSystem::stat(&fs, Utf8Path::new("/a/b")) + let dir_meta = WritableFileSystem::stat(&fs, Utf8Path::new("/a/b")) .await .unwrap(); assert!(dir_meta.is_directory); @@ -463,11 +378,11 @@ mod tests { // read dir assert!( - AsyncWritableFileSystem::read_dir(&fs, Utf8Path::new("/a2/b2/c2")) + WritableFileSystem::read_dir(&fs, Utf8Path::new("/a2/b2/c2")) .await .is_err(), ); - let children = AsyncWritableFileSystem::read_dir(&fs, Utf8Path::new("/a")) + let children = WritableFileSystem::read_dir(&fs, Utf8Path::new("/a")) .await .unwrap(); assert_eq!(children.len(), 3); @@ -477,46 +392,40 @@ mod tests { // remove file assert!( - AsyncWritableFileSystem::remove_file(&fs, Utf8Path::new("/a/b/c")) + WritableFileSystem::remove_file(&fs, Utf8Path::new("/a/b/c")) .await .is_err(), ); assert!( - AsyncWritableFileSystem::remove_file(&fs, Utf8Path::new("/a/file3")) + WritableFileSystem::remove_file(&fs, Utf8Path::new("/a/file3")) .await .is_ok(), ); assert!( - AsyncWritableFileSystem::remove_file(&fs, Utf8Path::new("/a/file2")) + WritableFileSystem::remove_file(&fs, Utf8Path::new("/a/file2")) .await .is_ok(), ); - assert!( - AsyncWritableFileSystem::stat(&fs, Utf8Path::new("/a/file2")) - .await - .is_err(), - ); + assert!(WritableFileSystem::stat(&fs, Utf8Path::new("/a/file2")) + .await + .is_err(),); // remove dir assert!( - AsyncWritableFileSystem::remove_dir_all(&fs, Utf8Path::new("/a3/b3/c3")) + WritableFileSystem::remove_dir_all(&fs, Utf8Path::new("/a3/b3/c3")) .await .is_ok(), ); assert!( - AsyncWritableFileSystem::remove_dir_all(&fs, 
Utf8Path::new("/a/file1")) - .await - .is_err(), - ); - assert!( - AsyncWritableFileSystem::remove_dir_all(&fs, Utf8Path::new("/a")) - .await - .is_ok(), - ); - assert!( - AsyncWritableFileSystem::stat(&fs, Utf8Path::new("/a/file1")) + WritableFileSystem::remove_dir_all(&fs, Utf8Path::new("/a/file1")) .await .is_err(), ); + assert!(WritableFileSystem::remove_dir_all(&fs, Utf8Path::new("/a")) + .await + .is_ok(),); + assert!(WritableFileSystem::stat(&fs, Utf8Path::new("/a/file1")) + .await + .is_err(),); } } diff --git a/crates/rspack_fs/src/native_fs.rs b/crates/rspack_fs/src/native_fs.rs index 71421b706f4d..b2f3fa607a5a 100644 --- a/crates/rspack_fs/src/native_fs.rs +++ b/crates/rspack_fs/src/native_fs.rs @@ -3,65 +3,24 @@ use std::fs; use futures::future::BoxFuture; use rspack_paths::{AssertUtf8, Utf8Path, Utf8PathBuf}; -use crate::{ - AsyncReadableFileSystem, AsyncWritableFileSystem, Error, FileMetadata, Result, - SyncReadableFileSystem, SyncWritableFileSystem, -}; +use crate::{Error, FileMetadata, FileSystem, ReadableFileSystem, Result, WritableFileSystem}; #[derive(Debug)] pub struct NativeFileSystem; - -impl SyncWritableFileSystem for NativeFileSystem { - fn create_dir(&self, dir: &Utf8Path) -> Result<()> { +impl FileSystem for NativeFileSystem {} +#[async_trait::async_trait] +impl WritableFileSystem for NativeFileSystem { + async fn create_dir(&self, dir: &Utf8Path) -> Result<()> { fs::create_dir(dir).map_err(Error::from) } - fn create_dir_all(&self, dir: &Utf8Path) -> Result<()> { + async fn create_dir_all(&self, dir: &Utf8Path) -> Result<()> { fs::create_dir_all(dir).map_err(Error::from) } - fn write(&self, file: &Utf8Path, data: &[u8]) -> Result<()> { + async fn write(&self, file: &Utf8Path, data: &[u8]) -> Result<()> { fs::write(file, data).map_err(Error::from) } -} - -impl SyncReadableFileSystem for NativeFileSystem { - fn read(&self, path: &Utf8Path) -> Result> { - fs::read(path).map_err(Error::from) - } - - fn metadata(&self, path: &Utf8Path) -> 
Result { - let meta = fs::metadata(path)?; - meta.try_into() - } - - fn symlink_metadata(&self, path: &Utf8Path) -> Result { - let meta = fs::symlink_metadata(path)?; - meta.try_into() - } - - fn canonicalize(&self, path: &Utf8Path) -> Result { - let path = dunce::canonicalize(path)?; - Ok(path.assert_utf8()) - } -} - -impl AsyncWritableFileSystem for NativeFileSystem { - fn create_dir<'a>(&'a self, dir: &'a Utf8Path) -> BoxFuture<'a, Result<()>> { - let dir = dir.to_path_buf(); - let fut = async move { tokio::fs::create_dir(dir).await.map_err(Error::from) }; - Box::pin(fut) - } - - fn create_dir_all<'a>(&'a self, dir: &'a Utf8Path) -> BoxFuture<'a, Result<()>> { - let fut = async move { tokio::fs::create_dir_all(dir).await.map_err(Error::from) }; - Box::pin(fut) - } - - fn write<'a>(&'a self, file: &'a Utf8Path, data: &'a [u8]) -> BoxFuture<'a, Result<()>> { - let fut = async move { tokio::fs::write(file, data).await.map_err(Error::from) }; - Box::pin(fut) - } fn remove_file<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result<()>> { let fut = async move { tokio::fs::remove_file(file).await.map_err(Error::from) }; @@ -101,8 +60,26 @@ impl AsyncWritableFileSystem for NativeFileSystem { } } -impl AsyncReadableFileSystem for NativeFileSystem { - fn read<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result>> { +impl ReadableFileSystem for NativeFileSystem { + fn read(&self, path: &Utf8Path) -> Result> { + fs::read(path).map_err(Error::from) + } + + fn metadata(&self, path: &Utf8Path) -> Result { + let meta = fs::metadata(path)?; + meta.try_into() + } + + fn symlink_metadata(&self, path: &Utf8Path) -> Result { + let meta = fs::symlink_metadata(path)?; + meta.try_into() + } + + fn canonicalize(&self, path: &Utf8Path) -> Result { + let path = dunce::canonicalize(path)?; + Ok(path.assert_utf8()) + } + fn async_read<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result>> { let fut = async move { tokio::fs::read(file).await.map_err(Error::from) }; 
Box::pin(fut) } diff --git a/crates/rspack_fs/src/read.rs b/crates/rspack_fs/src/read.rs new file mode 100644 index 000000000000..b36c7e70f5f5 --- /dev/null +++ b/crates/rspack_fs/src/read.rs @@ -0,0 +1,24 @@ +use std::fmt::Debug; + +use futures::future::BoxFuture; +use rspack_paths::Utf8Path; +use rspack_paths::Utf8PathBuf; + +use crate::{FileMetadata, Result}; +pub trait ReadableFileSystem: Debug + Send + Sync { + /// See [std::fs::read] + fn read(&self, path: &Utf8Path) -> Result>; + + /// See [std::fs::metadata] + fn metadata(&self, path: &Utf8Path) -> Result; + + /// See [std::fs::symlink_metadata] + fn symlink_metadata(&self, path: &Utf8Path) -> Result; + + /// See [std::fs::canonicalize] + fn canonicalize(&self, path: &Utf8Path) -> Result; + /// Read the entire contents of a file into a bytes vector. + /// + /// Error: This function will return an error if path does not already exist. + fn async_read<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result>>; +} diff --git a/crates/rspack_fs/src/sync.rs b/crates/rspack_fs/src/sync.rs deleted file mode 100644 index 8c0fd95ca7b9..000000000000 --- a/crates/rspack_fs/src/sync.rs +++ /dev/null @@ -1,46 +0,0 @@ -use std::fmt::Debug; - -use rspack_paths::{Utf8Path, Utf8PathBuf}; - -use super::{FileMetadata, Result}; - -pub trait SyncWritableFileSystem: Debug { - /// Creates a new, empty directory at the provided path. - /// - /// NOTE: If a parent of the given path doesn’t exist, this function is supposed to return an error. - /// To create a directory and all its missing parents at the same time, use the [`create_dir_all`] function. - /// - /// Error: - /// This function is supposed to return an error in the following situations, but is not limited to just these cases: - /// - User lacks permissions to create directory at path. - /// - A parent of the given path doesn’t exist. (To create a directory and all its missing parents at the same time, use the create_dir_all function.) - /// - Path already exists. 
- fn create_dir(&self, dir: &Utf8Path) -> Result<()>; - - /// Recursively create a directory and all of its parent components if they are missing. - fn create_dir_all(&self, dir: &Utf8Path) -> Result<()>; - - /// Write a slice as the entire contents of a file. - /// This function will create a file if it does not exist, and will entirely replace its contents if it does. - fn write(&self, file: &Utf8Path, data: &[u8]) -> Result<()>; -} - -pub trait SyncReadableFileSystem: Debug + Send + Sync { - /// See [std::fs::read] - fn read(&self, path: &Utf8Path) -> Result>; - - /// See [std::fs::metadata] - fn metadata(&self, path: &Utf8Path) -> Result; - - /// See [std::fs::symlink_metadata] - fn symlink_metadata(&self, path: &Utf8Path) -> Result; - - /// See [std::fs::canonicalize] - fn canonicalize(&self, path: &Utf8Path) -> Result; -} - -/// Readable and writable file system representation. -pub trait SyncFileSystem: SyncReadableFileSystem + SyncWritableFileSystem {} - -// Blanket implementation for all types that implement both [`ReadableFileSystem`] and [`WritableFileSystem`]. -impl SyncFileSystem for T {} diff --git a/crates/rspack_fs/src/async.rs b/crates/rspack_fs/src/write.rs similarity index 63% rename from crates/rspack_fs/src/async.rs rename to crates/rspack_fs/src/write.rs index e50e34e85955..7ec4cadfc494 100644 --- a/crates/rspack_fs/src/async.rs +++ b/crates/rspack_fs/src/write.rs @@ -3,9 +3,10 @@ use std::fmt::Debug; use futures::future::BoxFuture; use rspack_paths::Utf8Path; -use crate::{FileMetadata, Result}; +use super::{FileMetadata, Result}; -pub trait AsyncWritableFileSystem: Debug { +#[async_trait::async_trait] +pub trait WritableFileSystem: Debug + Send + Sync { /// Creates a new, empty directory at the provided path. /// /// NOTE: If a parent of the given path doesn’t exist, this function is supposed to return an error. @@ -16,14 +17,14 @@ pub trait AsyncWritableFileSystem: Debug { /// - User lacks permissions to create directory at path. 
/// - A parent of the given path doesn’t exist. (To create a directory and all its missing parents at the same time, use the create_dir_all function.) /// - Path already exists. - fn create_dir<'a>(&'a self, dir: &'a Utf8Path) -> BoxFuture<'a, Result<()>>; + async fn create_dir(&self, dir: &Utf8Path) -> Result<()>; /// Recursively create a directory and all of its parent components if they are missing. - fn create_dir_all<'a>(&'a self, dir: &'a Utf8Path) -> BoxFuture<'a, Result<()>>; + async fn create_dir_all(&self, dir: &Utf8Path) -> Result<()>; /// Write a slice as the entire contents of a file. /// This function will create a file if it does not exist, and will entirely replace its contents if it does. - fn write<'a>(&'a self, file: &'a Utf8Path, data: &'a [u8]) -> BoxFuture<'a, Result<()>>; + async fn write(&self, file: &Utf8Path, data: &[u8]) -> Result<()>; /// Removes a file from the filesystem. fn remove_file<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result<()>>; @@ -39,16 +40,3 @@ pub trait AsyncWritableFileSystem: Debug { fn stat<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result>; } - -pub trait AsyncReadableFileSystem: Debug { - /// Read the entire contents of a file into a bytes vector. - /// - /// Error: This function will return an error if path does not already exist. - fn read<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, Result>>; -} - -/// Async readable and writable file system representation. -pub trait AsyncFileSystem: AsyncReadableFileSystem + AsyncWritableFileSystem {} - -// Blanket implementation for all types that implement both [`AsyncReadableFileSystem`] and [`WritableFileSystem`]. 
-impl AsyncFileSystem for T {} diff --git a/crates/rspack_fs_node/Cargo.toml b/crates/rspack_fs_node/Cargo.toml index cfbf620a23a0..e7dcb6f14d64 100644 --- a/crates/rspack_fs_node/Cargo.toml +++ b/crates/rspack_fs_node/Cargo.toml @@ -10,6 +10,7 @@ crate-type = ["cdylib", "rlib"] [dependencies] +async-trait = { workspace = true } futures = { workspace = true } napi = { workspace = true, features = ["napi4", "tokio_rt"] } napi-derive = { workspace = true } diff --git a/crates/rspack_fs_node/src/lib.rs b/crates/rspack_fs_node/src/lib.rs index 82715f435535..e7429ea28c1f 100644 --- a/crates/rspack_fs_node/src/lib.rs +++ b/crates/rspack_fs_node/src/lib.rs @@ -1,7 +1,7 @@ #![allow(clippy::unwrap_in_result)] -mod r#async; -pub use r#async::AsyncNodeWritableFileSystem; +mod write; +pub use write::NodeFileSystem; mod node; pub use node::ThreadsafeNodeFS; diff --git a/crates/rspack_fs_node/src/sync.rs b/crates/rspack_fs_node/src/sync.rs deleted file mode 100644 index 7e26b2c38b5f..000000000000 --- a/crates/rspack_fs_node/src/sync.rs +++ /dev/null @@ -1,108 +0,0 @@ -use std::marker::PhantomData; - -use napi::Env; -use rspack_fs::{Error, Result, WritableFileSystem}; -use rspack_paths::Utf8Path; - -use crate::node::{NodeFS, NodeFSRef, TryIntoNodeFSRef}; - -pub struct NodeWritableFileSystem { - env: Env, - fs_ref: NodeFSRef, - _data: PhantomData<*mut ()>, -} - -impl std::fmt::Debug for NodeWritableFileSystem { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("NodeWritableFileSystem").finish() - } -} - -impl NodeWritableFileSystem { - pub fn new(env: Env, fs: NodeFS) -> napi::Result { - Ok(Self { - env, - fs_ref: fs.try_into_node_fs_ref(&env)?, - _data: PhantomData, - }) - } -} - -impl WritableFileSystem for NodeWritableFileSystem { - fn create_dir(&self, dir: &Utf8Path) -> Result<()> { - let dir = dir.as_str(); - let mkdir = self.fs_ref.mkdir.get().expect("Failed to get mkdir"); - mkdir - .call( - None, - &[self - .env - .create_string(dir) 
- .expect("Failed to create string")], - ) - .map_err(|err| { - Error::Io(std::io::Error::new( - std::io::ErrorKind::Other, - err.to_string(), - )) - })?; - - Ok(()) - } - - fn create_dir_all(&self, dir: &Utf8Path) -> Result<()> { - let dir = dir.as_str(); - let mkdirp = self.fs_ref.mkdirp.get().expect("Failed to get mkdirp"); - mkdirp - .call( - None, - &[self - .env - .create_string(dir) - .expect("Failed to create string")], - ) - .map_err(|err| { - Error::Io(std::io::Error::new( - std::io::ErrorKind::Other, - err.to_string(), - )) - })?; - - Ok(()) - } - - fn write(&self, file: &Utf8Path, data: &[u8]) -> Result<()> { - let file = file.as_str(); - let buf = data.to_vec(); - let write_file = self - .fs_ref - .write_file - .get() - .expect("Failed to get write_file"); - - write_file - .call( - None, - &[ - self - .env - .create_string(file) - .expect("Failed to create string") - .into_unknown(), - self - .env - .create_buffer_with_data(buf) - .expect("Failed to create buffer") - .into_unknown(), - ], - ) - .map_err(|err| { - Error::Io(std::io::Error::new( - std::io::ErrorKind::Other, - err.to_string(), - )) - })?; - - Ok(()) - } -} diff --git a/crates/rspack_fs_node/src/async.rs b/crates/rspack_fs_node/src/write.rs similarity index 87% rename from crates/rspack_fs_node/src/async.rs rename to crates/rspack_fs_node/src/write.rs index b720268e0348..f1a95f4f27a0 100644 --- a/crates/rspack_fs_node/src/async.rs +++ b/crates/rspack_fs_node/src/write.rs @@ -1,26 +1,27 @@ +use async_trait::async_trait; use futures::future::BoxFuture; use napi::{bindgen_prelude::Either3, Either}; -use rspack_fs::{AsyncWritableFileSystem, FileMetadata}; +use rspack_fs::{FileMetadata, WritableFileSystem}; use rspack_paths::Utf8Path; use crate::node::ThreadsafeNodeFS; -pub struct AsyncNodeWritableFileSystem(ThreadsafeNodeFS); +pub struct NodeFileSystem(ThreadsafeNodeFS); -impl std::fmt::Debug for AsyncNodeWritableFileSystem { +impl std::fmt::Debug for NodeFileSystem { fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("AsyncNodeWritableFileSystem").finish() } } -impl AsyncNodeWritableFileSystem { +impl NodeFileSystem { pub fn new(tsfs: ThreadsafeNodeFS) -> napi::Result { Ok(Self(tsfs)) } } - -impl AsyncWritableFileSystem for AsyncNodeWritableFileSystem { - fn create_dir<'a>(&'a self, dir: &'a Utf8Path) -> BoxFuture<'a, rspack_fs::Result<()>> { +#[async_trait] +impl WritableFileSystem for NodeFileSystem { + async fn create_dir(&self, dir: &Utf8Path) -> rspack_fs::Result<()> { let fut = async { let dir = dir.as_str().to_string(); self.0.mkdir.call(dir).await.map_err(|e| { @@ -31,10 +32,10 @@ impl AsyncWritableFileSystem for AsyncNodeWritableFileSystem { }) }; - Box::pin(fut) + fut.await } - fn create_dir_all<'a>(&'a self, dir: &'a Utf8Path) -> BoxFuture<'a, rspack_fs::Result<()>> { + async fn create_dir_all(&self, dir: &Utf8Path) -> rspack_fs::Result<()> { let fut = async { let dir = dir.as_str().to_string(); self @@ -50,14 +51,10 @@ impl AsyncWritableFileSystem for AsyncNodeWritableFileSystem { }) .map(|_| ()) }; - Box::pin(fut) + fut.await } - fn write<'a>( - &'a self, - file: &'a Utf8Path, - data: &'a [u8], - ) -> BoxFuture<'a, rspack_fs::Result<()>> { + async fn write(&self, file: &Utf8Path, data: &[u8]) -> rspack_fs::Result<()> { let fut = async { let file = file.as_str().to_string(); let data = data.to_vec(); @@ -73,7 +70,7 @@ impl AsyncWritableFileSystem for AsyncNodeWritableFileSystem { )) }) }; - Box::pin(fut) + fut.await } fn remove_file<'a>(&'a self, file: &'a Utf8Path) -> BoxFuture<'a, rspack_fs::Result<()>> { diff --git a/crates/rspack_loader_runner/src/runner.rs b/crates/rspack_loader_runner/src/runner.rs index a0a326e60c22..b8436d730d03 100644 --- a/crates/rspack_loader_runner/src/runner.rs +++ b/crates/rspack_loader_runner/src/runner.rs @@ -1,7 +1,7 @@ use std::{fmt::Debug, path::PathBuf, sync::Arc}; use rspack_error::{error, IntoTWithDiagnosticArray, Result, TWithDiagnosticArray}; -use 
rspack_fs::SyncReadableFileSystem; +use rspack_fs::FileSystem; use rspack_sources::SourceMap; use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet}; use tokio::task::spawn_blocking; @@ -27,7 +27,7 @@ impl LoaderContext { async fn process_resource( loader_context: &mut LoaderContext, - fs: Arc, + fs: Arc, ) -> Result<()> { if let Some(plugin) = &loader_context.plugin && let Some(processed_resource) = plugin @@ -108,7 +108,7 @@ pub async fn run_loaders( resource_data: Arc, plugins: Option>>, context: Context, - fs: Arc, + fs: Arc, ) -> Result> { let loaders = loaders .into_iter() diff --git a/crates/rspack_macros/src/cacheable.rs b/crates/rspack_macros/src/cacheable.rs deleted file mode 100644 index 9253888ee5dc..000000000000 --- a/crates/rspack_macros/src/cacheable.rs +++ /dev/null @@ -1,258 +0,0 @@ -use proc_macro::TokenStream; -use quote::quote; -use syn::{ - parse::{Parse, ParseStream}, - parse_macro_input, parse_quote, - visit_mut::VisitMut, - Field, GenericParam, Item, Result, Token, Type, -}; - -mod kw { - syn::custom_keyword!(with); - syn::custom_keyword!(crate_path); - syn::custom_keyword!(hashable); -} - -/// #[cacheable] type-only args -pub struct CacheableArgs { - pub crate_path: syn::Path, - pub with: Option, - pub hashable: bool, -} -impl Parse for CacheableArgs { - fn parse(input: ParseStream) -> Result { - let mut with = None; - let mut crate_path = parse_quote! 
{ ::rspack_cacheable }; - let mut hashable = false; - - let mut needs_punct = false; - while !input.is_empty() { - if needs_punct { - input.parse::()?; - } - - if input.peek(kw::crate_path) { - input.parse::()?; - input.parse::()?; - crate_path = input.parse::()?; - } else if input.peek(kw::with) { - if with.is_some() { - return Err(input.error("duplicate with argument")); - } - - input.parse::()?; - input.parse::()?; - with = Some(input.parse::()?); - } else if input.peek(kw::hashable) { - input.parse::()?; - hashable = true; - } else { - return Err(input.error("unexpected #[cacheable] type-only parameters")); - } - - needs_punct = true; - } - - Ok(Self { - crate_path, - with, - hashable, - }) - } -} - -/// A visitor to transform #[cacheable] on field -#[derive(Default)] -struct FieldAttrVisitor { - /// Remove all #[cacheable] attr on field - clean: bool, - /// Whether any field set #[cacheable(omit_bounds)] - omit_bounds: bool, -} - -impl VisitMut for FieldAttrVisitor { - fn visit_field_mut(&mut self, f: &mut Field) { - let mut with_info = None; - let mut omit_bounds = false; - f.attrs.retain(|item| { - if item.path().is_ident("cacheable") { - let _ = item.parse_nested_meta(|meta| { - if meta.path.is_ident("with") { - meta.input.parse::()?; - with_info = Some(meta.input.parse::()?); - return Ok(()); - } - if meta.path.is_ident("omit_bounds") { - omit_bounds = true; - return Ok(()); - } - Err(meta.error("unrecognized cacheable arguments")) - }); - false - } else { - true - } - }); - - // enable clean, just remove all cacheable attributes - if self.clean { - return; - } - // add rkyv with - if let Some(with_info) = with_info { - f.attrs.push(parse_quote!(#[rkyv(with=#with_info)])); - } - // add rkyv omit_bounds - if omit_bounds { - self.omit_bounds = true; - f.attrs.push(parse_quote!(#[rkyv(omit_bounds)])); - } - } -} - -/// impl #[cacheable] without with args -pub fn impl_cacheable(tokens: TokenStream, args: CacheableArgs) -> TokenStream { - let mut input = 
parse_macro_input!(tokens as Item); - - let mut visitor = FieldAttrVisitor::default(); - visitor.visit_item_mut(&mut input); - - let crate_path = &args.crate_path; - let archived_impl_hash = if args.hashable { - quote! {#[rkyv(derive(Hash, PartialEq, Eq))]} - } else { - quote! {} - }; - let bounds = if visitor.omit_bounds { - quote! { - #[rkyv(serialize_bounds( - __S: #crate_path::__private::rkyv::ser::Writer + #crate_path::__private::rkyv::ser::Allocator + #crate_path::__private::rkyv::rancor::Fallible, - ))] - #[rkyv(deserialize_bounds( - __D: #crate_path::__private::rkyv::rancor::Fallible - ))] - #[rkyv(bytecheck( - bounds( - __C: #crate_path::__private::rkyv::validation::ArchiveContext + #crate_path::__private::rkyv::rancor::Fallible, - ) - ))] - } - } else { - quote! {} - }; - - quote! { - #[derive( - #crate_path::__private::rkyv::Archive, - #crate_path::__private::rkyv::Deserialize, - #crate_path::__private::rkyv::Serialize - )] - #[rkyv(crate=#crate_path::__private::rkyv)] - #archived_impl_hash - #bounds - #input - } - .into() -} - -/// impl #[cacheable] with `with` args -pub fn impl_cacheable_with(tokens: TokenStream, args: CacheableArgs) -> TokenStream { - let mut input = parse_macro_input!(tokens as Item); - - let mut visitor = FieldAttrVisitor { - clean: true, - ..Default::default() - }; - visitor.visit_item_mut(&mut input); - - let crate_path = args.crate_path; - let with = args.with; - - let (ident, generics) = match &input { - Item::Enum(input) => (&input.ident, &input.generics), - Item::Struct(input) => (&input.ident, &input.generics), - _ => panic!("expect enum or struct"), - }; - let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); - let generic_params = generics.params.iter().map(|p| { - // remove default value - let mut p = p.clone(); - if let GenericParam::Type(param) = &mut p { - param.eq_token = None; - param.default = None; - } - quote! { #p } - }); - let generic_params = quote! 
{ #(#generic_params),* }; - - let where_params = if let Some(where_clause) = &generics.where_clause { - let params = where_clause.predicates.iter().map(|w| { - quote! { #w } - }); - quote! { #(#params),* } - } else { - quote! {} - }; - - let archived = quote! {<#with as rkyv::with::ArchiveWith<#ident #ty_generics>>::Archived}; - let resolver = quote! {<#with as rkyv::with::ArchiveWith<#ident #ty_generics>>::Resolver}; - quote! { - #input - #[allow(non_upper_case_globals)] - const _: () = { - use #crate_path::__private::rkyv; - use rkyv::{ - rancor::Fallible, - with::{ArchiveWith, DeserializeWith, SerializeWith}, - Archive, Deserialize, Place, Serialize - }; - impl #impl_generics Archive for #ident #ty_generics #where_clause { - type Archived = #archived; - type Resolver = #resolver; - #[inline] - fn resolve(&self, resolver: Self::Resolver, out: Place) { - <#with as ArchiveWith<#ident #ty_generics>>::resolve_with(self, resolver, out) - } - } - impl<__S, #generic_params> Serialize<__S> for #ident #ty_generics - where - __S: Fallible + ?Sized, - #with: SerializeWith<#ident #ty_generics, __S>, - #where_params - { - #[inline] - fn serialize(&self, serializer: &mut __S) -> Result { - #with::serialize_with(self, serializer) - } - } - impl<__D, #generic_params> Deserialize<#ident #ty_generics, __D> for #archived - where - __D: Fallible + ?Sized, - #with: DeserializeWith<#archived, #ident #ty_generics, __D>, - #where_params - { - #[inline] - fn deserialize(&self, deserializer: &mut __D) -> Result<#ident #ty_generics, __D::Error> { - #with::deserialize_with(self, deserializer) - } - } - }; - } - .into() -} - -/// impl cacheable when disable -pub fn disable_cacheable(tokens: TokenStream) -> TokenStream { - let mut input = parse_macro_input!(tokens as Item); - - let mut visitor = FieldAttrVisitor { - clean: true, - ..Default::default() - }; - visitor.visit_item_mut(&mut input); - - quote! 
{ - #input - } - .into() -} diff --git a/crates/rspack_macros/src/cacheable/args.rs b/crates/rspack_macros/src/cacheable/args.rs new file mode 100644 index 000000000000..b840d3eef430 --- /dev/null +++ b/crates/rspack_macros/src/cacheable/args.rs @@ -0,0 +1,65 @@ +use syn::{ + parse::{Parse, ParseStream}, + parse_quote, Result, Token, +}; + +mod kw { + syn::custom_keyword!(with); + syn::custom_keyword!(hashable); +} + +/// #[cacheable] type-only args +pub struct CacheableArgs { + pub crate_path: syn::Path, + pub r#as: Option, + pub with: Option, + pub hashable: bool, +} + +impl Parse for CacheableArgs { + fn parse(input: ParseStream) -> Result { + let mut crate_path = parse_quote! { ::rspack_cacheable }; + let mut r#as = None; + let mut with = None; + let mut hashable = false; + + let mut needs_punct = false; + while !input.is_empty() { + if needs_punct { + input.parse::()?; + } + + if input.peek(syn::token::Crate) { + input.parse::()?; + input.parse::()?; + crate_path = input.parse::()?; + } else if input.peek(syn::token::As) { + input.parse::()?; + input.parse::()?; + r#as = Some(input.parse::()?); + } else if input.peek(kw::with) { + if with.is_some() { + return Err(input.error("duplicate with argument")); + } + + input.parse::()?; + input.parse::()?; + with = Some(input.parse::()?); + } else if input.peek(kw::hashable) { + input.parse::()?; + hashable = true; + } else { + return Err(input.error("unexpected #[cacheable] type-only parameters")); + } + + needs_punct = true; + } + + Ok(Self { + crate_path, + r#as, + with, + hashable, + }) + } +} diff --git a/crates/rspack_macros/src/cacheable/impl.rs b/crates/rspack_macros/src/cacheable/impl.rs new file mode 100644 index 000000000000..f5b018fb910f --- /dev/null +++ b/crates/rspack_macros/src/cacheable/impl.rs @@ -0,0 +1,93 @@ +use proc_macro::TokenStream; +use quote::quote; +use syn::{parse_macro_input, parse_quote, visit_mut::VisitMut, Field, Item, Token, Type}; + +use super::CacheableArgs; + +/// A visitor to 
collect #[cacheable] info on field +#[derive(Default)] +struct FieldAttrVisitor { + /// Whether any field set #[cacheable(omit_bounds)] + omit_bounds: bool, +} + +impl VisitMut for FieldAttrVisitor { + fn visit_field_mut(&mut self, f: &mut Field) { + let mut with_info = None; + let mut omit_bounds = false; + f.attrs.retain(|item| { + if item.path().is_ident("cacheable") { + let _ = item.parse_nested_meta(|meta| { + if meta.path.is_ident("with") { + meta.input.parse::()?; + with_info = Some(meta.input.parse::()?); + return Ok(()); + } + if meta.path.is_ident("omit_bounds") { + omit_bounds = true; + return Ok(()); + } + Err(meta.error("unrecognized cacheable arguments")) + }); + false + } else { + true + } + }); + + // add rkyv with + if let Some(with_info) = with_info { + f.attrs.push(parse_quote!(#[rkyv(with=#with_info)])); + } + // add rkyv omit_bounds + if omit_bounds { + self.omit_bounds = true; + f.attrs.push(parse_quote!(#[rkyv(omit_bounds)])); + } + } +} + +/// impl #[cacheable] without with args +pub fn impl_cacheable(tokens: TokenStream, args: CacheableArgs) -> TokenStream { + let mut input = parse_macro_input!(tokens as Item); + + let mut visitor = FieldAttrVisitor::default(); + visitor.visit_item_mut(&mut input); + + let crate_path = &args.crate_path; + let archived_impl_hash = if args.hashable { + quote! {#[rkyv(derive(Hash, PartialEq, Eq))]} + } else { + quote! {} + }; + let bounds = if visitor.omit_bounds { + quote! { + #[rkyv(serialize_bounds( + __S: #crate_path::__private::rkyv::ser::Writer + #crate_path::__private::rkyv::ser::Allocator + #crate_path::__private::rkyv::rancor::Fallible, + ))] + #[rkyv(deserialize_bounds( + __D: #crate_path::__private::rkyv::rancor::Fallible + ))] + #[rkyv(bytecheck( + bounds( + __C: #crate_path::__private::rkyv::validation::ArchiveContext + #crate_path::__private::rkyv::rancor::Fallible, + ) + ))] + } + } else { + quote! {} + }; + + quote! 
{ + #[derive( + #crate_path::__private::rkyv::Archive, + #crate_path::__private::rkyv::Deserialize, + #crate_path::__private::rkyv::Serialize + )] + #[rkyv(crate=#crate_path::__private::rkyv)] + #archived_impl_hash + #bounds + #input + } + .into() +} diff --git a/crates/rspack_macros/src/cacheable/impl_as.rs b/crates/rspack_macros/src/cacheable/impl_as.rs new file mode 100644 index 000000000000..37051eaa6cd9 --- /dev/null +++ b/crates/rspack_macros/src/cacheable/impl_as.rs @@ -0,0 +1,90 @@ +use proc_macro::TokenStream; +use quote::quote; +use syn::{parse_macro_input, parse_quote, visit_mut::VisitMut, Field, Item, Token, Type}; + +use super::CacheableArgs; + +/// A visitor to add #[cacheable(omit_bounds)] and collect #[cacheable(with=...)] info on field +#[derive(Default)] +struct FieldAttrVisitor { + /// with info collected + /// + /// # Example + /// + /// ```rust,ignore + /// #[cacheable] + /// struct Test { + /// #[cacheable(with=AsMap)] + /// test_field: HashMap, + /// } + /// + /// // with_info is vec![(AsMap, HashMap)] + /// ``` + with_info: Vec<(Type, Type)>, +} + +impl VisitMut for FieldAttrVisitor { + fn visit_field_mut(&mut self, f: &mut Field) { + let mut with_info = None; + f.attrs.retain(|item| { + if item.path().is_ident("cacheable") { + let _ = item.parse_nested_meta(|meta| { + if meta.path.is_ident("with") { + meta.input.parse::()?; + with_info = Some(meta.input.parse::()?); + return Ok(()); + } + if meta.path.is_ident("omit_bounds") { + return Ok(()); + } + Err(meta.error("unrecognized cacheable arguments")) + }); + false + } else { + true + } + }); + + // add rkyv with + if let Some(with_info) = with_info { + f.attrs.push(parse_quote!(#[rkyv(with=#with_info)])); + self.with_info.push((with_info, f.ty.clone())); + } + // add rkyv omit_bounds + f.attrs.push(parse_quote!(#[rkyv(omit_bounds)])); + } +} + +/// impl #[cacheable] with `as` args +pub fn impl_cacheable_as(tokens: TokenStream, args: CacheableArgs) -> TokenStream { + let mut input = 
parse_macro_input!(tokens as Item); + + let mut visitor = FieldAttrVisitor::default(); + visitor.visit_item_mut(&mut input); + + let crate_path = &args.crate_path; + let r#as = &args.r#as; + + let serialize_bounds = visitor + .with_info + .iter() + .map(|(with, ty)| quote!(#with: #crate_path::__private::rkyv::with::SerializeWith<#ty, __S>)); + let serialize_bounds = quote! { #(#serialize_bounds),* }; + + quote! { + #[derive( + #crate_path::__private::rkyv::Archive, + #crate_path::__private::rkyv::Serialize + )] + #[rkyv( + crate=#crate_path::__private::rkyv, + as=#crate_path::__private::rkyv::Archived<#r#as> + )] + #[rkyv(serialize_bounds( + __S: #crate_path::__private::rkyv::ser::Writer + #crate_path::__private::rkyv::ser::Allocator + #crate_path::__private::rkyv::rancor::Fallible, + #serialize_bounds + ))] + #input + } + .into() +} diff --git a/crates/rspack_macros/src/cacheable/impl_disable.rs b/crates/rspack_macros/src/cacheable/impl_disable.rs new file mode 100644 index 000000000000..6bc2fb469f37 --- /dev/null +++ b/crates/rspack_macros/src/cacheable/impl_disable.rs @@ -0,0 +1,25 @@ +use proc_macro::TokenStream; +use quote::quote; +use syn::{parse_macro_input, visit_mut::VisitMut, Field, Item}; + +/// A visitor to remove #[cacheable] on field +struct CleanFieldAttrVisitor; + +impl VisitMut for CleanFieldAttrVisitor { + fn visit_field_mut(&mut self, f: &mut Field) { + f.attrs.retain(|item| !item.path().is_ident("cacheable")); + } +} + +/// impl cacheable when disable +pub fn disable_cacheable(tokens: TokenStream) -> TokenStream { + let mut input = parse_macro_input!(tokens as Item); + + let mut visitor = CleanFieldAttrVisitor; + visitor.visit_item_mut(&mut input); + + quote! 
{ + #input + } + .into() +} diff --git a/crates/rspack_macros/src/cacheable/impl_with.rs b/crates/rspack_macros/src/cacheable/impl_with.rs new file mode 100644 index 000000000000..3847b9e4d8c0 --- /dev/null +++ b/crates/rspack_macros/src/cacheable/impl_with.rs @@ -0,0 +1,97 @@ +use proc_macro::TokenStream; +use quote::quote; +use syn::{parse_macro_input, visit_mut::VisitMut, Field, GenericParam, Item}; + +use super::CacheableArgs; + +/// A visitor to remove #[cacheable] on field +struct CleanFieldAttrVisitor; + +impl VisitMut for CleanFieldAttrVisitor { + fn visit_field_mut(&mut self, f: &mut Field) { + f.attrs.retain(|item| !item.path().is_ident("cacheable")); + } +} + +/// impl #[cacheable] with `with` args +pub fn impl_cacheable_with(tokens: TokenStream, args: CacheableArgs) -> TokenStream { + let mut input = parse_macro_input!(tokens as Item); + + let mut visitor = CleanFieldAttrVisitor; + visitor.visit_item_mut(&mut input); + + let crate_path = args.crate_path; + let with = args.with; + + let (ident, generics) = match &input { + Item::Enum(input) => (&input.ident, &input.generics), + Item::Struct(input) => (&input.ident, &input.generics), + _ => panic!("expect enum or struct"), + }; + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + let generic_params = generics.params.iter().map(|p| { + // remove default value + let mut p = p.clone(); + if let GenericParam::Type(param) = &mut p { + param.eq_token = None; + param.default = None; + } + quote! { #p } + }); + let generic_params = quote! { #(#generic_params),* }; + + let where_params = if let Some(where_clause) = &generics.where_clause { + let params = where_clause.predicates.iter().map(|w| { + quote! { #w } + }); + quote! { #(#params),* } + } else { + quote! {} + }; + + let archived = quote! {<#with as rkyv::with::ArchiveWith<#ident #ty_generics>>::Archived}; + let resolver = quote! {<#with as rkyv::with::ArchiveWith<#ident #ty_generics>>::Resolver}; + quote! 
{ + #input + #[allow(non_upper_case_globals)] + const _: () = { + use #crate_path::__private::rkyv; + use rkyv::{ + rancor::Fallible, + with::{ArchiveWith, DeserializeWith, SerializeWith}, + Archive, Deserialize, Place, Serialize + }; + impl #impl_generics Archive for #ident #ty_generics #where_clause { + type Archived = #archived; + type Resolver = #resolver; + #[inline] + fn resolve(&self, resolver: Self::Resolver, out: Place) { + <#with as ArchiveWith<#ident #ty_generics>>::resolve_with(self, resolver, out) + } + } + impl<__S, #generic_params> Serialize<__S> for #ident #ty_generics + where + __S: Fallible + ?Sized, + #with: SerializeWith<#ident #ty_generics, __S>, + #where_params + { + #[inline] + fn serialize(&self, serializer: &mut __S) -> Result { + #with::serialize_with(self, serializer) + } + } + impl<__D, #generic_params> Deserialize<#ident #ty_generics, __D> for #archived + where + __D: Fallible + ?Sized, + #with: DeserializeWith<#archived, #ident #ty_generics, __D>, + #where_params + { + #[inline] + fn deserialize(&self, deserializer: &mut __D) -> Result<#ident #ty_generics, __D::Error> { + #with::deserialize_with(self, deserializer) + } + } + }; + } + .into() +} diff --git a/crates/rspack_macros/src/cacheable/mod.rs b/crates/rspack_macros/src/cacheable/mod.rs new file mode 100644 index 000000000000..f265139793d7 --- /dev/null +++ b/crates/rspack_macros/src/cacheable/mod.rs @@ -0,0 +1,26 @@ +mod args; +mod r#impl; +mod impl_as; +mod impl_disable; +mod impl_with; + +use args::CacheableArgs; +use impl_as::impl_cacheable_as; +use impl_with::impl_cacheable_with; +use r#impl::impl_cacheable; + +pub fn cacheable( + args: proc_macro::TokenStream, + tokens: proc_macro::TokenStream, +) -> proc_macro::TokenStream { + let args = syn::parse_macro_input!(args as CacheableArgs); + if args.with.is_some() { + impl_cacheable_with(tokens, args) + } else if args.r#as.is_some() { + impl_cacheable_as(tokens, args) + } else { + impl_cacheable(tokens, args) + } +} + +pub use 
impl_disable::disable_cacheable; diff --git a/crates/rspack_macros/src/lib.rs b/crates/rspack_macros/src/lib.rs index f0157b8918b6..5b2363ae2003 100644 --- a/crates/rspack_macros/src/lib.rs +++ b/crates/rspack_macros/src/lib.rs @@ -69,12 +69,7 @@ pub fn cacheable( args: proc_macro::TokenStream, tokens: proc_macro::TokenStream, ) -> proc_macro::TokenStream { - let args = syn::parse_macro_input!(args as cacheable::CacheableArgs); - if args.with.is_some() { - cacheable::impl_cacheable_with(tokens, args) - } else { - cacheable::impl_cacheable(tokens, args) - } + cacheable::cacheable(args, tokens) } #[proc_macro_attribute] diff --git a/crates/rspack_napi/src/callback.rs b/crates/rspack_napi/src/callback.rs index 9bf5bd9db223..149f0d8187ef 100644 --- a/crates/rspack_napi/src/callback.rs +++ b/crates/rspack_napi/src/callback.rs @@ -40,6 +40,7 @@ pub struct JsCallback { } unsafe impl Send for JsCallback {} +unsafe impl Sync for JsCallback {} impl JsCallback { /// # Safety diff --git a/crates/rspack_napi/src/threadsafe_function.rs b/crates/rspack_napi/src/threadsafe_function.rs index 76f8a2f10109..1ba71dadd89d 100644 --- a/crates/rspack_napi/src/threadsafe_function.rs +++ b/crates/rspack_napi/src/threadsafe_function.rs @@ -1,4 +1,8 @@ -use std::{fmt::Debug, marker::PhantomData, sync::Arc}; +use std::{ + fmt::Debug, + marker::PhantomData, + sync::{Arc, OnceLock}, +}; use napi::{ bindgen_prelude::{ @@ -15,10 +19,11 @@ use crate::{JsCallback, NapiErrorExt}; type ErrorResolver = dyn FnOnce(Env); +static ERROR_RESOLVER: OnceLock>> = OnceLock::new(); + pub struct ThreadsafeFunction { inner: Arc>, env: napi_env, - resolver: JsCallback>, _data: PhantomData, } @@ -33,7 +38,6 @@ impl Clone for ThreadsafeFunction { Self { inner: self.inner.clone(), env: self.env, - resolver: self.resolver.clone(), _data: self._data, } } @@ -49,10 +53,11 @@ impl FromNapiValue for ThreadsafeFunction< env, napi_val, ) }?; + let _ = ERROR_RESOLVER + .get_or_init(|| unsafe { 
JsCallback::new(env).expect("should initialize error resolver") }); Ok(Self { inner: Arc::new(inner), env, - resolver: unsafe { JsCallback::new(env) }?, _data: PhantomData, }) } @@ -61,10 +66,14 @@ impl FromNapiValue for ThreadsafeFunction< impl ThreadsafeFunction { async fn resolve_error(&self, err: napi::Error) -> Error { let (tx, rx) = tokio::sync::oneshot::channel::(); - self.resolver.call(Box::new(move |env| { - let err = err.into_rspack_error_with_detail(&env); - tx.send(err).expect("failed to resolve js error"); - })); + ERROR_RESOLVER + .get() + // SAFETY: The error resolver is initialized in `FromNapiValue::from_napi_value` and it's the only way to create a tsfn. + .expect("should have error resolver initialized") + .call(Box::new(move |env| { + let err = err.into_rspack_error_with_detail(&env); + tx.send(err).expect("failed to resolve js error"); + })); rx.await.expect("failed to resolve js error") } diff --git a/crates/rspack_plugin_asset/src/lib.rs b/crates/rspack_plugin_asset/src/lib.rs index 0dcaada3eed8..7a9174033cd9 100644 --- a/crates/rspack_plugin_asset/src/lib.rs +++ b/crates/rspack_plugin_asset/src/lib.rs @@ -102,6 +102,20 @@ impl AssetParserAndGenerator { } } + fn decode_data_uri_content(encoding: &str, content: &str, source: &BoxSource) -> Vec { + if encoding == "base64" + && let Some(cleaned) = rspack_base64::clean_base64(content) + { + return rspack_base64::decode_to_vec(cleaned.as_bytes()) + .unwrap_or_else(|_| source.buffer().to_vec()); + } + + match urlencoding::decode(content) { + Ok(decoded_content) => decoded_content.as_bytes().to_vec(), + Err(_) => content.as_bytes().to_vec(), + } + } + fn hash_for_source( &self, source: &BoxSource, @@ -184,7 +198,12 @@ impl AssetParserAndGenerator { encoding: &str, source: &BoxSource, ) -> Result { - if let Some(encoded_content) = &resource_data.encoded_content { + if let Some(encoded_content) = &resource_data.encoded_content + && let Some(resource_encoding) = &resource_data.encoding + && 
resource_encoding == encoding + && AssetParserAndGenerator::decode_data_uri_content(encoding, encoded_content, source) + .eq(&source.buffer().to_vec()) + { return Ok(encoded_content.to_owned()); } if encoding.is_empty() { @@ -593,13 +612,13 @@ async fn render_manifest( .get::() .expect("should have asset_info") .inner(); - RenderManifestEntry::new( - source.clone(), - asset_filename.to_owned(), - asset_info.to_owned(), - true, - true, - ) + RenderManifestEntry { + source: source.clone(), + filename: asset_filename.to_owned(), + has_filename: true, + info: asset_info.to_owned(), + auxiliary: true, + } }); Ok(result) diff --git a/crates/rspack_plugin_css/src/plugin/impl_plugin_for_css_plugin.rs b/crates/rspack_plugin_css/src/plugin/impl_plugin_for_css_plugin.rs index 010763480e30..fdf38638997d 100644 --- a/crates/rspack_plugin_css/src/plugin/impl_plugin_for_css_plugin.rs +++ b/crates/rspack_plugin_css/src/plugin/impl_plugin_for_css_plugin.rs @@ -5,7 +5,8 @@ use std::sync::Arc; use async_trait::async_trait; use rayon::prelude::*; -use rspack_core::rspack_sources::ReplaceSource; +use rspack_collections::DatabaseItem; +use rspack_core::rspack_sources::{BoxSource, CachedSource, ReplaceSource}; use rspack_core::{ get_css_chunk_filename_template, rspack_sources::{ConcatSource, RawSource, Source, SourceExt}, @@ -13,9 +14,9 @@ use rspack_core::{ SourceType, }; use rspack_core::{ - ChunkLoading, ChunkLoadingType, ChunkUkey, Compilation, CompilationContentHash, + AssetInfo, ChunkLoading, ChunkLoadingType, ChunkUkey, Compilation, CompilationContentHash, CompilationParams, CompilationRenderManifest, CompilationRuntimeRequirementInTree, - CompilerCompilation, CompilerOptions, DependencyType, LibIdentOptions, PublicPath, + CompilerCompilation, CompilerOptions, DependencyType, LibIdentOptions, ModuleGraph, PublicPath, RuntimeGlobals, SelfModuleFactory, }; use rspack_error::{Diagnostic, Result}; @@ -37,9 +38,9 @@ impl CssPlugin { fn get_chunk_unused_local_idents( compilation: 
&Compilation, chunk: &Chunk, - ordered_css_modules: &[&dyn Module], + css_modules: &[&dyn Module], ) -> HashSet { - ordered_css_modules + css_modules .iter() .filter_map(|module| { let module_id = &module.identifier(); @@ -55,6 +56,65 @@ impl CssPlugin { .collect() } + fn render_chunk( + &self, + compilation: &Compilation, + mg: &ModuleGraph, + chunk: &Chunk, + output_path: &str, + css_import_modules: Vec<&dyn Module>, + css_modules: Vec<&dyn Module>, + ) -> Result<(BoxSource, Vec)> { + let (ordered_css_modules, conflicts) = + Self::get_ordered_chunk_css_modules(chunk, compilation, css_import_modules, css_modules); + let source = Self::render_chunk_to_source(compilation, chunk, &ordered_css_modules)?; + + let content = source.source(); + let len = AUTO_PUBLIC_PATH_PLACEHOLDER.len(); + let auto_public_path_matches: Vec<_> = content + .match_indices(AUTO_PUBLIC_PATH_PLACEHOLDER) + .map(|(index, _)| (index, index + len)) + .collect(); + let source = if !auto_public_path_matches.is_empty() { + let mut replace = ReplaceSource::new(source); + for (start, end) in auto_public_path_matches { + let relative = PublicPath::render_auto_public_path(compilation, output_path); + replace.replace(start as u32, end as u32, &relative, None); + } + replace.boxed() + } else { + source.boxed() + }; + let mut diagnostics = vec![]; + if let Some(conflicts) = conflicts { + diagnostics.extend(conflicts.into_iter().map(|conflict| { + let chunk = compilation.chunk_by_ukey.expect_get(&conflict.chunk); + + let failed_module = mg + .module_by_identifier(&conflict.failed_module) + .expect("should have module"); + let selected_module = mg + .module_by_identifier(&conflict.selected_module) + .expect("should have module"); + + Diagnostic::warn( + "Conflicting order".into(), + format!( + "chunk {}\nConflicting order between {} and {}", + chunk + .name() + .unwrap_or(chunk.id().expect("should have chunk id")), + failed_module.readable_identifier(&compilation.options.context), + 
selected_module.readable_identifier(&compilation.options.context) + ), + ) + .with_file(Some(output_path.to_owned().into())) + .with_chunk(Some(chunk.ukey().as_u32())) + })); + } + Ok((source, diagnostics)) + } + fn render_chunk_to_source( compilation: &Compilation, chunk: &Chunk, @@ -203,12 +263,16 @@ async fn content_hash( ) -> Result<()> { let chunk = compilation.chunk_by_ukey.expect_get(chunk_ukey); let module_graph = compilation.get_module_graph(); - let (ordered_modules, _) = Self::get_ordered_chunk_css_modules( - chunk, - &compilation.chunk_graph, - &module_graph, - compilation, - ); + let css_import_modules = compilation + .chunk_graph + .get_chunk_modules_iterable_by_source_type(chunk_ukey, SourceType::CssImport, &module_graph) + .collect::>(); + let css_modules = compilation + .chunk_graph + .get_chunk_modules_iterable_by_source_type(chunk_ukey, SourceType::Css, &module_graph) + .collect::>(); + let (ordered_modules, _) = + Self::get_ordered_chunk_css_modules(chunk, compilation, css_import_modules, css_modules); let mut hasher = hashes .entry(SourceType::Css) .or_insert_with(|| RspackHash::from(&compilation.options.output)); @@ -246,27 +310,27 @@ async fn render_manifest( return Ok(()); } let module_graph = compilation.get_module_graph(); - let (ordered_css_modules, conflicts) = Self::get_ordered_chunk_css_modules( - chunk, - &compilation.chunk_graph, - &module_graph, - compilation, - ); - - // Prevent generating css files for chunks which don't contain css modules. 
- if ordered_css_modules.is_empty() { + let css_import_modules = compilation + .chunk_graph + .get_chunk_modules_iterable_by_source_type(chunk_ukey, SourceType::CssImport, &module_graph) + .collect::>(); + let css_modules = compilation + .chunk_graph + .get_chunk_modules_iterable_by_source_type(chunk_ukey, SourceType::Css, &module_graph) + .collect::>(); + if css_import_modules.is_empty() && css_modules.is_empty() { return Ok(()); } - let source = Self::render_chunk_to_source(compilation, chunk, &ordered_css_modules)?; - let unused_idents = Self::get_chunk_unused_local_idents(compilation, chunk, &ordered_css_modules); - let filename_template = get_css_chunk_filename_template( chunk, &compilation.options.output, &compilation.chunk_group_by_ukey, ); - let (output_path, mut asset_info) = compilation.get_path_with_info( + let mut asset_info = AssetInfo::default(); + let unused_idents = Self::get_chunk_unused_local_idents(compilation, chunk, &css_modules); + asset_info.set_css_unused_idents(unused_idents); + let output_path = compilation.get_path_with_info( filename_template, PathData::default() .chunk_id_optional(chunk.id()) @@ -281,59 +345,33 @@ async fn render_manifest( compilation.options.output.hash_digest_length, )) .runtime(chunk.runtime().as_str()), + &mut asset_info, )?; - asset_info.set_css_unused_idents(unused_idents); - let content = source.source(); - let len = AUTO_PUBLIC_PATH_PLACEHOLDER.len(); - let auto_public_path_matches: Vec<_> = content - .match_indices(AUTO_PUBLIC_PATH_PLACEHOLDER) - .map(|(index, _)| (index, index + len)) - .collect(); - let source = if !auto_public_path_matches.is_empty() { - let mut replace = ReplaceSource::new(source); - for (start, end) in auto_public_path_matches { - let relative = PublicPath::render_auto_public_path(compilation, &output_path); - replace.replace(start as u32, end as u32, &relative, None); - } - replace.boxed() - } else { - source.boxed() - }; - if let Some(conflicts) = conflicts { - 
diagnostics.extend(conflicts.into_iter().map(|conflict| { - let chunk = compilation.chunk_by_ukey.expect_get(&conflict.chunk); - let mg = compilation.get_module_graph(); - - let failed_module = mg - .module_by_identifier(&conflict.failed_module) - .expect("should have module"); - let selected_module = mg - .module_by_identifier(&conflict.selected_module) - .expect("should have module"); - - Diagnostic::warn( - "Conflicting order".into(), - format!( - "chunk {}\nConflicting order between {} and {}", - chunk - .name() - .unwrap_or(chunk.id().expect("should have chunk id")), - failed_module.readable_identifier(&compilation.options.context), - selected_module.readable_identifier(&compilation.options.context) - ), - ) - .with_file(Some(output_path.to_owned().into())) - .with_chunk(Some(chunk_ukey.as_u32())) - })); - } - manifest.push(RenderManifestEntry::new( - source.boxed(), - output_path, - asset_info, - false, - false, - )); + let (source, more_diagnostics) = compilation + .old_cache + .chunk_render_occasion + .use_cache(compilation, chunk, &SourceType::Css, || async { + let (source, diagnostics) = self.render_chunk( + compilation, + &module_graph, + chunk, + &output_path, + css_import_modules, + css_modules, + )?; + Ok((CachedSource::new(source).boxed(), diagnostics)) + }) + .await?; + + diagnostics.extend(more_diagnostics); + manifest.push(RenderManifestEntry { + source: source.boxed(), + filename: output_path, + has_filename: false, + info: asset_info, + auxiliary: false, + }); Ok(()) } diff --git a/crates/rspack_plugin_css/src/plugin/mod.rs b/crates/rspack_plugin_css/src/plugin/mod.rs index 100c916c9d23..0f552f81db67 100644 --- a/crates/rspack_plugin_css/src/plugin/mod.rs +++ b/crates/rspack_plugin_css/src/plugin/mod.rs @@ -3,7 +3,7 @@ mod impl_plugin_for_css_plugin; use std::cmp::{self, Reverse}; use rspack_collections::{DatabaseItem, IdentifierSet}; -use rspack_core::{Chunk, ChunkGraph, Compilation, Module, ModuleGraph, SourceType}; +use rspack_core::{Chunk, 
Compilation, Module}; use rspack_core::{ChunkUkey, ModuleIdentifier}; use rspack_hook::plugin; @@ -19,31 +19,18 @@ pub struct CssOrderConflicts { } impl CssPlugin { - pub(crate) fn get_ordered_chunk_css_modules<'chunk_graph>( + pub(crate) fn get_ordered_chunk_css_modules<'a>( chunk: &Chunk, - chunk_graph: &'chunk_graph ChunkGraph, - module_graph: &'chunk_graph ModuleGraph, compilation: &Compilation, - ) -> ( - Vec<&'chunk_graph dyn Module>, - Option>, - ) { + mut css_import_modules: Vec<&'a dyn Module>, + mut css_modules: Vec<&'a dyn Module>, + ) -> (Vec<&'a dyn Module>, Option>) { + css_import_modules.sort_unstable_by_key(|module| module.identifier()); let (mut external_css_modules, conflicts_external) = - Self::get_ordered_chunk_css_modules_by_type( - chunk, - chunk_graph, - module_graph, - compilation, - SourceType::CssImport, - ); + Self::get_modules_in_order(chunk, css_import_modules, compilation); - let (mut css_modules, conflicts) = Self::get_ordered_chunk_css_modules_by_type( - chunk, - chunk_graph, - module_graph, - compilation, - SourceType::Css, - ); + css_modules.sort_unstable_by_key(|module| module.identifier()); + let (mut css_modules, conflicts) = Self::get_modules_in_order(chunk, css_modules, compilation); external_css_modules.append(&mut css_modules); @@ -60,27 +47,6 @@ impl CssPlugin { (external_css_modules, conflicts) } - fn get_ordered_chunk_css_modules_by_type<'chunk_graph>( - chunk: &Chunk, - chunk_graph: &'chunk_graph ChunkGraph, - module_graph: &'chunk_graph ModuleGraph, - compilation: &Compilation, - source_type: SourceType, - ) -> ( - Vec<&'chunk_graph dyn Module>, - Option>, - ) { - // Align with https://github.com/webpack/webpack/blob/8241da7f1e75c5581ba535d127fa66aeb9eb2ac8/lib/css/CssModulesPlugin.js#L368 - let mut css_modules = chunk_graph - .get_chunk_modules_iterable_by_source_type(&chunk.ukey(), source_type, module_graph) - .collect::>(); - css_modules.sort_unstable_by_key(|module| module.identifier()); - - let (css_modules, 
conflicts) = Self::get_modules_in_order(chunk, css_modules, compilation); - - (css_modules, conflicts) - } - pub fn get_modules_in_order<'module>( chunk: &Chunk, modules: Vec<&'module dyn Module>, diff --git a/crates/rspack_plugin_extract_css/src/plugin.rs b/crates/rspack_plugin_extract_css/src/plugin.rs index 7ab1b7ed3a3a..21ecd7d69609 100644 --- a/crates/rspack_plugin_extract_css/src/plugin.rs +++ b/crates/rspack_plugin_extract_css/src/plugin.rs @@ -4,7 +4,7 @@ use std::{borrow::Cow, cmp::max, hash::Hash, sync::Arc}; use cow_utils::CowUtils; use regex::Regex; use rspack_collections::{DatabaseItem, IdentifierMap, IdentifierSet, UkeySet}; -use rspack_core::ChunkGraph; +use rspack_core::rspack_sources::{BoxSource, CachedSource, SourceExt}; use rspack_core::{ rspack_sources::{ConcatSource, RawSource, SourceMap, SourceMapSource, WithoutOriginalOptions}, ApplyContext, Chunk, ChunkGroupUkey, ChunkKind, ChunkUkey, Compilation, CompilationContentHash, @@ -13,6 +13,7 @@ use rspack_core::{ ModuleType, NormalModuleFactoryParser, ParserAndGenerator, ParserOptions, PathData, Plugin, PluginContext, RenderManifestEntry, RuntimeGlobals, SourceType, }; +use rspack_core::{AssetInfo, ChunkGraph}; use rspack_error::{Diagnostic, Result}; use rspack_hash::RspackHash; use rspack_hook::{plugin, plugin_hook}; @@ -116,7 +117,7 @@ impl PluginCssExtract { fn sort_modules<'comp>( &self, chunk: &Chunk, - modules: Vec<&dyn Module>, + modules: &[&dyn Module], compilation: &'comp Compilation, module_graph: &'comp ModuleGraph<'comp>, ) -> (Vec<&'comp dyn Module>, Option>) { @@ -297,17 +298,67 @@ impl PluginCssExtract { async fn render_content_asset<'comp>( &self, chunk: &Chunk, - rendered_modules: Vec<&dyn Module>, - filename_template: &Filename, + rendered_modules: &[&dyn Module], + filename: &str, compilation: &'comp Compilation, - path_data: PathData<'comp>, - ) -> Result<(RenderManifestEntry, Option>)> { + ) -> (BoxSource, Vec) { let module_graph = compilation.get_module_graph(); // 
mini-extract-plugin has different conflict order in some cases, // for compatibility, we cannot use experiments.css sorting algorithm let (used_modules, conflicts) = self.sort_modules(chunk, rendered_modules, compilation, &module_graph); + let mut diagnostics = Vec::new(); + if let Some(conflicts) = conflicts { + diagnostics.extend(conflicts.into_iter().map(|conflict| { + let chunk = compilation.chunk_by_ukey.expect_get(&conflict.chunk); + let fallback_module = module_graph + .module_by_identifier(&conflict.fallback_module) + .expect("should have module"); + + Diagnostic::warn( + "".into(), + format!( + r#"chunk {} [{PLUGIN_NAME}] +Conflicting order. Following module has been added: + * {} +despite it was not able to fulfill desired ordering with these modules: +{}"#, + chunk.name().unwrap_or(chunk.id().unwrap_or_default()), + fallback_module.readable_identifier(&compilation.options.context), + conflict + .reasons + .iter() + .map(|(m, failed_reasons, good_reasons)| { + let m = module_graph + .module_by_identifier(m) + .expect("should have module"); + + format!( + " * {}\n - couldn't fulfill desired order of chunk group(s) {}{}", + m.readable_identifier(&compilation.options.context), + failed_reasons + .as_ref() + .map(|s| s.as_str()) + .unwrap_or_default(), + good_reasons + .as_ref() + .map(|s| format!( + "\n - while fulfilling desired order of chunk group(s) {}", + s.as_str() + )) + .unwrap_or_default(), + ) + }) + .collect::>() + .join("\n") + ), + ) + .with_file(Some(filename.to_owned().into())) + .with_chunk(Some(chunk.ukey().as_u32())) + })); + } + let used_modules = used_modules .into_iter() .filter_map(|module| module.downcast_ref::()); @@ -315,8 +366,6 @@ impl PluginCssExtract { let mut source = ConcatSource::default(); let mut external_source = ConcatSource::default(); - let (filename, asset_info) = compilation.get_path_with_info(filename_template, path_data)?; - for module in used_modules { let content = Cow::Borrowed(module.content.as_str()); let 
readable_identifier = module.readable_identifier(&compilation.options.context); @@ -368,7 +417,7 @@ impl PluginCssExtract { source.add(RawSource::from(format!("@layer {} {{\n", layer))); } - let undo_path = get_undo_path(&filename, compilation.options.output.path.as_str(), false); + let undo_path = get_undo_path(filename, compilation.options.output.path.as_str(), false); let content = content.cow_replace(ABSOLUTE_PUBLIC_PATH, ""); let content = content.cow_replace(SINGLE_DOT_PATH_SEGMENT, "."); @@ -408,16 +457,7 @@ impl PluginCssExtract { } external_source.add(source); - Ok(( - RenderManifestEntry::new( - Arc::new(external_source), - filename, - asset_info, - false, - false, - ), - conflicts, - )) + (external_source.boxed(), diagnostics) } } @@ -568,74 +608,43 @@ async fn render_manifest( &self.options.chunk_filename }; - let (render_result, conflicts) = self - .render_content_asset( - chunk, - rendered_modules, - filename_template, - compilation, - PathData::default() - .chunk_id_optional(chunk.id()) - .chunk_hash_optional(chunk.rendered_hash( - &compilation.chunk_hashes_results, - compilation.options.output.hash_digest_length, - )) - .chunk_name_optional(chunk.name_for_filename_template()) - .content_hash_optional(chunk.rendered_content_hash_by_source_type( - &compilation.chunk_hashes_results, - &SOURCE_TYPE[0], - compilation.options.output.hash_digest_length, - )), - ) + let mut asset_info = AssetInfo::default(); + let filename = compilation.get_path_with_info( + filename_template, + PathData::default() + .chunk_id_optional(chunk.id()) + .chunk_hash_optional(chunk.rendered_hash( + &compilation.chunk_hashes_results, + compilation.options.output.hash_digest_length, + )) + .chunk_name_optional(chunk.name_for_filename_template()) + .content_hash_optional(chunk.rendered_content_hash_by_source_type( + &compilation.chunk_hashes_results, + &SOURCE_TYPE[0], + compilation.options.output.hash_digest_length, + )), + &mut asset_info, + )?; + + let (source, more_diagnostics) 
= compilation + .old_cache + .chunk_render_occasion + .use_cache(compilation, chunk, &SOURCE_TYPE[0], || async { + let (source, diagnostics) = self + .render_content_asset(chunk, &rendered_modules, &filename, compilation) + .await; + Ok((CachedSource::new(source).boxed(), diagnostics)) + }) .await?; - if let Some(conflicts) = conflicts { - diagnostics.extend(conflicts.into_iter().map(|conflict| { - let chunk = compilation.chunk_by_ukey.expect_get(&conflict.chunk); - let fallback_module = module_graph - .module_by_identifier(&conflict.fallback_module) - .expect("should have module"); - - Diagnostic::warn( - "".into(), - format!( - "chunk {} [{PLUGIN_NAME}]\nConflicting order. Following module has been added:\n * {} -despite it was not able to fulfill desired ordering with these modules:\n{}", - chunk.name().unwrap_or(chunk.id().unwrap_or_default()), - fallback_module.readable_identifier(&compilation.options.context), - conflict - .reasons - .iter() - .map(|(m, failed_reasons, good_reasons)| { - let m = module_graph - .module_by_identifier(m) - .expect("should have module"); - - format!( - " * {}\n - couldn't fulfill desired order of chunk group(s) {}{}", - m.readable_identifier(&compilation.options.context), - failed_reasons - .as_ref() - .map(|s| s.as_str()) - .unwrap_or_default(), - good_reasons - .as_ref() - .map(|s| format!( - "\n - while fulfilling desired order of chunk group(s) {}", - s.as_str() - )) - .unwrap_or_default(), - ) - }) - .collect::>() - .join("\n") - ), - ) - .with_file(Some(render_result.filename().to_owned().into())) - .with_chunk(Some(chunk_ukey.as_u32())) - })); - } - manifest.push(render_result); + diagnostics.extend(more_diagnostics); + manifest.push(RenderManifestEntry { + source, + filename, + has_filename: false, + info: asset_info, + auxiliary: false, + }); Ok(()) } diff --git a/crates/rspack_plugin_hmr/src/lib.rs b/crates/rspack_plugin_hmr/src/lib.rs index 42f9c5a1be7b..49b844a40a03 100644 --- a/crates/rspack_plugin_hmr/src/lib.rs +++ 
b/crates/rspack_plugin_hmr/src/lib.rs @@ -258,8 +258,8 @@ async fn process_assets(&self, compilation: &mut Compilation) -> Result<()> { compilation.extend_diagnostics(diagnostics); for entry in manifest { - let filename = if entry.has_filename() { - entry.filename().to_string() + let filename = if entry.has_filename { + entry.filename.to_string() } else { compilation .get_path( diff --git a/crates/rspack_plugin_html/src/asset.rs b/crates/rspack_plugin_html/src/asset.rs index 80462f3d053a..525e01c2315d 100644 --- a/crates/rspack_plugin_html/src/asset.rs +++ b/crates/rspack_plugin_html/src/asset.rs @@ -13,7 +13,7 @@ use rayon::prelude::*; use rspack_core::{ parse_to_url, rspack_sources::{RawSource, SourceExt}, - Compilation, CompilationAsset, Filename, NoFilenameFn, PathData, + AssetInfo, Compilation, CompilationAsset, Filename, NoFilenameFn, PathData, }; use rspack_error::{miette, AnyhowError}; use rspack_paths::Utf8PathBuf; @@ -343,12 +343,14 @@ pub fn create_html_asset( ) -> (String, CompilationAsset) { let hash = hash_for_source(html); - let (output_path, asset_info) = compilation + let mut asset_info = AssetInfo::default(); + let output_path = compilation .get_path_with_info( output_file_name, PathData::default() .filename(template_file_name) .content_hash(&hash), + &mut asset_info, ) .always_ok(); diff --git a/crates/rspack_plugin_javascript/src/dependency/amd/amd_define_dependency.rs b/crates/rspack_plugin_javascript/src/dependency/amd/amd_define_dependency.rs new file mode 100644 index 000000000000..41b08089f470 --- /dev/null +++ b/crates/rspack_plugin_javascript/src/dependency/amd/amd_define_dependency.rs @@ -0,0 +1,302 @@ +use bitflags::bitflags; +use rspack_core::{ + AffectType, AsContextDependency, AsModuleDependency, Compilation, Dependency, DependencyCategory, + DependencyId, DependencyTemplate, DependencyType, RuntimeGlobals, RuntimeSpec, TemplateContext, + TemplateReplaceSource, +}; +use rspack_util::{atom::Atom, json_stringify}; + +use 
super::local_module::LocalModule;

bitflags! {
  /// Which pieces of a `define(...)` call are present.
  /// L = a *used* local module, A = dependency array, O = object literal, F = factory function.
  #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
  struct Branch: u8 {
    const L = 1 << 0;
    const A = 1 << 1;
    const O = 1 << 2;
    const F = 1 << 3;
  }
}

impl Branch {
  /// Runtime globals required by the rewrite template selected for this flag combination.
  pub fn get_requests(&self) -> RuntimeGlobals {
    match *self {
      f if f == Branch::F => {
        RuntimeGlobals::REQUIRE | RuntimeGlobals::EXPORTS | RuntimeGlobals::MODULE
      }
      o if o == Branch::O => RuntimeGlobals::MODULE,
      o_f if o_f == (Branch::O | Branch::F) => {
        RuntimeGlobals::REQUIRE | RuntimeGlobals::EXPORTS | RuntimeGlobals::MODULE
      }
      a_f if a_f == (Branch::A | Branch::F) => RuntimeGlobals::EXPORTS | RuntimeGlobals::MODULE,
      a_o if a_o == (Branch::A | Branch::O) => RuntimeGlobals::MODULE,
      a_o_f if a_o_f == (Branch::A | Branch::O | Branch::F) => {
        RuntimeGlobals::EXPORTS | RuntimeGlobals::MODULE
      }
      l_f if l_f == (Branch::L | Branch::F) => RuntimeGlobals::REQUIRE | RuntimeGlobals::MODULE,
      l_o if l_o == (Branch::L | Branch::O) => RuntimeGlobals::empty(),
      l_o_f if l_o_f == (Branch::L | Branch::O | Branch::F) => {
        RuntimeGlobals::REQUIRE | RuntimeGlobals::MODULE
      }
      l_a_f if l_a_f == (Branch::L | Branch::A | Branch::F) => RuntimeGlobals::empty(),
      l_a_o if l_a_o == (Branch::L | Branch::A | Branch::O) => RuntimeGlobals::empty(),
      l_a_o_f if l_a_o_f == (Branch::L | Branch::A | Branch::O | Branch::F) => {
        RuntimeGlobals::empty()
      }
      _ => RuntimeGlobals::empty(),
    }
  }

  /// Variable declarations inserted at the top of the module for this branch.
  /// NOTE(review): the `Option` parameter was lost in extraction; restored as
  /// `Option<String>` because callers feed it `AmdDefineDependency::local_module_var()` — confirm.
  pub fn get_definition(&self, local_module_var: &Option<String>) -> String {
    let name = match local_module_var {
      Some(name) => name,
      // "XXX" is an unreachable placeholder: L-branches always carry a var name.
      None => "XXX",
    };
    match *self {
      f if f == Branch::F => "var __WEBPACK_AMD_DEFINE_RESULT__;".to_string(),
      o if o == Branch::O => "".to_string(),
      o_f if o_f == (Branch::O | Branch::F) => {
        "var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_RESULT__;".to_string()
      }
      a_f if a_f == (Branch::A | Branch::F) => {
        "var __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;".to_string()
      }
      a_o if a_o == (Branch::A | Branch::O) => "".to_string(),
      a_o_f if a_o_f == (Branch::A | Branch::O | Branch::F) => "var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;".to_string(),
      l_f if l_f == (Branch::L | Branch::F) => {
        format!("var {}, {}module;", name, name)
      },
      l_o if l_o == (Branch::L | Branch::O) => {
        format!("var {};", name)
      },
      l_o_f if l_o_f == (Branch::L | Branch::O | Branch::F) => {
        format!("var {}, {}factory, {}module;", name, name, name)
      },
      l_a_f if l_a_f == (Branch::L | Branch::A | Branch::F) => {
        format!("var __WEBPACK_AMD_DEFINE_ARRAY__, {}, {}exports;", name, name)
      },
      l_a_o if l_a_o == (Branch::L | Branch::A | Branch::O) => {
        format!("var {};", name)
      },
      l_a_o_f if l_a_o_f == (Branch::L | Branch::A | Branch::O | Branch::F) => {
        format!("var {}array, {}factory, {}exports, {};", name, name, name, name)
      },
      _ => "".to_string(),
    }
  }

  /// JS template that replaces the `define(...)` call. Each `#` is a slot that
  /// `AmdDefineDependency::apply` fills with the original array/object/factory text.
  /// NOTE(review): generic parameters restored after extraction loss
  /// (`Option<String>` / `Option<Atom>`) — confirm against the parser plugin.
  pub fn get_content(
    &self,
    local_module_var: &Option<String>,
    named_module: &Option<Atom>,
  ) -> String {
    let local_module_var = match local_module_var {
      Some(name) => name,
      None => "XXX",
    };
    let named_module = match named_module {
      Some(name) => name,
      None => "YYY",
    };
    match *self {
      f if f == Branch::F => {
        format!(
          "!(__WEBPACK_AMD_DEFINE_RESULT__ = (#).call(exports, {require}, exports, module),
				__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__))",
          require = RuntimeGlobals::REQUIRE.name()
        )
      }
      o if o == Branch::O => "!(module.exports = #)".to_string(),
      o_f if o_f == (Branch::O | Branch::F) => {
        format!(
          "!(__WEBPACK_AMD_DEFINE_FACTORY__ = (#),
				__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
				(__WEBPACK_AMD_DEFINE_FACTORY__.call(exports, {require}, exports, module)) :
				__WEBPACK_AMD_DEFINE_FACTORY__),
				__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__))",
          require = RuntimeGlobals::REQUIRE.name()
        )
      }
      a_f if a_f == (Branch::A | Branch::F) => "!(__WEBPACK_AMD_DEFINE_ARRAY__ = #, __WEBPACK_AMD_DEFINE_RESULT__ = (#).apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__),
				__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__))".to_string(),
      a_o if a_o == (Branch::A | Branch::O) => "!(#, module.exports = #)".to_string(),
      a_o_f if a_o_f == (Branch::A | Branch::O | Branch::F) => {
        "!(__WEBPACK_AMD_DEFINE_ARRAY__ = #, __WEBPACK_AMD_DEFINE_FACTORY__ = (#),
				__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
				(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
				__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__))".to_string()
      }
      l_f if l_f == (Branch::L | Branch::F) => {
        format!(
          "!({var_name}module = {{ id: {module_id}, exports: {{}}, loaded: false }}, {var_name} = (#).call({var_name}module.exports, {require}, {var_name}module.exports, {var_name}module), {var_name}module.loaded = true, {var_name} === undefined && ({var_name} = {var_name}module.exports))",
          var_name = local_module_var,
          module_id = json_stringify(named_module),
          require = RuntimeGlobals::REQUIRE.name(),
        )
      }
      l_o if l_o == (Branch::L | Branch::O) => format!("!({} = #)", local_module_var),
      l_o_f if l_o_f == (Branch::L | Branch::O | Branch::F) => {
        format!(
          "!({var_name}factory = (#), (typeof {var_name}factory === 'function' ?
				(({var_name}module = {{ id: {module_id}, exports: {{}}, loaded: false }}), ({var_name} = {var_name}factory.call({var_name}module.exports, {require}, {var_name}module.exports, {var_name}module)), ({var_name}module.loaded = true), {var_name} === undefined && ({var_name} = {var_name}module.exports)) : {var_name} = {var_name}factory))",
          var_name = local_module_var,
          module_id = json_stringify(named_module),
          require = RuntimeGlobals::REQUIRE.name(),
        )
      }
      l_a_f if l_a_f == (Branch::L | Branch::A | Branch::F) => format!("!(__WEBPACK_AMD_DEFINE_ARRAY__ = #, {} = (#).apply({}exports = {{}}, __WEBPACK_AMD_DEFINE_ARRAY__), {} === undefined && ({} = {}exports))", local_module_var, local_module_var, local_module_var, local_module_var, local_module_var),
      l_a_o if l_a_o == (Branch::L | Branch::A | Branch::O) => format!("!(#, {} = #)", local_module_var),
      l_a_o_f if l_a_o_f == (Branch::L | Branch::A | Branch::O | Branch::F) => format!(
        "!({var_name}array = #, {var_name}factory = (#),
				(typeof {var_name}factory === 'function' ?
				(({var_name} = {var_name}factory.apply({var_name}exports = {{}}, {var_name}array)), {var_name} === undefined && ({var_name} = {var_name}exports)) :
				({var_name} = {var_name}factory)
				))",
        var_name = local_module_var,
      ),
      _ => "".to_string(),
    }
  }
}

/// Dependency emitted for every `define(...)` call found by the AMD parser plugin.
/// Ranges are byte offsets into the original source of the whole call and of the
/// optional array / factory-function / object arguments.
#[derive(Debug, Clone)]
pub struct AmdDefineDependency {
  id: DependencyId,
  range: (u32, u32),
  array_range: Option<(u32, u32)>,
  function_range: Option<(u32, u32)>,
  object_range: Option<(u32, u32)>,
  // NOTE(review): type parameters restored after extraction loss
  // (`Option<Atom>` / `Option<LocalModule>`) — confirm against the parser plugin.
  named_module: Option<Atom>,
  local_module: Option<LocalModule>,
}

impl AmdDefineDependency {
  pub fn new(
    range: (u32, u32),
    array_range: Option<(u32, u32)>,
    function_range: Option<(u32, u32)>,
    object_range: Option<(u32, u32)>,
    named_module: Option<Atom>,
    local_module: Option<LocalModule>,
  ) -> Self {
    Self {
      id: DependencyId::new(),
      range,
      array_range,
      function_range,
      object_range,
      named_module,
      local_module,
    }
  }

  pub fn get_local_module_mut(&mut self) -> Option<&mut LocalModule> {
    self.local_module.as_mut()
  }
}

impl Dependency for AmdDefineDependency {
  fn id(&self) -> &DependencyId {
    &self.id
  }

  fn category(&self) -> &DependencyCategory {
    &DependencyCategory::Amd
  }

  fn dependency_type(&self) -> &DependencyType {
    &DependencyType::AmdDefine
  }

  fn could_affect_referencing_module(&self) -> AffectType {
    AffectType::False
  }
}

impl AmdDefineDependency {
  /// Variable name of the local module, but only when some other dependency
  /// actually referenced it (`is_used`); otherwise the L flag stays off.
  fn local_module_var(&self) -> Option<String> {
    self.local_module.as_ref().and_then(|m| {
      if m.is_used() {
        Some(m.variable_name())
      } else {
        None
      }
    })
  }

  /// Computes the L/A/O/F flag set from which call arguments are present.
  fn branch(&self) -> Branch {
    let mut ret = Branch::empty();
    if self.local_module.as_ref().is_some_and(|m| m.is_used()) {
      ret |= Branch::L;
    }
    if self.array_range.is_some() {
      ret |= Branch::A;
    }
    if self.object_range.is_some() {
      ret |= Branch::O;
    }
    if self.function_range.is_some() {
      ret |= Branch::F;
    }
    ret
  }
}

impl DependencyTemplate for AmdDefineDependency {
  /// Replaces the `define(...)` call with the branch template, splicing the
  /// original array/object/factory source text into the `#` slots in order.
  fn apply(
    &self,
    source: &mut TemplateReplaceSource,
    code_generatable_context: &mut TemplateContext,
  ) {
    let branch = self.branch();
    code_generatable_context
      .runtime_requirements
      .insert(branch.get_requests());

    let local_module_var = self.local_module_var();

    let text = branch.get_content(&local_module_var, &self.named_module);
    let definition = branch.get_definition(&local_module_var);

    // The template is split at each `#`; the chunks are emitted between the
    // preserved array / object-or-function argument ranges.
    let mut texts = text.split('#');

    if !definition.is_empty() {
      source.insert(0, &definition, None);
    }

    let mut current = self.range.0;
    if let Some(array_range) = self.array_range {
      source.replace(current, array_range.0, texts.next().unwrap_or(""), None);
      current = array_range.1;
    }

    // Object takes precedence; when both are set they refer to the same
    // ambiguous expression, and the template only has one slot for it.
    if let Some(object_range) = self.object_range {
      source.replace(current, object_range.0, texts.next().unwrap_or(""), None);
      current = object_range.1;
    } else if let Some(function_range) = self.function_range {
      source.replace(current, function_range.0, texts.next().unwrap_or(""), None);
      current = function_range.1;
    }

    source.replace(current, self.range.1, texts.next().unwrap_or(""), None);

    // More `#` slots than argument ranges means template and flags disagree.
    if texts.next().is_some() {
      panic!("Implementation error");
    }
  }

  fn dependency_id(&self) -> Option<DependencyId> {
    Some(self.id)
  }

  fn update_hash(
    &self,
    _hasher: &mut dyn std::hash::Hasher,
    _compilation: &Compilation,
    _runtime: Option<&RuntimeSpec>,
  ) {
  }
}

impl AsModuleDependency for AmdDefineDependency {}

impl AsContextDependency for AmdDefineDependency {}
diff --git a/crates/rspack_plugin_javascript/src/dependency/amd/amd_require_array_dependency.rs b/crates/rspack_plugin_javascript/src/dependency/amd/amd_require_array_dependency.rs
new file mode 100644
index 000000000000..4772ebaadc3f
--- /dev/null
+++ b/crates/rspack_plugin_javascript/src/dependency/amd/amd_require_array_dependency.rs
@@ -0,0 +1,111 @@
use itertools::Itertools;
use rspack_core::{
  module_raw, AffectType, AsContextDependency, AsModuleDependency, Compilation, Dependency,
  DependencyCategory, DependencyId,
DependencyTemplate, DependencyType, ModuleDependency,
  RuntimeSpec, TemplateContext, TemplateReplaceSource,
};
use rspack_util::atom::Atom;

use super::{
  amd_require_item_dependency::AMDRequireItemDependency,
  local_module_dependency::LocalModuleDependency,
};

/// One entry of an AMD dependency array: a raw string emitted verbatim, a
/// reference to an in-file `define`d local module, or a real module request.
#[derive(Debug, Clone)]
pub enum AmdDep {
  String(Atom),
  LocalModuleDependency(LocalModuleDependency),
  AMDRequireItemDependency(AMDRequireItemDependency),
}

/// Renders a whole AMD dependency array (`[dep1, dep2, ...]`) with a single
/// source replacement over `range`.
#[derive(Debug, Clone)]
pub struct AmdRequireArrayDependency {
  id: DependencyId,
  // NOTE(review): element type restored after extraction loss (`Vec<AmdDep>`).
  deps_array: Vec<AmdDep>,
  range: (u32, u32),
}

impl AmdRequireArrayDependency {
  pub fn new(deps_array: Vec<AmdDep>, range: (u32, u32)) -> Self {
    Self {
      id: DependencyId::new(),
      deps_array,
      range,
    }
  }
}

impl Dependency for AmdRequireArrayDependency {
  fn id(&self) -> &DependencyId {
    &self.id
  }

  fn category(&self) -> &DependencyCategory {
    &DependencyCategory::Amd
  }

  fn dependency_type(&self) -> &DependencyType {
    &DependencyType::AmdRequireArray
  }

  fn could_affect_referencing_module(&self) -> AffectType {
    AffectType::False
  }
}

impl AmdRequireArrayDependency {
  /// Builds the JS array literal that replaces the original source range.
  fn get_content(&self, code_generatable_context: &mut TemplateContext) -> String {
    format!(
      "[{}]",
      self
        .deps_array
        .iter()
        .map(|dep| Self::content_for_dependency(dep, code_generatable_context))
        .join(", ")
    )
  }

  /// Renders a single array element; module requests go through `module_raw`
  /// so they resolve to the interned module id at runtime.
  fn content_for_dependency(
    dep: &AmdDep,
    code_generatable_context: &mut TemplateContext,
  ) -> String {
    match dep {
      AmdDep::String(name) => name.to_string(),
      AmdDep::LocalModuleDependency(dep) => dep.get_variable_name(),
      AmdDep::AMDRequireItemDependency(dep) => module_raw(
        code_generatable_context.compilation,
        code_generatable_context.runtime_requirements,
        dep.id(),
        dep.request(),
        dep.weak(),
      ),
    }
  }
}

impl DependencyTemplate for AmdRequireArrayDependency {
  fn apply(
    &self,
    source: &mut TemplateReplaceSource,
    code_generatable_context: &mut TemplateContext,
  ) {
    let content = self.get_content(code_generatable_context);
    source.replace(self.range.0, self.range.1, &content, None);
  }

  fn dependency_id(&self) -> Option<DependencyId> {
    Some(self.id)
  }

  fn update_hash(
    &self,
    _hasher: &mut dyn std::hash::Hasher,
    _compilation: &Compilation,
    _runtime: Option<&RuntimeSpec>,
  ) {
  }
}

impl AsModuleDependency for AmdRequireArrayDependency {}

impl AsContextDependency for AmdRequireArrayDependency {}
diff --git a/crates/rspack_plugin_javascript/src/dependency/amd/amd_require_dependency.rs b/crates/rspack_plugin_javascript/src/dependency/amd/amd_require_dependency.rs
new file mode 100644
index 000000000000..fb121e64c953
--- /dev/null
+++ b/crates/rspack_plugin_javascript/src/dependency/amd/amd_require_dependency.rs
@@ -0,0 +1,199 @@
use rspack_core::{
  block_promise, AffectType, AsContextDependency, AsModuleDependency, Compilation, Dependency,
  DependencyCategory, DependencyId, DependencyTemplate, DependencyType, RuntimeGlobals,
  RuntimeSpec,
};

#[derive(Debug, Clone)]
pub struct AMDRequireDependency {
  id: DependencyId,
  outer_range: (u32, u32),
  // In the webpack source code, type annotation of `arrayRange` is non-null.
  // However, `DependencyTemplate` implementation assumes `arrayRange` can be null in some cases.
  // So I use Option here.
+ array_range: Option<(u32, u32)>, + function_range: Option<(u32, u32)>, + error_callback_range: Option<(u32, u32)>, + pub function_bind_this: bool, + pub error_callback_bind_this: bool, +} + +impl AMDRequireDependency { + pub fn new( + outer_range: (u32, u32), + array_range: Option<(u32, u32)>, + function_range: Option<(u32, u32)>, + error_callback_range: Option<(u32, u32)>, + ) -> Self { + Self { + id: DependencyId::new(), + outer_range, + array_range, + function_range, + error_callback_range, + function_bind_this: false, + error_callback_bind_this: false, + } + } +} + +impl Dependency for AMDRequireDependency { + fn id(&self) -> &DependencyId { + &self.id + } + + fn category(&self) -> &DependencyCategory { + &DependencyCategory::Amd + } + + fn dependency_type(&self) -> &DependencyType { + &DependencyType::AmdRequire + } + + fn could_affect_referencing_module(&self) -> AffectType { + AffectType::False + } +} + +impl DependencyTemplate for AMDRequireDependency { + fn apply( + &self, + source: &mut rspack_core::TemplateReplaceSource, + code_generatable_context: &mut rspack_core::TemplateContext, + ) { + let module_graph = code_generatable_context.compilation.get_module_graph(); + let block = module_graph.get_parent_block(&self.id); + + let promise = block_promise( + block, + code_generatable_context.runtime_requirements, + code_generatable_context.compilation, + "AMD require", + ); + + // has array range but no function range + if let Some(array_range) = self.array_range + && self.function_range.is_none() + { + let start_block = promise + ".then(function() {"; + let end_block = format!( + ";}})['catch']{}", + RuntimeGlobals::UNCAUGHT_ERROR_HANDLER.name() + ); + code_generatable_context + .runtime_requirements + .insert(RuntimeGlobals::UNCAUGHT_ERROR_HANDLER); + source.replace(self.outer_range.0, array_range.0, &start_block, None); + source.replace(array_range.1, self.outer_range.1, &end_block, None); + return; + } + + // has function range but no array range + if 
let Some(function_range) = self.function_range + && self.array_range.is_none() + { + let start_block = promise + ".then(("; + let end_block = format!( + ").bind(exports, {}, exports, module))['catch']({})", + RuntimeGlobals::REQUIRE.name(), + RuntimeGlobals::UNCAUGHT_ERROR_HANDLER.name() + ); + code_generatable_context + .runtime_requirements + .insert(RuntimeGlobals::UNCAUGHT_ERROR_HANDLER); + source.replace(self.outer_range.0, function_range.0, &start_block, None); + source.replace(function_range.1, self.outer_range.1, &end_block, None); + return; + } + + // has array range, function range, and errorCallbackRange + if let Some(array_range) = self.array_range + && let Some(function_range) = self.function_range + && let Some(error_callback_range) = self.error_callback_range + { + let start_block = promise + ".then(function() { "; + let error_range_block = if self.function_bind_this { + "}.bind(this))['catch'](" + } else { + "})['catch'](" + }; + let end_block = if self.error_callback_bind_this { + ".bind(this))" + } else { + ")" + }; + + source.replace(self.outer_range.0, array_range.0, &start_block, None); + + source.insert(array_range.0, "var __WEBPACK_AMD_REQUIRE_ARRAY__ = ", None); + + source.replace(array_range.1, function_range.0, "; (", None); + + source.insert( + function_range.1, + ").apply(null, __WEBPACK_AMD_REQUIRE_ARRAY__);", + None, + ); + + source.replace( + function_range.1, + error_callback_range.0, + error_range_block, + None, + ); + + source.replace(error_callback_range.1, self.outer_range.1, end_block, None); + + return; + } + + // has array range, function range, but no errorCallbackRange + if let Some(array_range) = self.array_range + && let Some(function_range) = self.function_range + { + let start_block = promise + ".then(function() { "; + let end_block = format!( + "}}{})['catch']({})", + if self.function_bind_this { + ".bind(this)" + } else { + "" + }, + RuntimeGlobals::UNCAUGHT_ERROR_HANDLER.name() + ); + code_generatable_context + 
.runtime_requirements + .insert(RuntimeGlobals::UNCAUGHT_ERROR_HANDLER); + + source.replace(self.outer_range.0, array_range.0, &start_block, None); + + source.insert(array_range.0, "var __WEBPACK_AMD_REQUIRE_ARRAY__ = ", None); + + source.replace(array_range.1, function_range.0, "; (", None); + + source.insert( + function_range.1, + ").apply(null, __WEBPACK_AMD_REQUIRE_ARRAY__);", + None, + ); + + source.replace(function_range.1, self.outer_range.1, &end_block, None); + }; + } + + fn dependency_id(&self) -> Option { + Some(self.id) + } + + fn update_hash( + &self, + _hasher: &mut dyn std::hash::Hasher, + _compilation: &Compilation, + _runtime: Option<&RuntimeSpec>, + ) { + } +} + +impl AsModuleDependency for AMDRequireDependency {} + +impl AsContextDependency for AMDRequireDependency {} diff --git a/crates/rspack_plugin_javascript/src/dependency/amd/amd_require_item_dependency.rs b/crates/rspack_plugin_javascript/src/dependency/amd/amd_require_item_dependency.rs new file mode 100644 index 000000000000..b30461cb7072 --- /dev/null +++ b/crates/rspack_plugin_javascript/src/dependency/amd/amd_require_item_dependency.rs @@ -0,0 +1,89 @@ +use rspack_core::{ + module_raw, AffectType, AsContextDependency, Compilation, Dependency, DependencyCategory, + DependencyId, DependencyTemplate, DependencyType, ModuleDependency, RuntimeSpec, TemplateContext, + TemplateReplaceSource, +}; +use rspack_util::atom::Atom; + +#[derive(Debug, Clone)] +pub struct AMDRequireItemDependency { + id: DependencyId, + request: Atom, + range: (u32, u32), + optional: bool, +} + +impl AMDRequireItemDependency { + pub fn new(request: Atom, range: (u32, u32)) -> Self { + Self { + id: DependencyId::new(), + request, + range, + optional: false, + } + } + + pub fn set_optional(&mut self, optional: bool) { + self.optional = optional; + } +} + +impl Dependency for AMDRequireItemDependency { + fn id(&self) -> &DependencyId { + &self.id + } + + fn category(&self) -> &DependencyCategory { + 
&DependencyCategory::Amd
  }

  fn dependency_type(&self) -> &DependencyType {
    &DependencyType::AmdRequireItem
  }

  fn could_affect_referencing_module(&self) -> AffectType {
    AffectType::True
  }
}

impl DependencyTemplate for AMDRequireItemDependency {
  /// Replaces the request string with the resolved module expression.
  fn apply(
    &self,
    source: &mut TemplateReplaceSource,
    code_generatable_context: &mut TemplateContext,
  ) {
    // ModuleDependencyTemplateAsRequireId
    let content = module_raw(
      code_generatable_context.compilation,
      code_generatable_context.runtime_requirements,
      &self.id,
      &self.request,
      self.weak(),
    );
    source.replace(self.range.0, self.range.1, &content, None);
  }

  fn dependency_id(&self) -> Option<DependencyId> {
    Some(self.id)
  }

  fn update_hash(
    &self,
    _hasher: &mut dyn std::hash::Hasher,
    _compilation: &Compilation,
    _runtime: Option<&RuntimeSpec>,
  ) {
  }
}

impl ModuleDependency for AMDRequireItemDependency {
  fn request(&self) -> &str {
    &self.request
  }

  fn get_optional(&self) -> bool {
    self.optional
  }
}

impl AsContextDependency for AMDRequireItemDependency {}
diff --git a/crates/rspack_plugin_javascript/src/dependency/amd/local_module.rs b/crates/rspack_plugin_javascript/src/dependency/amd/local_module.rs
new file mode 100644
index 000000000000..f531d94b7fc2
--- /dev/null
+++ b/crates/rspack_plugin_javascript/src/dependency/amd/local_module.rs
@@ -0,0 +1,38 @@
use rspack_util::atom::Atom;

/// A module defined inline by a named `define("name", ...)` call, referenced
/// by other `define`/`require` calls in the same file via a generated variable.
#[derive(Debug, Clone)]
pub struct LocalModule {
  // AMD module name as written in the source
  name: Atom,
  // position of this local module within the file; used for the variable name
  idx: usize,
  // flipped on once something actually references this module
  used: bool,
}

impl LocalModule {
  pub fn new(name: Atom, idx: usize) -> Self {
    Self {
      name,
      idx,
      used: false,
    }
  }

  pub fn flag_used(&mut self) {
    self.used = true;
  }

  /// Generated JS identifier holding this module's exports.
  pub fn variable_name(&self) -> String {
    format!("__WEBPACK_LOCAL_MODULE_{}__", self.idx)
  }

  pub fn is_used(&self) -> bool {
    self.used
  }

  pub fn get_name(&self) -> &Atom {
    &self.name
  }

  pub fn get_idx(&self) -> usize {
    self.idx
  }
}
diff --git
a/crates/rspack_plugin_javascript/src/dependency/amd/local_module_dependency.rs b/crates/rspack_plugin_javascript/src/dependency/amd/local_module_dependency.rs
new file mode 100644
index 000000000000..319b2740653a
--- /dev/null
+++ b/crates/rspack_plugin_javascript/src/dependency/amd/local_module_dependency.rs
@@ -0,0 +1,75 @@
use rspack_core::{
  AffectType, AsContextDependency, AsModuleDependency, Compilation, Dependency, DependencyId,
  DependencyTemplate, RuntimeSpec, TemplateContext, TemplateReplaceSource,
};

use super::local_module::LocalModule;

/// Reference to an in-file [`LocalModule`]; renders as the module's generated
/// variable name (optionally invoked with `new`).
#[derive(Debug, Clone)]
pub struct LocalModuleDependency {
  id: DependencyId,
  local_module: LocalModule,
  // `None` when the reference needs no source rewrite
  range: Option<(u32, u32)>,
  // true when the original code did `new <module>(...)`
  call_new: bool,
}

impl LocalModuleDependency {
  pub fn new(local_module: LocalModule, range: Option<(u32, u32)>, call_new: bool) -> Self {
    Self {
      id: DependencyId::new(),
      local_module,
      range,
      call_new,
    }
  }

  pub fn get_variable_name(&self) -> String {
    self.local_module.variable_name()
  }
}

impl Dependency for LocalModuleDependency {
  fn id(&self) -> &DependencyId {
    &self.id
  }

  fn could_affect_referencing_module(&self) -> AffectType {
    AffectType::False
  }
}

impl DependencyTemplate for LocalModuleDependency {
  fn apply(
    &self,
    source: &mut TemplateReplaceSource,
    _code_generatable_context: &mut TemplateContext,
  ) {
    if let Some(range) = self.range {
      // Wrapping in `new (function () { return X; })()` lets the expression be
      // used as a constructor call target without re-evaluating `X`.
      let module_instance = if self.call_new {
        format!(
          "new (function () {{ return {}; }})()",
          self.local_module.variable_name()
        )
      } else {
        self.local_module.variable_name()
      };
      source.replace(range.0, range.1, &module_instance, None);
    }
  }

  fn dependency_id(&self) -> Option<DependencyId> {
    Some(self.id)
  }

  fn update_hash(
    &self,
    _hasher: &mut dyn std::hash::Hasher,
    _compilation: &Compilation,
    _runtime: Option<&RuntimeSpec>,
  ) {
  }
}

impl AsModuleDependency for LocalModuleDependency {}

impl AsContextDependency for LocalModuleDependency {}
diff --git
a/crates/rspack_plugin_javascript/src/dependency/amd/mod.rs b/crates/rspack_plugin_javascript/src/dependency/amd/mod.rs
new file mode 100644
index 000000000000..c15c3261683a
--- /dev/null
+++ b/crates/rspack_plugin_javascript/src/dependency/amd/mod.rs
@@ -0,0 +1,7 @@
pub mod amd_define_dependency;
pub mod amd_require_array_dependency;
pub mod amd_require_dependency;
pub mod amd_require_item_dependency;
pub mod local_module;
pub mod local_module_dependency;
pub mod unsupported_dependency;
diff --git a/crates/rspack_plugin_javascript/src/dependency/amd/unsupported_dependency.rs b/crates/rspack_plugin_javascript/src/dependency/amd/unsupported_dependency.rs
new file mode 100644
index 000000000000..21eb584e2d08
--- /dev/null
+++ b/crates/rspack_plugin_javascript/src/dependency/amd/unsupported_dependency.rs
@@ -0,0 +1,71 @@
use rspack_core::{
  AffectType, AsContextDependency, AsModuleDependency, Compilation, Dependency, DependencyCategory,
  DependencyId, DependencyTemplate, DependencyType, RuntimeSpec, TemplateContext,
  TemplateReplaceSource,
};
use rspack_util::atom::Atom;

/// Placeholder for an AMD request rspack cannot handle; renders a runtime
/// `MODULE_NOT_FOUND` throw in place of the unsupported expression.
#[derive(Debug, Clone)]
pub struct UnsupportedDependency {
  id: DependencyId,
  request: Atom,
  range: (u32, u32),
}

impl UnsupportedDependency {
  pub fn new(request: Atom, range: (u32, u32)) -> Self {
    Self {
      id: DependencyId::new(),
      request,
      range,
    }
  }
}

impl Dependency for UnsupportedDependency {
  fn id(&self) -> &DependencyId {
    &self.id
  }

  fn category(&self) -> &DependencyCategory {
    &DependencyCategory::Unknown
  }

  fn dependency_type(&self) -> &DependencyType {
    &DependencyType::Unknown
  }

  fn could_affect_referencing_module(&self) -> AffectType {
    AffectType::False
  }
}

impl DependencyTemplate for UnsupportedDependency {
  fn apply(
    &self,
    source: &mut TemplateReplaceSource,
    _code_generatable_context: &mut TemplateContext,
  ) {
    let content = format!(
      "Object(function webpackMissingModule() {{var e = new Error(\"Cannot find module '{}'\"); e.code = 'MODULE_NOT_FOUND'; throw e;}}())",
      self.request
    );
    source.replace(self.range.0, self.range.1, &content, None);
  }

  fn dependency_id(&self) -> Option<DependencyId> {
    Some(self.id)
  }

  fn update_hash(
    &self,
    _hasher: &mut dyn std::hash::Hasher,
    _compilation: &Compilation,
    _runtime: Option<&RuntimeSpec>,
  ) {
  }
}

impl AsModuleDependency for UnsupportedDependency {}

impl AsContextDependency for UnsupportedDependency {}
diff --git a/crates/rspack_plugin_javascript/src/dependency/esm/external_module_dependency.rs b/crates/rspack_plugin_javascript/src/dependency/esm/external_module_dependency.rs
index 384ca9e855c3..fd4a4ab0f3ad 100644
--- a/crates/rspack_plugin_javascript/src/dependency/esm/external_module_dependency.rs
+++ b/crates/rspack_plugin_javascript/src/dependency/esm/external_module_dependency.rs
@@ -33,9 +33,15 @@ impl DependencyTemplate for ExternalModuleDependency {
     _source: &mut TemplateReplaceSource,
     code_generatable_context: &mut TemplateContext,
   ) {
+    let need_prefix = code_generatable_context
+      .compilation
+      .options
+      .output
+      .environment
+      .supports_node_prefix_for_core_modules();
     let chunk_init_fragments = code_generatable_context.chunk_init_fragments();
     let fragment = ExternalModuleInitFragment::new(
-      self.module.clone(),
+      format!("{}{}", if need_prefix { "node:" } else { "" }, self.module),
       self.import_specifier.clone(),
       self.default_import.clone(),
       InitFragmentStage::StageConstants,
diff --git a/crates/rspack_plugin_javascript/src/dependency/mod.rs b/crates/rspack_plugin_javascript/src/dependency/mod.rs
index d4b68d94d080..f6e8ebbadbf6 100644
--- a/crates/rspack_plugin_javascript/src/dependency/mod.rs
+++ b/crates/rspack_plugin_javascript/src/dependency/mod.rs
@@ -1,3 +1,4 @@
+mod amd;
 mod commonjs;
 mod context;
 mod esm;
@@ -9,6 +10,7 @@ mod pure_expression_dependency;
 mod url;
 mod worker;
 
+pub use self::amd::*;
 pub use self::commonjs::*;
 pub use self::context::*;
 pub use self::esm::*;
diff --git
a/crates/rspack_plugin_javascript/src/parser_plugin/amd_define_dependency_parser_plugin.rs b/crates/rspack_plugin_javascript/src/parser_plugin/amd_define_dependency_parser_plugin.rs
new file mode 100644
index 000000000000..fa8236bc2d48
--- /dev/null
+++ b/crates/rspack_plugin_javascript/src/parser_plugin/amd_define_dependency_parser_plugin.rs
@@ -0,0 +1,597 @@
use std::borrow::Cow;

use rspack_core::{
  BuildMetaDefaultObject, BuildMetaExportsType, ConstDependency, RuntimeGlobals, SpanExt,
};
use rspack_util::atom::Atom;
use rustc_hash::FxHashMap;
use swc_core::{
  common::{Span, Spanned},
  ecma::{
    ast::{BlockStmtOrExpr, CallExpr, Callee, Expr, Lit, Pat},
    utils::ExprExt,
  },
};

use super::JavascriptParserPlugin;
use crate::{
  dependency::{
    amd_define_dependency::AmdDefineDependency,
    amd_require_item_dependency::AMDRequireItemDependency,
    local_module_dependency::LocalModuleDependency,
  },
  utils::eval::BasicEvaluatedExpression,
  visitors::{scope_info::FreeName, JavascriptParser, Statement},
};

pub struct AMDDefineDependencyParserPlugin;

/// `function () {}` or `() => {}` — a factory whose receiver and arguments
/// the rewrite fully controls.
fn is_unbound_function_expression(expr: &Expr) -> bool {
  expr.is_fn_expr() || expr.is_arrow()
}

/// A function expression with `.bind(...)` applied directly to it, e.g.
/// `function () {}.bind(ctx)`; only a non-computed `bind` on a plain
/// function expression qualifies.
fn is_bound_function_expression(expr: &Expr) -> bool {
  if !expr.is_call() {
    return false;
  }

  let call_expr = expr.as_call().expect("expr is supposed to be CallExpr");
  match &call_expr.callee {
    Callee::Super(_) => return false,
    Callee::Import(_) => return false,
    Callee::Expr(callee) => {
      if !callee.is_member() {
        return false;
      }
      let callee_member = callee
        .as_member()
        .expect("callee is supposed to be MemberExpr");
      if callee_member.prop.is_computed() {
        return false;
      }
      if !callee_member.obj.is_fn_expr() {
        return false;
      }
      if !callee_member.prop.is_ident_with("bind") {
        return false;
      }
    }
  }

  true
}

/// Anything acceptable as a `define` factory argument.
fn is_callable(expr: &Expr) -> bool {
  is_unbound_function_expression(expr) || is_bound_function_expression(expr)
}

/**
 * lookup
 *
 *
define('ui/foo/bar', ['./baz', '../qux'], ...); + * - 'ui/foo/baz' + * - 'ui/qux' + */ +fn resolve_mod_name(mod_name: &Option, dep_name: &str) -> Atom { + if let Some(mod_name) = mod_name + && dep_name.starts_with('.') + { + let mut path: Vec<&str> = mod_name.split('/').collect(); + path.pop(); + + for seg in dep_name.split('.') { + if seg == ".." { + path.pop(); + } else if seg != "." { + path.push(seg); + } + } + + path.join("/").into() + } else { + dep_name.into() + } +} + +const REQUIRE: &str = "require"; +const MODULE: &str = "module"; +const EXPORTS: &str = "exports"; +const RESERVED_NAMES: [&str; 3] = [REQUIRE, MODULE, EXPORTS]; + +fn span_to_range(span: Span) -> (u32, u32) { + (span.real_lo(), span.real_hi()) +} + +fn get_lit_str(expr: &Expr) -> Option { + expr.as_lit().and_then(|lit| match lit { + Lit::Str(s) => Some(s.value.clone()), + _ => None, + }) +} + +fn get_ident_name(pat: &Pat) -> Atom { + pat + .as_ident() + .map(|ident| ident.sym.clone()) + .unwrap_or("".into()) +} + +impl AMDDefineDependencyParserPlugin { + fn process_array( + &self, + parser: &mut JavascriptParser, + call_expr: &CallExpr, + param: &BasicEvaluatedExpression, + identifiers: &mut FxHashMap, // param index => "require" | "module" | "exports" + named_module: &Option, + ) -> Option { + if param.is_array() { + let items = param.items(); + for (idx, item) in items.iter().enumerate() { + if item.is_string() { + let item = item.string(); + if let Some(i) = RESERVED_NAMES.iter().position(|s| s == item) { + identifiers.insert(idx, RESERVED_NAMES[i]); + } + } + let result = self.process_item(parser, call_expr, item, named_module); + if result.is_none() { + self.process_context(parser, call_expr, item); + } + } + return Some(true); + } + // currently, there is no ConstArray in rspack + // TODO: check if `param` is a const string array + None + } + + fn process_item( + &self, + parser: &mut JavascriptParser, + call_expr: &CallExpr, + param: &BasicEvaluatedExpression, + named_module: &Option, 
+ ) -> Option { + if param.is_conditional() { + let options = param.options(); + + for option in options.iter() { + let result = self.process_item(parser, call_expr, option, &None); + if result.is_none() { + self.process_context(parser, call_expr, param); + } + } + + return Some(true); + } else if param.is_string() { + let param_str = param.string(); + let range = { + let (l, h) = param.range(); + (l, h - 1) + }; + + let dep = if param_str == "require" { + Box::new(ConstDependency::new( + range.0, + range.1, + RuntimeGlobals::REQUIRE.name().into(), + Some(RuntimeGlobals::REQUIRE), + )) + } else if param_str == "exports" { + Box::new(ConstDependency::new( + range.0, + range.1, + EXPORTS.into(), + Some(RuntimeGlobals::EXPORTS), + )) + } else if param_str == "module" { + Box::new(ConstDependency::new( + range.0, + range.1, + MODULE.into(), + Some(RuntimeGlobals::MODULE), + )) + } else if let Some(local_module) = + parser.get_local_module_mut(&resolve_mod_name(named_module, param_str)) + { + local_module.flag_used(); + let dep = Box::new(LocalModuleDependency::new( + local_module.clone(), + Some((range.0, range.1)), + false, + )); + parser.presentational_dependencies.push(dep); + return Some(true); + } else { + let mut dep = Box::new(AMDRequireItemDependency::new( + Atom::new(param_str.as_str()), + range, + )); + dep.set_optional(parser.in_try); + parser.dependencies.push(dep); + return Some(true); + }; + // TODO: how to implement this? 
+ // dep.loc = /** @type {DependencyLocation} */ (expr.loc); + parser.presentational_dependencies.push(dep); + return Some(true); + } + None + } + + fn process_context( + &self, + _parser: &mut JavascriptParser, + _call_expr: &CallExpr, + _param: &BasicEvaluatedExpression, + ) -> Option { + // TODO: support amd context dep + None + } + + fn process_call_define( + &self, + parser: &mut JavascriptParser, + call_expr: &CallExpr, + ) -> Option { + let mut array: Option<&Expr> = None; + let mut func: Option<&Expr> = None; + let mut obj: Option<&Expr> = None; + let mut named_module: Option = None; + + match call_expr.args.len() { + 1 => { + let first_arg = &call_expr.args[0]; + + // We don't support spread syntax in `define()` + if first_arg.spread.is_some() { + return None; + } + + if is_callable(&first_arg.expr) { + // define(f() {…}) + func = Some(&first_arg.expr); + } else if first_arg.expr.is_object() { + // define({…}) + obj = Some(&first_arg.expr); + } else { + // define(expr) + // unclear if function or object + func = Some(&first_arg.expr); + obj = Some(&first_arg.expr); + } + } + 2 => { + let first_arg = &call_expr.args[0]; + let second_arg = &call_expr.args[1]; + + // We don't support spread syntax in `define()` + if first_arg.spread.is_some() || second_arg.spread.is_some() { + return None; + } + + if first_arg.expr.is_lit() { + // define("…", …) + named_module = get_lit_str(&first_arg.expr); + + if is_callable(&second_arg.expr) { + // define("…", f() {…}) + func = Some(&second_arg.expr); + } else if second_arg.expr.is_object() { + // define("…", {…}) + obj = Some(&second_arg.expr); + } else { + // define("…", expr) + // unclear if function or object + func = Some(&second_arg.expr); + obj = Some(&second_arg.expr); + } + } else { + // define([…], …) + if !first_arg.expr.is_array() { + return None; + } + + array = Some(&first_arg.expr); + + if is_callable(&second_arg.expr) { + // define([…], f() {}) + func = Some(&second_arg.expr); + } else if 
second_arg.expr.is_object() { + // define([…], {…}) + obj = Some(&second_arg.expr); + } else { + // define([…], expr) + // unclear if function or object + func = Some(&second_arg.expr); + obj = Some(&second_arg.expr); + } + } + } + 3 => { + // define("…", […], …) + + let first_arg = &call_expr.args[0]; + let second_arg = &call_expr.args[1]; + let third_arg = &call_expr.args[2]; + + // We don't support spread syntax in `define()` + if first_arg.spread.is_some() || second_arg.spread.is_some() || third_arg.spread.is_some() { + return None; + } + + if !first_arg.expr.is_lit() { + return None; + } + if !second_arg.expr.is_array_lit() { + return None; + } + + named_module = get_lit_str(&first_arg.expr); + array = Some(&second_arg.expr); + + if is_callable(&third_arg.expr) { + // define("…", […], f() {}) + func = Some(&third_arg.expr); + } else if third_arg.expr.is_object() { + // define("…", […], {…}) + obj = Some(&third_arg.expr); + } else { + // define("…", […], expr) + // unclear if function or object + func = Some(&third_arg.expr); + obj = Some(&third_arg.expr); + } + } + _ => return None, + } + + { + // DynamicExports.bailout(parser.state); + // TODO: consider how to share this code + if parser.parser_exports_state.is_some_and(|x| x) { + parser.build_meta.exports_type = BuildMetaExportsType::Unset; + parser.build_meta.default_object = BuildMetaDefaultObject::False; + } + parser.parser_exports_state = Some(false); + } + + let mut fn_params: Option>> = None; + let mut fn_params_offset = 0usize; + if let Some(func) = func { + if is_unbound_function_expression(func) { + fn_params = match func { + Expr::Fn(normal_func) => Some( + normal_func + .function + .params + .iter() + .map(|param| Cow::Borrowed(¶m.pat)) + .collect(), + ), + Expr::Arrow(array_func) => Some(array_func.params.iter().map(Cow::Borrowed).collect()), + _ => None, + }; + } else if is_bound_function_expression(func) { + let call_expr = func + .as_call() + .expect("call_expr is supposed to be a CallExpr"); 
+ let object = &call_expr + .callee + .as_expr() + .expect("call_expr.callee is supposed to be Expr") + .as_member() + .expect("call_expr.callee is supposed to be MemberExpr") + .obj + .as_fn_expr() + .expect("call_expr.callee.obj is supposed to be FnExpr"); + + fn_params = Some( + object + .function + .params + .iter() + .map(|param| Cow::Borrowed(¶m.pat)) + .collect(), + ); + + if !call_expr.args.is_empty() { + fn_params_offset = call_expr.args.len() - 1; + } + } + } + + // TODO: ensure all fn_params are identifiers + + let mut fn_renames = FxHashMap::default(); + if let Some(array) = array { + let mut identifiers = FxHashMap::default(); + let param = parser.evaluate_expression(array); + let result = self.process_array(parser, call_expr, ¶m, &mut identifiers, &named_module); + if !result.is_some_and(|b| b) { + return None; + } + if let Some(fn_params) = &mut fn_params { + let mut i = 0usize; + fn_params.retain(|param| { + if i < fn_params_offset { + return false; + } + let idx = i - fn_params_offset; + i += 1; + if let Some(&name) = identifiers.get(&idx) { + fn_renames.insert(get_ident_name(param), name); + return false; + } + true + }); + } + } else if let Some(fn_params) = &mut fn_params { + let mut i = 0usize; + fn_params.retain(|param| { + if i < fn_params_offset { + return false; + } + let idx = i - fn_params_offset; + i += 1; + if idx < RESERVED_NAMES.len() { + fn_renames.insert(get_ident_name(param), RESERVED_NAMES[idx]); + return false; + } + true + }); + } + + if func.is_some_and(is_unbound_function_expression) { + let in_try = parser.in_try; + parser.in_function_scope( + true, + fn_params.expect("fn_params should not be None").into_iter(), + |parser| { + for (name, &rename_identifier) in fn_renames.iter() { + let variable = parser + .get_variable_info(rename_identifier) + .and_then(|info| info.free_name.as_ref()) + .and_then(|free_name| match free_name { + FreeName::String(s) => Some(s.to_string()), + FreeName::True => None, + }) + 
.unwrap_or(rename_identifier.to_string()); + parser.set_variable(name.to_string(), variable); + } + + parser.in_try = in_try; + + if let Some(func) = func.and_then(|f| f.as_fn_expr()) { + if let Some(body) = &func.function.body { + parser.detect_mode(&body.stmts); + let prev = parser.prev_statement; + parser.pre_walk_statement(Statement::Block(body)); + parser.prev_statement = prev; + parser.walk_statement(Statement::Block(body)); + } + } else if let Some(func) = func.and_then(|f| f.as_arrow()) { + match &*func.body { + BlockStmtOrExpr::BlockStmt(stmt) => { + parser.detect_mode(&stmt.stmts); + let prev = parser.prev_statement; + parser.pre_walk_statement(Statement::Block(stmt)); + parser.prev_statement = prev; + parser.walk_statement(Statement::Block(stmt)); + } + BlockStmtOrExpr::Expr(expr) => parser.walk_expression(expr), + } + } + }, + ); + } else if func.is_some_and(is_bound_function_expression) { + let in_try = parser.in_try; + + if let Some(call_expr) = func.and_then(|f| f.as_call()) { + let object = call_expr + .callee + .as_expr() + .and_then(|expr| expr.as_member()) + .and_then(|member_expr| member_expr.obj.as_fn_expr()); + + if let Some(func_expr) = object { + parser.in_function_scope( + true, + func_expr + .function + .params + .iter() + .map(|param| Cow::Borrowed(¶m.pat)) + .filter(|pat| { + pat + .as_ident() + .is_some_and(|ident| !RESERVED_NAMES.contains(&ident.sym.as_str())) + }), + |parser| { + for (name, &rename_identifier) in fn_renames.iter() { + let variable = parser + .get_variable_info(rename_identifier) + .and_then(|info| info.free_name.as_ref()) + .and_then(|free_name| match free_name { + FreeName::String(s) => Some(s.to_string()), + FreeName::True => None, + }) + .unwrap_or(rename_identifier.to_string()); + parser.set_variable(name.to_string(), variable); + } + + parser.in_try = in_try; + + if let Some(body) = &func_expr.function.body { + parser.detect_mode(&body.stmts); + let prev = parser.prev_statement; + 
parser.pre_walk_statement(Statement::Block(body)); + parser.prev_statement = prev; + parser.walk_statement(Statement::Block(body)); + } + }, + ); + } + + parser.walk_expr_or_spread(&call_expr.args); + } + } else if let Some(expr) = func { + parser.walk_expression(expr); + } else if let Some(expr) = obj { + parser.walk_expression(expr); + } + + let local_module = named_module + .as_ref() + .map(|name| parser.add_local_module(name.as_str())); + + let dep = Box::new(AmdDefineDependency::new( + (call_expr.span.real_lo(), call_expr.span.real_hi()), + array.map(|expr| span_to_range(expr.span())), + func.map(|expr| span_to_range(expr.span())), + obj.map(|expr| span_to_range(expr.span())), + named_module, + local_module, + )); + + parser.presentational_dependencies.push(dep); + + Some(true) + } +} + +impl JavascriptParserPlugin for AMDDefineDependencyParserPlugin { + fn call( + &self, + parser: &mut JavascriptParser, + call_expr: &CallExpr, + for_name: &str, + ) -> Option { + if for_name == "define" { + self.process_call_define(parser, call_expr) + } else { + None + } + } + + /** + * unlike js, it's hard to share the LocalModule instance in Rust. + * so the AmdDefineDependency will get a clone of LocalModule in parser.local_modules. + * synchronize the used flag to the AmdDefineDependency's local_module at the end of the parse. 
+ */ + fn finish(&self, parser: &mut JavascriptParser) -> Option { + for dep in parser.presentational_dependencies.iter_mut() { + if let Some(define_dep) = dep.as_any_mut().downcast_mut::() + && let Some(local_module) = define_dep.get_local_module_mut() + && parser + .local_modules + .get(local_module.get_idx()) + .is_some_and(|m| m.is_used()) + { + local_module.flag_used(); + } + } + None + } +} diff --git a/crates/rspack_plugin_javascript/src/parser_plugin/amd_require_dependencies_block_parser_plugin.rs b/crates/rspack_plugin_javascript/src/parser_plugin/amd_require_dependencies_block_parser_plugin.rs new file mode 100644 index 000000000000..549f2b0ddef5 --- /dev/null +++ b/crates/rspack_plugin_javascript/src/parser_plugin/amd_require_dependencies_block_parser_plugin.rs @@ -0,0 +1,335 @@ +use std::{borrow::Cow, iter}; + +use either::Either; +use itertools::Itertools; +use rspack_core::{ + AsyncDependenciesBlock, BoxDependency, ConstDependency, DependencyLocation, DependencyRange, + RuntimeGlobals, SpanExt, +}; +use rspack_error::miette::Severity; +use rspack_util::atom::Atom; +use swc_core::{ + common::Spanned, + ecma::ast::{BlockStmtOrExpr, CallExpr, ExprOrSpread, Pat}, +}; + +use super::{ + require_ensure_dependencies_block_parse_plugin::GetFunctionExpression, JavascriptParserPlugin, +}; +use crate::{ + dependency::{ + amd_require_dependency::AMDRequireDependency, + amd_require_item_dependency::AMDRequireItemDependency, + local_module_dependency::LocalModuleDependency, unsupported_dependency::UnsupportedDependency, + }, + utils::eval::BasicEvaluatedExpression, + visitors::{create_traceable_error, JavascriptParser, Statement}, +}; + +fn is_reserved_param(pat: &Pat) -> bool { + const RESERVED_NAMES: [&str; 3] = ["require", "module", "exports"]; + pat + .as_ident() + .is_some_and(|ident| RESERVED_NAMES.contains(&ident.id.sym.as_str())) +} + +pub struct AMDRequireDependenciesBlockParserPlugin; + +impl JavascriptParserPlugin for 
AMDRequireDependenciesBlockParserPlugin { + fn call( + &self, + parser: &mut JavascriptParser, + call_expr: &CallExpr, + for_name: &str, + ) -> Option { + if for_name == "require" { + self.process_call_require(parser, call_expr) + } else { + None + } + } +} + +impl AMDRequireDependenciesBlockParserPlugin { + fn process_array( + &self, + parser: &mut JavascriptParser, + block_deps: &mut Vec, + call_expr: &CallExpr, + param: &BasicEvaluatedExpression, + ) -> Option { + if param.is_array() { + for item in param.items().iter() { + let result = self.process_item(parser, block_deps, call_expr, item); + if result.is_none() { + self.process_context(parser, call_expr, item); + } + } + return Some(true); + } + None + } + + fn process_item( + &self, + parser: &mut JavascriptParser, + block_deps: &mut Vec, + call_expr: &CallExpr, + param: &BasicEvaluatedExpression, + ) -> Option { + if param.is_conditional() { + let options = param.options(); + + for option in options.iter() { + let result = self.process_item(parser, block_deps, call_expr, option); + if result.is_none() { + self.process_context(parser, call_expr, param); + } + } + + return Some(true); + } else if param.is_string() { + let param_str = param.string(); + let range = { + let (l, h) = param.range(); + (l, h - 1) + }; + + if param_str == "require" { + let dep = Box::new(ConstDependency::new( + range.0, + range.1, + RuntimeGlobals::REQUIRE.name().into(), + Some(RuntimeGlobals::REQUIRE), + )); + parser.presentational_dependencies.push(dep); + } else if param_str == "module" { + let dep = Box::new(ConstDependency::new( + range.0, + range.1, + "module".into(), + Some(RuntimeGlobals::MODULE), + )); + parser.presentational_dependencies.push(dep); + } else if param_str == "exports" { + let dep = Box::new(ConstDependency::new( + range.0, + range.1, + "exports".into(), + Some(RuntimeGlobals::EXPORTS), + )); + parser.presentational_dependencies.push(dep); + } else if let Some(local_module) = 
parser.get_local_module_mut(param_str) { + local_module.flag_used(); + let dep = Box::new(LocalModuleDependency::new( + local_module.clone(), + Some((range.0, range.1)), + false, + )); + parser.presentational_dependencies.push(dep); + return Some(true); + } else { + let mut dep = Box::new(AMDRequireItemDependency::new( + Atom::new(param_str.as_str()), + range, + )); + dep.set_optional(parser.in_try); + block_deps.push(dep); + } + + return Some(true); + } + None + } + + fn process_context( + &self, + _parser: &mut JavascriptParser, + _call_expr: &CallExpr, + _param: &BasicEvaluatedExpression, + ) -> Option { + // TODO: support amd context dep + None + } + + fn process_array_for_request_string(&self, param: &BasicEvaluatedExpression) -> Option { + if param.is_array() { + let mut result = param + .items() + .iter() + .map(|item| self.process_item_for_request_string(item)); + if result.all(|item| item.is_some()) { + return Some(result.map(|item| item.expect("")).join(" ")); + } + } + None + } + + #[allow(clippy::only_used_in_recursion)] + fn process_item_for_request_string(&self, param: &BasicEvaluatedExpression) -> Option { + if param.is_conditional() { + let mut result = param + .options() + .iter() + .map(|item| self.process_item_for_request_string(item)); + if result.all(|item| item.is_some()) { + return Some(result.map(|item| item.expect("")).join("|")); + } + } else if param.is_string() { + return Some(param.string().to_string()); + } + None + } + + fn process_function_argument( + &self, + parser: &mut JavascriptParser, + func_arg: &ExprOrSpread, + ) -> bool { + let mut bind_this = true; + + if let Some(func_expr) = func_arg.expr.get_function_expr() { + match func_expr.func { + Either::Left(func) => { + if let Some(body) = &func.function.body { + let params = func + .function + .params + .iter() + .filter(|param| !is_reserved_param(¶m.pat)) + .map(|param| Cow::Borrowed(¶m.pat)); + parser.in_function_scope(true, params, |parser| { + 
parser.walk_statement(Statement::Block(body)); + }); + } + } + Either::Right(arrow) => { + let params = arrow + .params + .iter() + .filter(|param| !is_reserved_param(param)) + .map(Cow::Borrowed); + parser.in_function_scope(true, params, |parser| match &*arrow.body { + BlockStmtOrExpr::BlockStmt(body) => parser.walk_statement(Statement::Block(body)), + BlockStmtOrExpr::Expr(expr) => parser.walk_expression(expr), + }); + } + } + + if let Some(bind_expr) = func_expr.expressions { + parser.walk_expression(bind_expr); + } + + if func_expr._need_this.is_some_and(|x| !x) { + bind_this = false; + } + } else { + parser.walk_expression(&func_arg.expr); + } + + bind_this + } + + fn process_call_require( + &self, + parser: &mut JavascriptParser, + call_expr: &CallExpr, + ) -> Option { + if call_expr.args.is_empty() { + return None; + } + // TODO: check if args includes spread + + // require(['dep1', 'dep2'], callback, errorCallback); + + let first_arg = call_expr.args.first().expect("first arg cannot be None"); + let callback_arg = call_expr.args.get(1); + let error_callback_arg = call_expr.args.get(2); + + let param = parser.evaluate_expression(&first_arg.expr); + + let mut dep = Box::new(AMDRequireDependency::new( + (call_expr.span.real_lo(), call_expr.span.real_hi()), + Some(( + first_arg.expr.span().real_lo(), + first_arg.expr.span().real_hi(), + )), + callback_arg.map(|arg| (arg.expr.span().real_lo(), arg.expr.span().real_hi())), + error_callback_arg.map(|arg| (arg.expr.span().real_lo(), arg.expr.span().real_hi())), + )); + + let block_loc = Some(DependencyLocation::Real( + Into::::into(call_expr.span).with_source(parser.source_map.clone()), + )); + + if call_expr.args.len() == 1 { + let mut block_deps: Vec = vec![dep]; + let mut result = None; + parser.in_function_scope(true, iter::empty(), |parser| { + result = self.process_array(parser, &mut block_deps, call_expr, ¶m); + }); + if result.is_some_and(|x| x) { + let dep_block = Box::new(AsyncDependenciesBlock::new( + 
*parser.module_identifier, + block_loc, + None, + block_deps, + self.process_array_for_request_string(¶m), + )); + parser.blocks.push(dep_block); + return Some(true); + } else { + return None; + } + } + + if call_expr.args.len() == 2 || call_expr.args.len() == 3 { + let mut block_deps: Vec = vec![]; + + let mut result = None; + parser.in_function_scope(true, iter::empty(), |parser| { + result = self.process_array(parser, &mut block_deps, call_expr, ¶m) + }); + + if !result.is_some_and(|x| x) { + let dep = Box::new(UnsupportedDependency::new( + "unsupported".into(), + (call_expr.span.real_lo(), call_expr.span.real_hi()), + )); + parser.presentational_dependencies.push(dep); + parser.warning_diagnostics.push(Box::new( + create_traceable_error( + "UnsupportedFeatureWarning".into(), + "Cannot statically analyse 'require(…, …)'".into(), + parser.source_file, + call_expr.span.into(), + ) + .with_severity(Severity::Warning) + .with_hide_stack(Some(true)), + )); + return Some(true); + } + + dep.function_bind_this = + self.process_function_argument(parser, callback_arg.expect("2nd arg cannot be None")); + + if let Some(error_callback_arg) = error_callback_arg { + dep.error_callback_bind_this = self.process_function_argument(parser, error_callback_arg); + } + + block_deps.insert(0, dep); + let dep_block = Box::new(AsyncDependenciesBlock::new( + *parser.module_identifier, + block_loc, + None, + block_deps, + self.process_array_for_request_string(¶m), + )); + parser.blocks.push(dep_block); + + return Some(true); + } + + None + } +} diff --git a/crates/rspack_plugin_javascript/src/parser_plugin/mod.rs b/crates/rspack_plugin_javascript/src/parser_plugin/mod.rs index 9e01a7894d6e..a09ed097b45a 100644 --- a/crates/rspack_plugin_javascript/src/parser_plugin/mod.rs +++ b/crates/rspack_plugin_javascript/src/parser_plugin/mod.rs @@ -27,10 +27,15 @@ mod use_strict_plugin; mod webpack_included_plugin; mod worker_plugin; +pub mod amd_define_dependency_parser_plugin; +pub mod 
amd_require_dependencies_block_parser_plugin; pub mod define_plugin; pub mod hot_module_replacement_plugin; pub mod provide_plugin; +pub mod require_js_stuff_plugin; +pub(crate) use self::amd_define_dependency_parser_plugin::AMDDefineDependencyParserPlugin; +pub(crate) use self::amd_require_dependencies_block_parser_plugin::AMDRequireDependenciesBlockParserPlugin; pub(crate) use self::api_plugin::APIPlugin; pub(crate) use self::check_var_decl::CheckVarDeclaratorIdent; pub(crate) use self::common_js_exports_parse_plugin::CommonJsExportsParserPlugin; @@ -55,6 +60,7 @@ pub(crate) use self::r#const::{is_logic_op, ConstPlugin}; pub use self::r#trait::{BoxJavascriptParserPlugin, JavascriptParserPlugin}; pub(crate) use self::require_context_dependency_parser_plugin::RequireContextDependencyParserPlugin; pub(crate) use self::require_ensure_dependencies_block_parse_plugin::RequireEnsureDependenciesBlockParserPlugin; +pub(crate) use self::require_js_stuff_plugin::RequireJsStuffPlugin; pub(crate) use self::url_plugin::URLPlugin; pub(crate) use self::use_strict_plugin::UseStrictPlugin; pub(crate) use self::webpack_included_plugin::WebpackIsIncludedPlugin; diff --git a/crates/rspack_plugin_javascript/src/parser_plugin/require_ensure_dependencies_block_parse_plugin.rs b/crates/rspack_plugin_javascript/src/parser_plugin/require_ensure_dependencies_block_parse_plugin.rs index 3bbe6fd17863..93bfc207d571 100644 --- a/crates/rspack_plugin_javascript/src/parser_plugin/require_ensure_dependencies_block_parse_plugin.rs +++ b/crates/rspack_plugin_javascript/src/parser_plugin/require_ensure_dependencies_block_parse_plugin.rs @@ -178,14 +178,14 @@ impl JavascriptParserPlugin for RequireEnsureDependenciesBlockParserPlugin { } } -struct FunctionExpression<'a> { - func: Either<&'a FnExpr, &'a ArrowExpr>, - expressions: Option<&'a Expr>, +pub(crate) struct FunctionExpression<'a> { + pub(crate) func: Either<&'a FnExpr, &'a ArrowExpr>, + pub(crate) expressions: Option<&'a Expr>, // Used by AMD - 
_need_this: Option, + pub(crate) _need_this: Option, } -trait GetFunctionExpression { +pub(crate) trait GetFunctionExpression { fn get_function_expr(&self) -> Option; fn inner_paren(&self) -> &Self; } diff --git a/crates/rspack_plugin_javascript/src/parser_plugin/require_js_stuff_plugin.rs b/crates/rspack_plugin_javascript/src/parser_plugin/require_js_stuff_plugin.rs new file mode 100644 index 000000000000..5269f9116b37 --- /dev/null +++ b/crates/rspack_plugin_javascript/src/parser_plugin/require_js_stuff_plugin.rs @@ -0,0 +1,198 @@ +use rspack_core::{ConstDependency, RuntimeGlobals, SpanExt}; +use swc_core::ecma::ast::{CallExpr, Expr, MemberExpr}; +use swc_core::{common::Spanned, ecma::ast::UnaryExpr}; + +use super::JavascriptParserPlugin; +use crate::utils::eval::{evaluate_to_identifier, evaluate_to_string, BasicEvaluatedExpression}; +use crate::visitors::{expr_matcher, JavascriptParser}; + +pub struct RequireJsStuffPlugin; + +const DEFINE: &str = "define"; +const REQUIRE: &str = "require"; +const DEFINE_AMD: &str = "define.amd"; +const REQUIRE_AMD: &str = "require.amd"; + +impl JavascriptParserPlugin for RequireJsStuffPlugin { + fn call( + &self, + parser: &mut JavascriptParser, + call_expr: &CallExpr, + for_name: &str, + ) -> Option { + if for_name == "require.config" || for_name == "requirejs.config" { + parser + .presentational_dependencies + .push(Box::new(ConstDependency::new( + call_expr.span.real_lo(), + call_expr.span.real_hi(), + "undefined".into(), + None, + ))); + Some(true) + } else { + None + } + } + + fn member( + &self, + parser: &mut JavascriptParser, + expr: &MemberExpr, + _for_name: &str, + ) -> Option { + if expr_matcher::is_require_version(expr) { + parser + .presentational_dependencies + .push(Box::new(ConstDependency::new( + expr.span.real_lo(), + expr.span.real_hi(), + "\"0.0.0\"".into(), + None, + ))); + return Some(true); + } + + if expr_matcher::is_require_onerror(expr) || expr_matcher::is_requirejs_onerror(expr) { + parser + 
.presentational_dependencies + .push(Box::new(ConstDependency::new( + expr.span.real_lo(), + expr.span.real_hi(), + RuntimeGlobals::UNCAUGHT_ERROR_HANDLER.name().into(), + Some(RuntimeGlobals::UNCAUGHT_ERROR_HANDLER), + ))); + return Some(true); + } + + // AMDPlugin + if expr_matcher::is_define_amd(expr) || expr_matcher::is_require_amd(expr) { + parser + .presentational_dependencies + .push(Box::new(ConstDependency::new( + expr.span.real_lo(), + expr.span.real_hi(), + RuntimeGlobals::AMD_OPTIONS.name().into(), + Some(RuntimeGlobals::AMD_OPTIONS), + ))); + return Some(true); + } + + None + } + + // The following is the logic from AMDPlugin, which mainly applies + // AMDDefineDependencyParserPlugin and AMDRequireDependenciesBlockParserPlugin. + // It also has some require.js related logic. I moved the logic here + // to avoid creating a `AMDPlugin` with just a few lines of code. + + fn r#typeof( + &self, + parser: &mut JavascriptParser, + expr: &UnaryExpr, + for_name: &str, + ) -> Option { + if for_name == DEFINE || for_name == REQUIRE { + parser + .presentational_dependencies + .push(Box::new(ConstDependency::new( + expr.span.real_lo(), + expr.span.real_hi(), + "\"function\"".into(), + None, + ))); + return Some(true); + } + + if for_name == DEFINE_AMD || for_name == REQUIRE_AMD { + parser + .presentational_dependencies + .push(Box::new(ConstDependency::new( + expr.span.real_lo(), + expr.span.real_hi(), + "\"object\"".into(), + None, + ))); + return Some(true); + } + + None + } + + fn evaluate_typeof( + &self, + _parser: &mut JavascriptParser, + expr: &UnaryExpr, + for_name: &str, + ) -> Option { + if for_name == DEFINE || for_name == REQUIRE { + return Some(evaluate_to_string( + "function".to_string(), + expr.span.real_lo(), + expr.span.real_hi(), + )); + } + + if for_name == DEFINE_AMD || for_name == REQUIRE_AMD { + return Some(evaluate_to_string( + "object".to_string(), + expr.span.real_lo(), + expr.span.real_hi(), + )); + } + + None + } + + fn 
evaluate_identifier( + &self, + _parser: &mut JavascriptParser, + ident: &str, + start: u32, + end: u32, + ) -> Option { + if ident == DEFINE_AMD { + return Some(evaluate_to_identifier( + ident.to_string(), + "define".to_string(), + Some(true), + start, + end, + )); + } + + if ident == REQUIRE_AMD { + return Some(evaluate_to_identifier( + ident.to_string(), + "require".to_string(), + Some(true), + start, + end, + )); + } + + None + } + + fn can_rename(&self, _parser: &mut JavascriptParser, for_name: &str) -> Option { + if for_name == DEFINE { + return Some(true); + } + None + } + + fn rename(&self, parser: &mut JavascriptParser, expr: &Expr, for_name: &str) -> Option { + if for_name == DEFINE { + parser + .presentational_dependencies + .push(Box::new(ConstDependency::new( + expr.span().real_lo(), + expr.span().real_hi(), + RuntimeGlobals::AMD_DEFINE.name().into(), + Some(RuntimeGlobals::AMD_DEFINE), + ))); + return Some(true); + } + None + } +} diff --git a/crates/rspack_plugin_javascript/src/plugin/api_plugin.rs b/crates/rspack_plugin_javascript/src/plugin/api_plugin.rs index 43e912338428..51b9814de0ff 100644 --- a/crates/rspack_plugin_javascript/src/plugin/api_plugin.rs +++ b/crates/rspack_plugin_javascript/src/plugin/api_plugin.rs @@ -28,7 +28,7 @@ async fn compilation( #[plugin_hook(JavascriptModulesRenderModuleContent for APIPlugin)] fn render_module_content( &self, - _compilation: &Compilation, + compilation: &Compilation, module: &BoxModule, _source: &mut RenderSource, init_fragments: &mut ChunkInitFragments, @@ -36,9 +36,18 @@ fn render_module_content( if let Some(build_info) = module.build_info() && build_info.need_create_require { + let need_prefix = compilation + .options + .output + .environment + .supports_node_prefix_for_core_modules(); + init_fragments.push( NormalInitFragment::new( - "import { createRequire as __WEBPACK_EXTERNAL_createRequire } from 'module';\n".to_string(), + format!( + "import {{ createRequire as __WEBPACK_EXTERNAL_createRequire 
}} from \"{}\";\n", + if need_prefix { "node:module" } else { "module" } + ), InitFragmentStage::StageESMImports, 0, InitFragmentKey::ModuleExternal("node-commonjs".to_string()), diff --git a/crates/rspack_plugin_javascript/src/plugin/impl_plugin_for_js_plugin.rs b/crates/rspack_plugin_javascript/src/plugin/impl_plugin_for_js_plugin.rs index 8b41e3d0a0f4..f08c917d1806 100644 --- a/crates/rspack_plugin_javascript/src/plugin/impl_plugin_for_js_plugin.rs +++ b/crates/rspack_plugin_javascript/src/plugin/impl_plugin_for_js_plugin.rs @@ -2,9 +2,9 @@ use std::hash::Hash; use std::sync::Arc; use async_trait::async_trait; -use rspack_core::rspack_sources::BoxSource; +use rspack_core::rspack_sources::{BoxSource, CachedSource, SourceExt}; use rspack_core::{ - get_js_chunk_filename_template, ChunkGraph, ChunkKind, ChunkUkey, Compilation, + get_js_chunk_filename_template, AssetInfo, ChunkGraph, ChunkKind, ChunkUkey, Compilation, CompilationAdditionalTreeRuntimeRequirements, CompilationChunkHash, CompilationContentHash, CompilationParams, CompilationRenderManifest, CompilerCompilation, CompilerOptions, DependencyType, IgnoreErrorModuleFactory, ModuleGraph, ModuleType, ParserAndGenerator, PathData, @@ -66,6 +66,11 @@ async fn compilation( DependencyType::RequireResolve, params.normal_module_factory.clone(), ); + // AMDPlugin + compilation.set_dependency_factory( + DependencyType::AmdRequireItem, + params.normal_module_factory.clone(), + ); // RequireContextPlugin compilation.set_dependency_factory( DependencyType::RequireContext, @@ -234,29 +239,42 @@ async fn render_manifest( _diagnostics: &mut Vec, ) -> Result<()> { let chunk = compilation.chunk_by_ukey.expect_get(chunk_ukey); - let source = if matches!(chunk.kind(), ChunkKind::HotUpdate) { - self.render_chunk(compilation, chunk_ukey).await? - } else if chunk.has_runtime(&compilation.chunk_group_by_ukey) { - self.render_main(compilation, chunk_ukey).await? 
- } else { - if !chunk_has_js( + let is_hot_update = matches!(chunk.kind(), ChunkKind::HotUpdate); + let is_main_chunk = chunk.has_runtime(&compilation.chunk_group_by_ukey); + if !is_hot_update + && !is_main_chunk + && !chunk_has_js( chunk_ukey, &compilation.chunk_graph, &compilation.get_module_graph(), - ) { - return Ok(()); - } - - self.render_chunk(compilation, chunk_ukey).await? - }; + ) + { + return Ok(()); + } + let (source, _) = compilation + .old_cache + .chunk_render_occasion + .use_cache(compilation, chunk, &SourceType::JavaScript, || async { + let source = if is_hot_update { + self.render_chunk(compilation, chunk_ukey).await? + } else if is_main_chunk { + self.render_main(compilation, chunk_ukey).await? + } else { + self.render_chunk(compilation, chunk_ukey).await? + }; + Ok((CachedSource::new(source).boxed(), Vec::new())) + }) + .await?; let filename_template = get_js_chunk_filename_template( chunk, &compilation.options.output, &compilation.chunk_group_by_ukey, ); - let (output_path, mut asset_info) = compilation.get_path_with_info( - filename_template, + let mut asset_info = AssetInfo::default(); + asset_info.set_javascript_module(compilation.options.output.module); + let output_path = compilation.get_path_with_info( + &filename_template, PathData::default() .chunk_hash_optional(chunk.rendered_hash( &compilation.chunk_hashes_results, @@ -270,15 +288,16 @@ async fn render_manifest( compilation.options.output.hash_digest_length, )) .runtime(chunk.runtime().as_str()), + &mut asset_info, )?; asset_info.set_javascript_module(compilation.options.output.module); - manifest.push(RenderManifestEntry::new( + manifest.push(RenderManifestEntry { source, - output_path, - asset_info, - false, - false, - )); + filename: output_path, + has_filename: false, + info: asset_info, + auxiliary: false, + }); Ok(()) } diff --git a/crates/rspack_plugin_javascript/src/visitors/dependency/parser/mod.rs b/crates/rspack_plugin_javascript/src/visitors/dependency/parser/mod.rs index 
7e8cf0616f8c..9e63d641c268 100644 --- a/crates/rspack_plugin_javascript/src/visitors/dependency/parser/mod.rs +++ b/crates/rspack_plugin_javascript/src/visitors/dependency/parser/mod.rs @@ -29,6 +29,7 @@ use swc_core::ecma::ast::{ use swc_core::ecma::ast::{Expr, Ident, Lit, MemberExpr, RestPat}; use swc_core::ecma::utils::ExprFactory; +use crate::dependency::local_module::LocalModule; use crate::parser_plugin::InnerGraphState; use crate::parser_plugin::{self, JavaScriptParserPluginDrive, JavascriptParserPlugin}; use crate::utils::eval::{self, BasicEvaluatedExpression}; @@ -240,6 +241,7 @@ pub struct JavascriptParser<'parser> { pub(crate) statement_path: Vec, pub(crate) prev_statement: Option, pub(crate) current_tag_info: Option, + pub(crate) local_modules: Vec, // ===== scope info ======= pub(crate) in_try: bool, pub(crate) in_short_hand: bool, @@ -308,6 +310,14 @@ impl<'parser> JavascriptParser<'parser> { plugins.push(Box::new(parser_plugin::ESMExportDependencyParserPlugin)); } + if compiler_options.amd.is_some() && (module_type.is_js_auto() || module_type.is_js_dynamic()) { + plugins.push(Box::new( + parser_plugin::AMDRequireDependenciesBlockParserPlugin, + )); + plugins.push(Box::new(parser_plugin::AMDDefineDependencyParserPlugin)); + plugins.push(Box::new(parser_plugin::RequireJsStuffPlugin)); + } + if module_type.is_js_auto() || module_type.is_js_dynamic() { plugins.push(Box::new(parser_plugin::CommonJsImportsParserPlugin)); plugins.push(Box::new(parser_plugin::CommonJsPlugin)); @@ -388,9 +398,29 @@ impl<'parser> JavascriptParser<'parser> { inner_graph: InnerGraphState::new(), additional_data, parse_meta, + local_modules: Default::default(), } } + pub fn add_local_module(&mut self, name: &str) -> LocalModule { + let m = LocalModule::new(name.into(), self.local_modules.len()); + self.local_modules.push(m.clone()); + m + } + + pub fn get_local_module(&self, name: &str) -> Option { + for m in self.local_modules.iter() { + if m.get_name() == name { + return 
Some(m.clone()); + } + } + None + } + + pub fn get_local_module_mut(&mut self, name: &str) -> Option<&mut LocalModule> { + self.local_modules.iter_mut().find(|m| m.get_name() == name) + } + pub fn is_asi_position(&self, pos: BytePos) -> bool { let curr_path = self.statement_path.last().expect("Should in statement"); if curr_path.span_hi() == pos && self.semicolons.contains(&pos) { @@ -482,7 +512,7 @@ impl<'parser> JavascriptParser<'parser> { self.definitions_db.set(definitions, name, info); } - fn set_variable(&mut self, name: String, variable: String) { + pub fn set_variable(&mut self, name: String, variable: String) { let id = self.definitions; if name == variable { self.definitions_db.delete(id, &name); @@ -881,7 +911,7 @@ impl<'parser> JavascriptParser<'parser> { current_scope.is_strict = value; } - fn detect_mode(&mut self, stmts: &[Stmt]) { + pub fn detect_mode(&mut self, stmts: &[Stmt]) { let Some(Lit::Str(str)) = stmts .first() .and_then(|stmt| stmt.as_expr()) diff --git a/crates/rspack_plugin_javascript/src/visitors/dependency/parser/walk.rs b/crates/rspack_plugin_javascript/src/visitors/dependency/parser/walk.rs index 9666c8a170bb..8648a19b30e3 100644 --- a/crates/rspack_plugin_javascript/src/visitors/dependency/parser/walk.rs +++ b/crates/rspack_plugin_javascript/src/visitors/dependency/parser/walk.rs @@ -78,7 +78,7 @@ impl<'parser> JavascriptParser<'parser> { self.in_tagged_template_tag = old_in_tagged_template_tag; } - fn in_function_scope<'a, I, F>(&mut self, has_this: bool, params: I, f: F) + pub(crate) fn in_function_scope<'a, I, F>(&mut self, has_this: bool, params: I, f: F) where F: FnOnce(&mut Self), I: Iterator>, diff --git a/crates/rspack_plugin_javascript/src/visitors/dependency/util.rs b/crates/rspack_plugin_javascript/src/visitors/dependency/util.rs index 631c81d7a23c..4bef9c420e8d 100644 --- a/crates/rspack_plugin_javascript/src/visitors/dependency/util.rs +++ b/crates/rspack_plugin_javascript/src/visitors/dependency/util.rs @@ -197,13 
+197,15 @@ pub(crate) mod expr_matcher { is_webpack_module_id: "__webpack_module__.id", is_object_define_property: "Object.defineProperty", is_require_ensure: "require.ensure", + is_require_version: "require.version", + is_require_amd: "require.amd", + is_require_onerror: "require.onError", + is_requirejs_onerror: "requirejs.onError", + is_define_amd: "define.amd", // unsupported is_require_extensions: "require.extensions", is_require_config: "require.config", - is_require_version: "require.version", - is_require_amd: "require.amd", is_require_include: "require.include", - is_require_onerror: "require.onError", is_require_main_require: "require.main.require", is_module_parent_require: "module.parent.require", }); diff --git a/crates/rspack_plugin_library/src/assign_library_plugin.rs b/crates/rspack_plugin_library/src/assign_library_plugin.rs index 12aaacc41137..5a848125ed2d 100644 --- a/crates/rspack_plugin_library/src/assign_library_plugin.rs +++ b/crates/rspack_plugin_library/src/assign_library_plugin.rs @@ -268,7 +268,7 @@ fn render_startup( exports = "__webpack_exports_export__"; } source.add(RawSource::from(format!( - "for(var i in {exports}) __webpack_export_target__[i] = {exports}[i];\n" + "for(var __webpack_i__ in {exports}) __webpack_export_target__[__webpack_i__] = {exports}[__webpack_i__];\n" ))); source.add(RawSource::from(format!( "if({exports}.__esModule) Object.defineProperty(__webpack_export_target__, '__esModule', {{ value: true }});\n" diff --git a/crates/rspack_plugin_mf/src/sharing/consume_shared_plugin.rs b/crates/rspack_plugin_mf/src/sharing/consume_shared_plugin.rs index 3d957477f47c..3811cf47a134 100644 --- a/crates/rspack_plugin_mf/src/sharing/consume_shared_plugin.rs +++ b/crates/rspack_plugin_mf/src/sharing/consume_shared_plugin.rs @@ -1,4 +1,4 @@ -use std::path::PathBuf; +use std::collections::HashSet; use std::sync::LazyLock; use std::sync::Mutex; use std::{fmt, path::Path, sync::Arc}; @@ -98,19 +98,31 @@ fn resolve_matched_configs( } } 
-async fn get_description_file(mut dir: &Path) -> Option<(PathBuf, serde_json::Value)> {
+async fn get_description_file(
+  mut dir: &Path,
+  satisfies_description_file_data: Option<impl Fn(Option<serde_json::Value>) -> bool>,
+) -> (Option<serde_json::Value>, Option<Vec<String>>) {
   let description_filename = "package.json";
+  let mut checked_file_paths = HashSet::new();
+
   loop {
     let description_file = dir.join(description_filename);
     if let Ok(data) = tokio::fs::read(&description_file).await
       && let Ok(data) = serde_json::from_slice::<serde_json::Value>(&data)
     {
-      return Some((description_file, data));
+      if satisfies_description_file_data
+        .as_ref()
+        .map_or(false, |f| !f(Some(data.clone())))
+      {
+        checked_file_paths.insert(description_file.to_string_lossy().to_string());
+      } else {
+        return (Some(data), None);
+      }
     }
     if let Some(parent) = dir.parent() {
       dir = parent;
     } else {
-      return None;
+      return (None, Some(checked_file_paths.into_iter().collect()));
     }
   }
 }
@@ -226,29 +238,55 @@ impl ConsumeSharedPlugin {
         required_version_warning("Unable to extract the package name from request.");
         return None;
       };
-      if let Some(package_name) = package_name
-        && let Some((description_path, data)) = get_description_file(context.as_ref()).await
-      {
-        if let Some(name) = data.get("name").and_then(|n| n.as_str())
-          && name == package_name
-        {
-          // Package self-referencing
+
+      if let Some(package_name) = package_name {
+        let (data, checked_description_file_paths) = get_description_file(
+          context.as_ref(),
+          Some(|data: Option<serde_json::Value>| {
+            if let Some(data) = data {
+              let name_matches = data
+                .get("name")
+                .and_then(|n| n.as_str())
+                .map_or(false, |name| name == package_name);
+              let version_matches = get_required_version_from_description_file(data, package_name)
+                .map_or(false, |version| {
+                  matches!(version, ConsumeVersion::Version(_))
+                });
+              name_matches || version_matches
+            } else {
+              false
+            }
+          }),
+        )
+        .await;
+
+        if let Some(data) = data {
+          if let Some(name) = data.get("name").and_then(|n| n.as_str())
+            && name == package_name
+          {
+            // Package self-referencing
+ return None; + } + return get_required_version_from_description_file(data, package_name); + } else { + if let Some(file_paths) = checked_description_file_paths + && !file_paths.is_empty() + { + required_version_warning(&format!( + "Unable to find required version for \"{package_name}\" in description file/s\n{}\nIt need to be in dependencies, devDependencies or peerDependencies.", + file_paths.join("\n") + )); + } else { + required_version_warning(&format!( + "Unable to find description file in {}", + context.as_str() + )); + } return None; } - get_required_version_from_description_file(data, package_name).or_else(|| { - required_version_warning(&format!( - "Unable to find required version for \"{package_name}\" in description file ({}). It need to be in dependencies, devDependencies or peerDependencies.", - description_path.display(), - )); - None - }) - } else { - required_version_warning(&format!( - "Unable to find description file in {}", - context.as_str() - )); - None } + + None } } diff --git a/crates/rspack_plugin_progress/src/lib.rs b/crates/rspack_plugin_progress/src/lib.rs index dc6cceeb4d01..8da2437b0eca 100644 --- a/crates/rspack_plugin_progress/src/lib.rs +++ b/crates/rspack_plugin_progress/src/lib.rs @@ -282,6 +282,14 @@ async fn this_compilation( _compilation: &mut Compilation, _params: &mut CompilationParams, ) -> Result<()> { + if let ProgressPluginOptions::Default(options) = &self.options { + let progress_bar = self.progress_bar.as_ref().unwrap_or_else(|| unreachable!()); + if !options.profile { + progress_bar.reset(); + progress_bar.set_prefix(options.prefix.clone()); + } + } + self.handler( 0.08, "setup".to_string(), @@ -306,14 +314,6 @@ async fn compilation( #[plugin_hook(CompilerMake for ProgressPlugin)] async fn make(&self, _compilation: &mut Compilation) -> Result<()> { - if let ProgressPluginOptions::Default(options) = &self.options { - let progress_bar = self.progress_bar.as_ref().unwrap_or_else(|| unreachable!()); - if !options.profile 
{ - progress_bar.reset(); - progress_bar.set_prefix(options.prefix.clone()); - } - } - self.handler(0.1, String::from("make"), vec![], None)?; self.modules_count.store(0, Relaxed); self.modules_done.store(0, Relaxed); @@ -394,6 +394,11 @@ async fn finish_make(&self, _compilation: &mut Compilation) -> Result<()> { ) } +#[plugin_hook(CompilationFinishModules for ProgressPlugin)] +async fn finish_modules(&self, _compilation: &mut Compilation) -> Result<()> { + self.sealing_hooks_report("finish modules", 0) +} + #[plugin_hook(CompilationSeal for ProgressPlugin)] async fn seal(&self, _compilation: &mut Compilation) -> Result<()> { self.sealing_hooks_report("plugins", 1) @@ -405,11 +410,6 @@ fn optimize_dependencies(&self, _compilation: &mut Compilation) -> Result