refactor: add more clippy rule (#7594)
* refactor: enable more clippy rules

* chore: fix clippy rule

* chore: fix conflict
hardfist authored Aug 21, 2024
1 parent 0d40d9f commit 82f49ce
Showing 37 changed files with 188 additions and 127 deletions.
71 changes: 71 additions & 0 deletions Cargo.toml
@@ -104,3 +104,74 @@ lto = "fat"
debug = true
inherits = "release"
strip = false

# the following lint rules are from https://github.com/biomejs/biome/blob/4bd3d6f09642952ee14445ed56af81a73796cea1/Cargo.toml#L7C1-L75C1
[workspace.lints.rust]
absolute_paths_not_starting_with_crate = "warn"
dead_code = "warn"
trivial_numeric_casts = "warn"
unused_import_braces = "warn"
unused_lifetimes = "warn"
unused_macro_rules = "warn"

[workspace.lints.clippy]
cargo_common_metadata = "allow"
empty_docs = "allow" # there are some false positives inside biome_wasm
multiple_crate_versions = "allow"

# pedantic
checked_conversions = "warn"
cloned_instead_of_copied = "warn"
copy_iterator = "warn"
dbg_macro = "warn"
doc_link_with_quotes = "warn"
empty_enum = "warn"
expl_impl_clone_on_copy = "warn"
explicit_into_iter_loop = "warn"
filter_map_next = "warn"
flat_map_option = "warn"
fn_params_excessive_bools = "warn"
from_iter_instead_of_collect = "warn"
implicit_clone = "warn"
# not sure whether it's necessary
# implicit_hasher = "warn"
index_refutable_slice = "warn"
inefficient_to_string = "warn"
invalid_upcast_comparisons = "warn"
iter_not_returning_iterator = "warn"
large_stack_arrays = "warn"
large_types_passed_by_value = "warn"
macro_use_imports = "warn"
manual_ok_or = "warn"
manual_string_new = "warn"
map_flatten = "warn"
map_unwrap_or = "warn"
mismatching_type_param_order = "warn"
mut_mut = "warn"
naive_bytecount = "warn"
needless_bitwise_bool = "warn"
needless_continue = "warn"
needless_for_each = "warn"
no_effect_underscore_binding = "warn"
ref_binding_to_reference = "warn"
ref_option_ref = "warn"
stable_sort_primitive = "warn"
unnecessary_box_returns = "warn"
unnecessary_join = "warn"
unnested_or_patterns = "warn"
unreadable_literal = "warn"
verbose_bit_mask = "warn"
zero_sized_map_values = "warn"

# restriction
empty_drop = "warn"
float_cmp_const = "warn"
get_unwrap = "warn"
infinite_loop = "warn"
lossy_float_literal = "warn"
rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
verbose_file_reads = "warn"
# https://github.com/rustwasm/wasm-bindgen/issues/3944
#mem_forget = "warn"
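As an illustration of what the newly enabled pedantic rules flag, here is a minimal, self-contained sketch (not code from this repository) of the cloned_instead_of_copied lint, which drives most of the .cloned() → .copied() substitutions in the diff below. When an iterator yields references to Copy types, .copied() states that only a bitwise copy happens.

// Standalone example; `u32` is `Copy`, so clippy prefers `.copied()` over `.cloned()`.
fn main() {
    let ids: Vec<u32> = vec![1, 2, 3];

    // Flagged by clippy::cloned_instead_of_copied:
    // let doubled: Vec<u32> = ids.iter().cloned().map(|id| id * 2).collect();

    // Suggested form: `.copied()` only compiles for `Copy` items and reads as such.
    let doubled: Vec<u32> = ids.iter().copied().map(|id| id * 2).collect();

    assert_eq!(doubled, vec![2, 4, 6]);
}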
3 changes: 3 additions & 0 deletions crates/rspack_core/Cargo.toml
@@ -69,3 +69,6 @@ ustr = { workspace = true }

[dev-dependencies]
pretty_assertions = { version = "1.4.0" }

[lints]
workspace = true
11 changes: 6 additions & 5 deletions crates/rspack_core/src/build_chunk_graph/code_splitter.rs
@@ -395,7 +395,7 @@ impl<'me> CodeSplitter<'me> {
.compilation
.get_module_graph()
.module_identifier_by_dependency_id(dep)
.cloned()
.copied()
})
.collect::<Vec<_>>();

@@ -849,7 +849,7 @@ Or do you want to use the entrypoints '{name}' and '{runtime}' independently on
.get_module_graph()
.modules()
.keys()
.cloned()
.copied()
.collect::<Vec<_>>();
for module_identifier in ids {
self.compilation.chunk_graph.add_module(module_identifier)
@@ -1278,8 +1278,9 @@ Or do you want to use the entrypoints '{name}' and '{runtime}' independently on
entry_options
.chunk_loading
.as_ref()
.map(|x| !matches!(x, ChunkLoading::Disable))
.unwrap_or(item_chunk_group_info.chunk_loading),
.map_or(item_chunk_group_info.chunk_loading, |x| {
!matches!(x, ChunkLoading::Disable)
}),
entry_options
.async_chunks
.unwrap_or(item_chunk_group_info.async_chunks),
@@ -1554,7 +1555,7 @@ Or do you want to use the entrypoints '{name}' and '{runtime}' independently on
let chunk_group_ukey = chunk_group_info.chunk_group;

// 1. Add new targets to the list of children
chunk_group_info.children.extend(targets.iter().cloned());
chunk_group_info.children.extend(targets.iter().copied());

// 2. Calculate resulting available modules
let resulting_available_modules = chunk_group_info
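The chunk_loading change above is the map_unwrap_or lint in action: .map(f).unwrap_or(default) on an Option collapses into a single .map_or(default, f). A minimal sketch with made-up names (not rspack's types):

// Hypothetical stand-in for an optional per-entry override of an inherited flag.
fn resolve_flag(override_value: Option<bool>, inherited: bool) -> bool {
    // Flagged by clippy::map_unwrap_or:
    // override_value.map(|x| !x).unwrap_or(inherited)

    // Suggested form: one combinator, default first, mapping closure second.
    override_value.map_or(inherited, |x| !x)
}

fn main() {
    assert!(resolve_flag(None, true)); // falls back to the inherited value
    assert!(resolve_flag(Some(false), false)); // !false == true
}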
14 changes: 5 additions & 9 deletions crates/rspack_core/src/chunk.rs
@@ -532,13 +532,9 @@ impl Chunk {
order_children
.iter()
.flat_map(|(_, child_chunks)| {
child_chunks.iter().filter_map(|chunk_ukey| {
compilation
.chunk_by_ukey
.expect_get(chunk_ukey)
.id
.to_owned()
})
child_chunks
.iter()
.filter_map(|chunk_ukey| compilation.chunk_by_ukey.expect_get(chunk_ukey).id.clone())
})
.collect_vec()
})
@@ -562,7 +558,7 @@ impl Chunk {
) {
let chunk = compilation.chunk_by_ukey.expect_get(chunk_ukey);
if let (Some(chunk_id), Some(child_chunk_ids)) = (
chunk.id.to_owned(),
chunk.id.clone(),
chunk.get_child_ids_by_order(order, compilation),
) {
result
@@ -577,7 +573,7 @@
.get_sorted_groups_iter(&compilation.chunk_group_by_ukey)
.filter_map(|chunk_group_ukey| {
get_chunk_group_from_ukey(chunk_group_ukey, &compilation.chunk_group_by_ukey)
.map(|g| g.chunks.to_owned())
.map(|g| g.chunks.clone())
})
.flatten()
{
14 changes: 7 additions & 7 deletions crates/rspack_core/src/chunk_graph/chunk_graph_chunk.rs
@@ -88,7 +88,7 @@ impl ChunkGraph {

let old_cgm = self.get_chunk_graph_module(*old_module_id);
// Using clone to avoid using mutable borrow and immutable borrow at the same time.
for chunk in old_cgm.chunks.clone().into_iter() {
for chunk in old_cgm.chunks.clone() {
let cgc = self.get_chunk_graph_chunk_mut(chunk);
cgc.modules.remove(old_module_id);
cgc.modules.insert(*new_module_id);
@@ -100,9 +100,9 @@
let old_cgm = self.get_chunk_graph_module_mut(*old_module_id);
old_cgm.chunks.clear();

for chunk in old_cgm.entry_in_chunks.clone().into_iter() {
for chunk in old_cgm.entry_in_chunks.clone() {
let cgc = self.get_chunk_graph_chunk_mut(chunk);
if let Some(old) = cgc.entry_modules.get(old_module_id).cloned() {
if let Some(old) = cgc.entry_modules.get(old_module_id).copied() {
let mut new_entry_modules = LinkedHashMap::default();
for (m, cg) in cgc.entry_modules.iter() {
if m == old_module_id {
@@ -122,7 +122,7 @@
old_cgm.entry_in_chunks.clear();
let old_cgm = self.get_chunk_graph_module(*old_module_id);

for chunk in old_cgm.runtime_in_chunks.clone().into_iter() {
for chunk in old_cgm.runtime_in_chunks.clone() {
let cgc = self.get_chunk_graph_chunk_mut(chunk);
// delete old module
cgc.runtime_modules = std::mem::take(&mut cgc.runtime_modules)
@@ -155,7 +155,7 @@ impl ChunkGraph {
pub fn get_chunk_entry_modules(&self, chunk_ukey: &ChunkUkey) -> Vec<ModuleIdentifier> {
let chunk_graph_chunk = self.get_chunk_graph_chunk(chunk_ukey);

chunk_graph_chunk.entry_modules.keys().cloned().collect()
chunk_graph_chunk.entry_modules.keys().copied().collect()
}

pub fn get_chunk_entry_modules_with_chunk_group_iterable(
@@ -476,7 +476,7 @@ impl ChunkGraph {
module_graph: &ModuleGraph,
) -> Vec<ModuleIdentifier> {
let cgc = self.get_chunk_graph_chunk(chunk);
let mut input = cgc.modules.iter().cloned().collect::<Vec<_>>();
let mut input = cgc.modules.iter().copied().collect::<Vec<_>>();
input.sort_unstable();
let mut modules = find_graph_roots(input, |module| {
let mut set: IdentifierSet = Default::default();
@@ -825,6 +825,6 @@ impl ChunkGraph {

None
})
.unwrap_or(module.source_types().iter().cloned().collect())
.unwrap_or(module.source_types().iter().copied().collect())
}
}
8 changes: 4 additions & 4 deletions crates/rspack_core/src/compiler/compilation.rs
@@ -924,7 +924,7 @@ impl Compilation {

let chunk_ukey_and_manifest = results.into_inner();

for result in chunk_ukey_and_manifest.into_iter() {
for result in chunk_ukey_and_manifest {
let (chunk_ukey, manifest, diagnostics) = result?;
self.extend_diagnostics(diagnostics);

@@ -1005,7 +1005,7 @@ impl Compilation {
// so use filter_map to ignore them
module_graph
.module_identifier_by_dependency_id(dep_id)
.cloned()
.copied()
})
.collect()
}
@@ -1031,7 +1031,7 @@
.values()
.flat_map(|item| item.all_dependencies())
.chain(self.global_entry.all_dependencies())
.cloned()
.copied()
.collect(),
)],
)?;
@@ -1277,7 +1277,7 @@
let entrypoint = self.chunk_group_by_ukey.expect_get(entrypoint_ukey);
entrypoint.get_runtime_chunk(&self.chunk_group_by_ukey)
});
UkeySet::from_iter(entries.chain(async_entries))
entries.chain(async_entries).collect()
}

#[allow(clippy::unwrap_in_result)]
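The UkeySet::from_iter(...) → .collect() rewrite above comes from the from_iter_instead_of_collect lint: when the target type is known, .collect() keeps the whole expression reading left to right. A minimal sketch with std types (the real code collects into rspack's UkeySet):

use std::collections::HashSet;

fn main() {
    let entries = [1u32, 2, 3];
    let async_entries = [3u32, 4];

    // Flagged by clippy::from_iter_instead_of_collect:
    // let all = HashSet::<u32>::from_iter(entries.iter().chain(async_entries.iter()).copied());

    // Suggested form: finish the iterator chain with `.collect()`.
    let all: HashSet<u32> = entries.iter().chain(async_entries.iter()).copied().collect();

    assert_eq!(all.len(), 4); // 3 appears in both inputs, so the set holds 4 elements
}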
15 changes: 7 additions & 8 deletions crates/rspack_core/src/compiler/hmr.rs
@@ -27,14 +27,13 @@
let (old_all_modules, old_runtime_modules) = collect_changed_modules(old.compilation)?;
// TODO: should use `records`

let all_old_runtime = RuntimeSpec::from_iter(
old
.compilation
.get_chunk_graph_entries()
.into_iter()
.filter_map(|entry_ukey| get_chunk_from_ukey(&entry_ukey, &old.compilation.chunk_by_ukey))
.flat_map(|entry_chunk| entry_chunk.runtime.clone()),
);
let all_old_runtime = old
.compilation
.get_chunk_graph_entries()
.into_iter()
.filter_map(|entry_ukey| get_chunk_from_ukey(&entry_ukey, &old.compilation.chunk_by_ukey))
.flat_map(|entry_chunk| entry_chunk.runtime.clone())
.collect();

let mut old_chunks: Vec<(String, RuntimeSpec)> = vec![];
for (_, chunk) in old.compilation.chunk_by_ukey.iter() {
@@ -27,7 +27,7 @@ impl FixBuildMeta {

pub fn fix_artifact(self, artifact: &mut MakeArtifact) {
let mut module_graph = artifact.get_module_graph_mut();
for (id, build_meta) in self.origin_module_build_meta.into_iter() {
for (id, build_meta) in self.origin_module_build_meta {
if let Some(module) = module_graph.module_by_identifier_mut(&id) {
if let Some(module) = module.as_normal_module_mut() {
if matches!(module.source(), NormalModuleSource::BuiltFailed(_)) {
2 changes: 1 addition & 1 deletion crates/rspack_core/src/compiler/make/cutout/fix_issuers.rs
@@ -25,7 +25,7 @@ impl FixIssuers {

pub fn fix_artifact(self, artifact: &mut MakeArtifact) {
let mut module_graph = artifact.get_module_graph_mut();
for (id, issuer) in self.origin_module_issuers.into_iter() {
for (id, issuer) in self.origin_module_issuers {
if let Some(mgm) = module_graph.module_graph_module_by_identifier_mut(&id) {
mgm.set_issuer(issuer);
}
@@ -163,7 +163,7 @@ mod t {
self
.ids
.iter()
.map(|id| id.to_string().into())
.map(|id| (*id).to_string().into())
.collect_vec()
}
}
2 changes: 1 addition & 1 deletion crates/rspack_core/src/compiler/make/mod.rs
@@ -113,7 +113,7 @@ pub fn make_module_graph(
.values()
.flat_map(|item| item.all_dependencies())
.chain(compilation.global_entry.all_dependencies())
.cloned()
.copied()
.collect(),
));
}
1 change: 0 additions & 1 deletion crates/rspack_core/src/compiler/make/repair/factorize.rs
@@ -203,7 +203,6 @@ impl Task<MakeTaskContext> for FactorizeResultTask {
context_dependencies,
missing_dependencies,
diagnostics,
..
} = *self;
let artifact = &mut context.artifact;
if !diagnostics.is_empty() {
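The removed .. above lines up with the rest_pat_in_fully_bound_structs restriction lint enabled in the workspace: once every field is bound by name, a trailing .. only hides fields added later. A minimal sketch with a made-up struct (the real one is FactorizeResultTask):

// Hypothetical struct; the fields are invented for the example.
struct TaskResult {
    diagnostics: Vec<String>,
    missing_dependencies: Vec<String>,
}

fn main() {
    let result = TaskResult {
        diagnostics: vec!["warning".to_string()],
        missing_dependencies: Vec::new(),
    };

    // Flagged by clippy::rest_pat_in_fully_bound_structs (all fields bound, plus `..`):
    // let TaskResult { diagnostics, missing_dependencies, .. } = result;

    // Suggested form: bind every field, so a newly added field becomes a compile error here.
    let TaskResult {
        diagnostics,
        missing_dependencies,
    } = result;

    assert_eq!(diagnostics.len(), 1);
    assert!(missing_dependencies.is_empty());
}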
@@ -28,7 +28,7 @@ impl Task<MakeTaskContext> for ProcessDependenciesTask {
let module_graph =
&mut MakeTaskContext::get_module_graph_mut(&mut context.artifact.module_graph_partial);

dependencies.into_iter().for_each(|dependency_id| {
for dependency_id in dependencies {
let dependency = module_graph
.dependency_by_id(&dependency_id)
.expect("should have dependency");
@@ -59,7 +59,7 @@ impl Task<MakeTaskContext> for ProcessDependenciesTask {
.or_insert(vec![])
.push(dependency_id);
}
});
}

let module = module_graph
.module_by_identifier(&original_module_identifier)
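The dependencies.into_iter().for_each(...) → for dependency_id in dependencies rewrite above is what the needless_for_each lint asks for: a plain for loop instead of a closure, which also allows continue, break, and ?. A minimal sketch with assumed names:

fn main() {
    let dependencies = vec![10u32, 20, 30];
    let mut processed = Vec::new();

    // Flagged by clippy::needless_for_each:
    // dependencies.iter().for_each(|dep| processed.push(dep + 1));

    // Suggested form: an ordinary `for` loop over the same iterator.
    for dep in &dependencies {
        processed.push(dep + 1);
    }

    assert_eq!(processed, vec![11, 21, 31]);
}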
3 changes: 1 addition & 2 deletions crates/rspack_core/src/compiler/mod.rs
@@ -313,8 +313,7 @@
if let Some(source) = asset.get_source() {
let filename = filename
.split_once('?')
.map(|(filename, _query)| filename)
.unwrap_or(filename);
.map_or(filename, |(filename, _query)| filename);
let file_path = output_path.join(filename);
self
.output_filesystem
11 changes: 5 additions & 6 deletions crates/rspack_core/src/compiler/module_executor/execute.rs
@@ -101,7 +101,7 @@ impl Task<MakeTaskContext> for ExecuteTask {

chunk.id = chunk.name.clone();
chunk.ids = vec![chunk.id.clone().expect("id is set")];
let runtime = RuntimeSpec::from_iter(once("build time".into()));
let runtime: RuntimeSpec = once("build time".into()).collect();

chunk.runtime = runtime.clone();

@@ -165,7 +165,7 @@ impl Task<MakeTaskContext> for ExecuteTask {
Handle::current().block_on(async {
compilation
.process_runtime_requirements(
Vec::from_iter(modules.iter().copied()),
modules.iter().copied().collect::<Vec<_>>(),
once(chunk_ukey),
once(chunk_ukey),
compilation.plugin_driver.clone(),
@@ -257,7 +257,7 @@ impl Task<MakeTaskContext> for ExecuteTask {

result.id = id;

modules.iter().for_each(|m| {
for m in modules.iter() {
let codegen_result = codegen_results.get(m, Some(&runtime));

if let Some(source) = codegen_result.get(&SourceType::Asset)
Expand All @@ -270,7 +270,7 @@ impl Task<MakeTaskContext> for ExecuteTask {
CompilationAsset::new(Some(source.clone()), asset_info.inner().clone()),
);
}
});
}

Ok(result)
}
@@ -305,8 +305,7 @@ impl Task<MakeTaskContext> for ExecuteTask {
cacheable: runtime_module.cacheable(),
size: runtime_module_size
.get(&identifier)
.map(|s| s.to_owned())
.unwrap_or(0 as f64),
.map_or(0 as f64, |s| s.to_owned()),
}
})
.collect_vec();

2 comments on commit 82f49ce

@rspack-bot

📝 Benchmark detail: Open

Name Base (2024-08-21 dd6e27a) Current Change
10000_development-mode + exec 2.36 s ± 30 ms 2.37 s ± 23 ms +0.61 %
10000_development-mode_hmr + exec 712 ms ± 10 ms 721 ms ± 15 ms +1.32 %
10000_production-mode + exec 3.02 s ± 24 ms 3.03 s ± 17 ms +0.54 %
arco-pro_development-mode + exec 1.91 s ± 60 ms 1.92 s ± 63 ms +0.28 %
arco-pro_development-mode_hmr + exec 438 ms ± 3.3 ms 437 ms ± 3 ms -0.15 %
arco-pro_production-mode + exec 3.47 s ± 86 ms 3.49 s ± 68 ms +0.58 %
arco-pro_production-mode_generate-package-json-webpack-plugin + exec 3.52 s ± 83 ms 3.56 s ± 84 ms +1.18 %
threejs_development-mode_10x + exec 1.69 s ± 19 ms 1.68 s ± 21 ms -0.54 %
threejs_development-mode_10x_hmr + exec 797 ms ± 6 ms 796 ms ± 8.4 ms -0.17 %
threejs_production-mode_10x + exec 5.48 s ± 33 ms 5.54 s ± 32 ms +1.23 %

@rspack-bot

📝 Ran ecosystem CI: Open

suite result
modernjs ✅ success
_selftest ✅ success
nx ❌ failure
rspress ✅ success
rslib ✅ success
rsbuild ❌ failure
examples ✅ success
