Skip to content

Commit

Permalink
chore: resolve `clippy::cloned_instead_of_copied` and `clippy::from_iter_instead_of_collect` lints (#225)
Browse files Browse the repository at this point in the history

# Rationale for this change

We have cargo clippy running in our CI in order to enforce code quality.
In order to increase our standards, we should enable the
clippy::pedantic lint group.

# What changes are included in this PR?

Resolved `clippy::cloned_instead_of_copied` and
`clippy::from_iter_instead_of_collect` lint warnings.

# Are these changes tested?

Yes.

---------

Co-authored-by: Jay White <[email protected]>
  • Loading branch information
mehulmathur16 and JayWhite2357 authored Oct 7, 2024
1 parent 6237221 commit 2cbff77
Show file tree
Hide file tree
Showing 18 changed files with 198 additions and 163 deletions.
4 changes: 3 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -77,4 +77,6 @@ unnested_or_patterns = "deny"
unreadable_literal = "deny"
must_use_candidate = "deny"
range_plus_one = "deny"
cast_lossless = "deny"
cloned_instead_of_copied = "deny"
from_iter_instead_of_collect = "deny"
cast_lossless = "deny"
10 changes: 4 additions & 6 deletions crates/proof-of-sql/benches/scaffold/random_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,12 +48,10 @@ pub fn generate_random_columns<'a, S: Scalar>(
let len = rng
.gen_range(0..=bound.map(|b| b(num_rows) as usize).unwrap_or(10));
alloc.alloc_str(
String::from_iter(
rng.sample_iter(&rand::distributions::Alphanumeric)
.take(len)
.map(char::from),
)
.as_str(),
&rng.sample_iter(&rand::distributions::Alphanumeric)
.take(len)
.map(char::from)
.collect::<String>(),
) as &str
});
Column::VarChar((
Expand Down
2 changes: 1 addition & 1 deletion crates/proof-of-sql/src/base/commitment/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ impl Commitment for RistrettoPoint {
offset: usize,
_setup: &Self::PublicSetup<'_>,
) -> Vec<Self> {
let sequences = Vec::from_iter(committable_columns.iter().map(Into::into));
let sequences: Vec<_> = committable_columns.iter().map(Into::into).collect();
let mut compressed_commitments = vec![Default::default(); committable_columns.len()];
blitzar::compute::compute_curve25519_commitments(
&mut compressed_commitments,
Expand Down
11 changes: 5 additions & 6 deletions crates/proof-of-sql/src/base/commitment/query_commitments.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,12 +52,11 @@ impl<C: Commitment> QueryCommitmentsExt<C> for QueryCommitments<C> {
table_ref,
TableCommitment::from_accessor_with_max_bounds(
table_ref,
&Vec::from_iter(
accessor
.lookup_schema(table_ref)
.iter()
.filter_map(|c| columns.iter().find(|x| x.name() == c.0).copied()),
),
&accessor
.lookup_schema(table_ref)
.iter()
.filter_map(|c| columns.iter().find(|x| x.name() == c.0).copied())
.collect::<Vec<_>>(),
accessor,
),
)
Expand Down
32 changes: 20 additions & 12 deletions crates/proof-of-sql/src/base/commitment/vec_commitment_ext.rs
Original file line number Diff line number Diff line change
Expand Up @@ -217,9 +217,10 @@ mod tests {
],
0,
);
let expected_commitments =
Vec::from_iter(expected_commitments.iter().map(|c| c.decompress().unwrap()));

let expected_commitments: Vec<_> = expected_commitments
.iter()
.map(|c| c.decompress().unwrap())
.collect();
assert_eq!(commitments, expected_commitments);
}

Expand Down Expand Up @@ -258,8 +259,10 @@ mod tests {
],
0,
);
let expected_commitments =
Vec::from_iter(expected_commitments.iter().map(|c| c.decompress().unwrap()));
let expected_commitments: Vec<_> = expected_commitments
.iter()
.map(|c| c.decompress().unwrap())
.collect();

assert_eq!(commitments, expected_commitments);
}
Expand Down Expand Up @@ -343,8 +346,10 @@ mod tests {
],
0,
);
let expected_commitments =
Vec::from_iter(expected_commitments.iter().map(|c| c.decompress().unwrap()));
let expected_commitments: Vec<_> = expected_commitments
.iter()
.map(|c| c.decompress().unwrap())
.collect();

assert_eq!(commitments, expected_commitments);
}
Expand Down Expand Up @@ -384,9 +389,10 @@ mod tests {
],
0,
);
let expected_commitments =
Vec::from_iter(expected_commitments.iter().map(|c| c.decompress().unwrap()));

let expected_commitments: Vec<_> = expected_commitments
.iter()
.map(|c| c.decompress().unwrap())
.collect();
assert_eq!(commitments, expected_commitments);
}

Expand Down Expand Up @@ -460,8 +466,10 @@ mod tests {
],
3,
);
let expected_commitments =
Vec::from_iter(expected_commitments.iter().map(|c| c.decompress().unwrap()));
let expected_commitments: Vec<_> = expected_commitments
.iter()
.map(|c| c.decompress().unwrap())
.collect();

assert_eq!(commitments, expected_commitments);
}
Expand Down
9 changes: 4 additions & 5 deletions crates/proof-of-sql/src/base/database/filter_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,10 @@ pub fn filter_columns<'a, S: Scalar>(
.map(|(i, _)| i)
.collect();
let result_length = indexes.len();
let filtered_result = Vec::from_iter(
columns
.iter()
.map(|column| filter_column_by_index(alloc, column, &indexes)),
);
let filtered_result: Vec<_> = columns
.iter()
.map(|column| filter_column_by_index(alloc, column, &indexes))
.collect();
(filtered_result, result_length)
}
/// This function takes an index vector and a `Column` and returns a
Expand Down
49 changes: 28 additions & 21 deletions crates/proof-of-sql/src/base/database/group_by_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,13 +67,12 @@ pub fn aggregate_columns<'a, S: Scalar>(

// `filtered_indexes` is a vector of indexes of the rows that are selected. We sort this vector
// so that all the rows in the same group are next to each other.
let mut filtered_indexes = Vec::from_iter(
selection_column_in
.iter()
.enumerate()
.filter(|&(_, &b)| b)
.map(|(i, _)| i),
);
let mut filtered_indexes: Vec<_> = selection_column_in
.iter()
.enumerate()
.filter(|&(_, &b)| b)
.map(|(i, _)| i)
.collect();
if_rayon!(
filtered_indexes.par_sort_unstable_by(|&a, &b| compare_indexes_by_columns(
group_by_columns_in,
Expand All @@ -96,25 +95,33 @@ pub fn aggregate_columns<'a, S: Scalar>(
compare_indexes_by_columns(group_by_columns_in, a, b) == Ordering::Equal
})
.multiunzip();
let group_by_columns_out = Vec::from_iter(
group_by_columns_in
.iter()
.map(|column| filter_column_by_index(alloc, column, &group_by_result_indexes)),
);
let group_by_columns_out: Vec<_> = group_by_columns_in
.iter()
.map(|column| filter_column_by_index(alloc, column, &group_by_result_indexes))
.collect();

// This calls the `sum_aggregate_column_by_index_counts` function on each column in `sum_columns`
// and gives a vector of `S` slices
let sum_columns_out = Vec::from_iter(sum_columns_in.iter().map(|column| {
sum_aggregate_column_by_index_counts(alloc, column, &counts, &filtered_indexes)
}));
let sum_columns_out: Vec<_> = sum_columns_in
.iter()
.map(|column| {
sum_aggregate_column_by_index_counts(alloc, column, &counts, &filtered_indexes)
})
.collect();

let max_columns_out = Vec::from_iter(max_columns_in.iter().map(|column| {
max_aggregate_column_by_index_counts(alloc, column, &counts, &filtered_indexes)
}));
let max_columns_out: Vec<_> = max_columns_in
.iter()
.map(|column| {
max_aggregate_column_by_index_counts(alloc, column, &counts, &filtered_indexes)
})
.collect();

let min_columns_out = Vec::from_iter(min_columns_in.iter().map(|column| {
min_aggregate_column_by_index_counts(alloc, column, &counts, &filtered_indexes)
}));
let min_columns_out: Vec<_> = min_columns_in
.iter()
.map(|column| {
min_aggregate_column_by_index_counts(alloc, column, &counts, &filtered_indexes)
})
.collect();

// Cast the counts to something compatible with BigInt.
let count_column_out = alloc.alloc_slice_fill_iter(counts.into_iter().map(|c| c as i64));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ impl TestSchemaAccessor {

impl SchemaAccessor for TestSchemaAccessor {
fn lookup_column(&self, table_ref: TableRef, column_id: Identifier) -> Option<ColumnType> {
self.schemas.get(&table_ref)?.get(&column_id).cloned()
self.schemas.get(&table_ref)?.get(&column_id).copied()
}

fn lookup_schema(&self, table_ref: TableRef) -> Vec<(Identifier, ColumnType)> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,8 +175,9 @@ mod tests {
use ark_std::UniformRand;
let mut rng = ark_std::test_rng();
for num_vars in 0..20 {
let point =
Vec::from_iter(core::iter::repeat_with(|| F::rand(&mut rng)).take(num_vars));
let point: Vec<_> = core::iter::repeat_with(|| F::rand(&mut rng))
.take(num_vars)
.collect();
let (lo_vec, hi_vec) = compute_dynamic_vecs(&point);
let mut eval_vec = vec![F::ZERO; 1 << num_vars];
compute_evaluation_vector(&mut eval_vec, &point);
Expand All @@ -194,8 +195,9 @@ mod tests {
let mut rng = test_rng();
for num_vars in 0..10 {
let nu = num_vars / 2 + 1;
let point =
Vec::from_iter(core::iter::repeat_with(|| F::rand(&mut rng)).take(num_vars));
let point: Vec<_> = core::iter::repeat_with(|| F::rand(&mut rng))
.take(num_vars)
.collect();
let alphas = core::iter::repeat_with(|| F::rand(&mut rng))
.take(nu)
.collect_vec();
Expand Down
6 changes: 3 additions & 3 deletions crates/proof-of-sql/src/proof_primitive/dory/setup.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,9 +48,9 @@ impl<'a> ProverSetup<'a> {
assert_eq!(Gamma_1.len(), 1 << max_nu);
assert_eq!(Gamma_2.len(), 1 << max_nu);
#[cfg(feature = "blitzar")]
let blitzar_handle = blitzar::compute::MsmHandle::new(&Vec::from_iter(
Gamma_1.iter().copied().map(Into::into),
));
let blitzar_handle = blitzar::compute::MsmHandle::new(
&Gamma_1.iter().copied().map(Into::into).collect::<Vec<_>>(),
);
let (Gamma_1, Gamma_2): (Vec<_>, Vec<_>) = (0..=max_nu)
.map(|k| (&Gamma_1[..1 << k], &Gamma_2[..1 << k]))
.unzip();
Expand Down
26 changes: 14 additions & 12 deletions crates/proof-of-sql/src/proof_primitive/dory/vmv_state.rs
Original file line number Diff line number Diff line change
Expand Up @@ -110,18 +110,20 @@ impl VMV {
pub(super) fn calculate_prover_state(&self, setup: &super::ProverSetup) -> VMVProverState {
use super::G1Projective;
use ark_ec::VariableBaseMSM;
let v_vec = Vec::from_iter((0..self.R.len()).map(|i| {
self.L
.iter()
.zip(self.M.iter())
.map(|(l, row)| row[i] * l)
.sum()
}));
let T_vec_prime = Vec::from_iter(
self.M
.iter()
.map(|row| G1Projective::msm_unchecked(setup.Gamma_1[self.nu], row).into()),
);
let v_vec: Vec<_> = (0..self.R.len())
.map(|i| {
self.L
.iter()
.zip(self.M.iter())
.map(|(l, row)| row[i] * l)
.sum()
})
.collect();
let T_vec_prime: Vec<_> = self
.M
.iter()
.map(|row| G1Projective::msm_unchecked(setup.Gamma_1[self.nu], row).into())
.collect();
VMVProverState {
v_vec,
T_vec_prime,
Expand Down
5 changes: 4 additions & 1 deletion crates/proof-of-sql/src/sql/parse/query_expr_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1886,7 +1886,10 @@ fn select_group_and_order_by_preserve_the_column_order_reference() {
let (t, accessor) = get_test_accessor();
let base_cols: [&str; N] = ["i", "i0", "i1", "s"]; // sorted because of `select: [cols = ... ]`
let base_ordering = [Asc, Desc, Asc, Desc];
for (idx, perm_cols) in IndexSet::from_iter(base_cols.into_iter().permutations(N))
for (idx, perm_cols) in base_cols
.into_iter()
.permutations(N)
.collect::<IndexSet<_>>()
.into_iter()
.enumerate()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ fn check_and_get_aggregation_and_remainder(
let free_identifiers = get_free_identifiers_from_expr(&expr.expr);
let group_by_identifier_set = group_by_identifiers
.iter()
.cloned()
.copied()
.collect::<IndexSet<_>>();
if contains_nested_aggregation(&expr.expr, false) {
return Err(PostprocessingError::NestedAggregationInGroupByClause {
Expand Down Expand Up @@ -386,15 +386,15 @@ mod tests {

// a + b + 1
let expr = add(add(col("a"), col("b")), lit(1));
let expected: IndexSet<Identifier> = [ident("a"), ident("b")].iter().cloned().collect();
let expected: IndexSet<Identifier> = [ident("a"), ident("b")].iter().copied().collect();
let actual = get_free_identifiers_from_expr(&expr);
assert_eq!(actual, expected);

// ! (a == b || c >= a)
let expr = not(or(equal(col("a"), col("b")), ge(col("c"), col("a"))));
let expected: IndexSet<Identifier> = [ident("a"), ident("b"), ident("c")]
.iter()
.cloned()
.copied()
.collect();
let actual = get_free_identifiers_from_expr(&expr);
assert_eq!(actual, expected);
Expand All @@ -407,7 +407,7 @@ mod tests {

// (COUNT(a + b) + c) * d
let expr = mul(add(count(add(col("a"), col("b"))), col("c")), col("d"));
let expected: IndexSet<Identifier> = [ident("c"), ident("d")].iter().cloned().collect();
let expected: IndexSet<Identifier> = [ident("c"), ident("d")].iter().copied().collect();
let actual = get_free_identifiers_from_expr(&expr);
assert_eq!(actual, expected);
}
Expand Down
4 changes: 2 additions & 2 deletions crates/proof-of-sql/src/sql/proof_exprs/literal_expr_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,9 +56,9 @@ fn test_random_tables_with_given_offset(offset: usize) {
// Calculate/compare expected result
let (expected_a, expected_b, expected_c): (Vec<bool>, Vec<String>, Vec<i64>) = if lit {
(
data["a"].bool_iter().cloned().collect(),
data["a"].bool_iter().copied().collect(),
data["b"].string_iter().cloned().collect(),
data["c"].i64_iter().cloned().collect(),
data["c"].i64_iter().copied().collect(),
)
} else {
(vec![], vec![], vec![])
Expand Down
33 changes: 19 additions & 14 deletions crates/proof-of-sql/src/sql/proof_plans/filter_exec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -108,9 +108,9 @@ where
},
)?;
// 4. filtered_columns
let filtered_columns_evals = Vec::from_iter(
repeat_with(|| builder.consume_intermediate_mle()).take(self.aliased_results.len()),
);
let filtered_columns_evals: Vec<_> = repeat_with(|| builder.consume_intermediate_mle())
.take(self.aliased_results.len())
.collect();
assert!(filtered_columns_evals.len() == self.aliased_results.len());

let alpha = builder.consume_post_result_challenge();
Expand Down Expand Up @@ -167,11 +167,16 @@ impl<C: Commitment> ProverEvaluate<C::Scalar> for FilterExec<C> {
.expect("selection is not boolean");

// 2. columns
let columns = Vec::from_iter(self.aliased_results.iter().map(|aliased_expr| {
aliased_expr
.expr
.result_evaluate(builder.table_length(), alloc, accessor)
}));
let columns: Vec<_> = self
.aliased_results
.iter()
.map(|aliased_expr| {
aliased_expr
.expr
.result_evaluate(builder.table_length(), alloc, accessor)
})
.collect();

// Compute filtered_columns and indexes
let (filtered_columns, result_len) = filter_columns(alloc, &columns, selection);
// 3. set indexes
Expand All @@ -196,15 +201,15 @@ impl<C: Commitment> ProverEvaluate<C::Scalar> for FilterExec<C> {
.expect("selection is not boolean");

// 2. columns
let columns = Vec::from_iter(
self.aliased_results
.iter()
.map(|aliased_expr| aliased_expr.expr.prover_evaluate(builder, alloc, accessor)),
);
let columns: Vec<_> = self
.aliased_results
.iter()
.map(|aliased_expr| aliased_expr.expr.prover_evaluate(builder, alloc, accessor))
.collect();
// Compute filtered_columns and indexes
let (filtered_columns, result_len) = filter_columns(alloc, &columns, selection);
// 3. Produce MLEs
filtered_columns.iter().cloned().for_each(|column| {
filtered_columns.iter().copied().for_each(|column| {
builder.produce_intermediate_mle(column);
});

Expand Down
Loading

0 comments on commit 2cbff77

Please sign in to comment.