Commit

Remove unused_variables
kayabaNerve committed Sep 27, 2023
1 parent 3b01d30 commit 01a4b9e

Showing 6 changed files with 23 additions and 27 deletions.
19 changes: 12 additions & 7 deletions coordinator/src/db.rs
@@ -67,19 +67,24 @@ impl<D: Db> MainDb<D> {
     res
   }

-  fn first_preprocess_key(id: [u8; 32]) -> Vec<u8> {
-    Self::main_key(b"first_preprocess", id)
-  }
-  pub fn save_first_preprocess(txn: &mut D::Transaction<'_>, id: [u8; 32], preprocess: Vec<u8>) {
-    let key = Self::first_preprocess_key(id);
+  fn first_preprocess_key(network: NetworkId, id: [u8; 32]) -> Vec<u8> {
+    Self::main_key(b"first_preprocess", (network, id).encode())
+  }
+  pub fn save_first_preprocess(
+    txn: &mut D::Transaction<'_>,
+    network: NetworkId,
+    id: [u8; 32],
+    preprocess: Vec<u8>,
+  ) {
+    let key = Self::first_preprocess_key(network, id);
     if let Some(existing) = txn.get(&key) {
       assert_eq!(existing, preprocess, "saved a distinct first preprocess");
       return;
     }
     txn.put(key, preprocess);
   }
-  pub fn first_preprocess<G: Get>(getter: &G, id: [u8; 32]) -> Option<Vec<u8>> {
-    getter.get(Self::first_preprocess_key(id))
+  pub fn first_preprocess<G: Get>(getter: &G, network: NetworkId, id: [u8; 32]) -> Option<Vec<u8>> {
+    getter.get(Self::first_preprocess_key(network, id))
   }

   fn batch_key(network: NetworkId, id: u32) -> Vec<u8> {
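The functional change in this file is that the coordinator's first-preprocess records are now keyed by (network, id) rather than by id alone, so two networks whose signing or batch ids happen to collide can no longer overwrite each other's saved preprocess. Below is a minimal, self-contained sketch of that key scheme; it assumes parity-scale-codec for the `(network, id).encode()` call, and the `NetworkId` enum and `main_key` prefix here are stand-ins for the real definitions in serai-client and the coordinator's db module.

// Sketch only: stand-ins for serai_client's NetworkId and MainDb::main_key.
use parity_scale_codec::Encode;

#[derive(Clone, Copy, Encode)]
enum NetworkId {
  Serai,
  Bitcoin,
  Ethereum,
  Monero,
}

// Namespaced key: a fixed prefix, a sub-key, then the encoded key material.
fn main_key(sub_key: &[u8], key: impl AsRef<[u8]>) -> Vec<u8> {
  [b"coordinator_main".as_ref(), sub_key, key.as_ref()].concat()
}

fn first_preprocess_key(network: NetworkId, id: [u8; 32]) -> Vec<u8> {
  // SCALE-encoding the (network, id) tuple scopes the record per network.
  main_key(b"first_preprocess", (network, id).encode())
}

fn main() {
  let id = [0u8; 32];
  // Same id, different networks: distinct DB keys, so no collision.
  assert_ne!(
    first_preprocess_key(NetworkId::Bitcoin, id),
    first_preprocess_key(NetworkId::Monero, id),
  );
}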
14 changes: 4 additions & 10 deletions coordinator/src/main.rs
@@ -1,5 +1,3 @@
-#![allow(unused_variables)]
-
 use core::{ops::Deref, future::Future};
 use std::{
   sync::Arc,
@@ -203,7 +201,6 @@ impl<FRid, F: Clone + Fn(NetworkId, [u8; 32], RecognizedIdType, [u8; 32], u32) -
 {
 }

-#[allow(clippy::type_complexity)]
 pub(crate) async fn scan_tributaries<
   D: Db,
   Pro: Processors,
@@ -214,7 +211,6 @@ pub(crate) async fn scan_tributaries<
   raw_db: D,
   key: Zeroizing<<Ristretto as Ciphersuite>::F>,
   recognized_id: RID,
-  p2p: P,
   processors: Pro,
   serai: Arc<Serai>,
   mut new_tributary: broadcast::Receiver<ActiveTributary<D, P>>,
@@ -229,7 +225,6 @@ pub(crate) async fn scan_tributaries<
     let raw_db = raw_db.clone();
     let key = key.clone();
     let recognized_id = recognized_id.clone();
-    let p2p = p2p.clone();
     let processors = processors.clone();
     let serai = serai.clone();
     async move {
@@ -305,7 +300,7 @@ pub async fn heartbeat_tributaries<D: Db, P: P2p>(

   let mut readers = vec![];
   loop {
-    while let Ok(ActiveTributary { spec, tributary }) = {
+    while let Ok(ActiveTributary { spec: _, tributary }) = {
       match new_tributary.try_recv() {
         Ok(tributary) => Ok(tributary),
         Err(broadcast::error::TryRecvError::Empty) => Err(()),
@@ -608,7 +603,7 @@ async fn handle_processor_messages<D: Db, Pro: Processors, P: P2p>(
       ProcessorMessage::Sign(msg) => match msg {
         sign::ProcessorMessage::Preprocess { id, preprocess } => {
           if id.attempt == 0 {
-            MainDb::<D>::save_first_preprocess(&mut txn, id.id, preprocess);
+            MainDb::<D>::save_first_preprocess(&mut txn, network, id.id, preprocess);

             None
           } else {
@@ -668,7 +663,7 @@ async fn handle_processor_messages<D: Db, Pro: Processors, P: P2p>(
           // If this is the first attempt instance, wait until we synchronize around
           // the batch first
           if id.attempt == 0 {
-            MainDb::<D>::save_first_preprocess(&mut txn, id.id, preprocess);
+            MainDb::<D>::save_first_preprocess(&mut txn, spec.set().network, id.id, preprocess);

             Some(Transaction::Batch(block.0, id.id))
           } else {
@@ -942,7 +937,7 @@ pub async fn run<D: Db, Pro: Processors, P: P2p>(
     // This waits until the necessary preprocess is available
     let get_preprocess = |raw_db, id| async move {
       loop {
-        let Some(preprocess) = MainDb::<D>::first_preprocess(raw_db, id) else {
+        let Some(preprocess) = MainDb::<D>::first_preprocess(raw_db, network, id) else {
          sleep(Duration::from_millis(100)).await;
          continue;
        };
@@ -985,7 +980,6 @@ pub async fn run<D: Db, Pro: Processors, P: P2p>(
    raw_db,
    key.clone(),
    recognized_id,
-   p2p.clone(),
    processors.clone(),
    serai.clone(),
    new_tributary_listener_2,
9 changes: 3 additions & 6 deletions coordinator/src/substrate/mod.rs
@@ -43,11 +43,10 @@ async fn in_set(
   Ok(Some(data.participants.iter().any(|(participant, _)| participant.0 == key)))
 }

-async fn handle_new_set<D: Db, CNT: Clone + Fn(&mut D, TributarySpec), Pro: Processors>(
+async fn handle_new_set<D: Db, CNT: Clone + Fn(&mut D, TributarySpec)>(
   db: &mut D,
   key: &Zeroizing<<Ristretto as Ciphersuite>::F>,
   create_new_tributary: CNT,
-  processors: &Pro,
   serai: &Serai,
   block: &Block,
   set: ValidatorSet,
@@ -88,7 +87,6 @@ async fn handle_new_set<D: Db, CNT: Clone + Fn(&mut D, TributarySpec), Pro: Proc
 }

 async fn handle_key_gen<Pro: Processors>(
-  key: &Zeroizing<<Ristretto as Ciphersuite>::F>,
   processors: &Pro,
   serai: &Serai,
   block: &Block,
@@ -239,8 +237,7 @@ async fn handle_block<D: Db, CNT: Clone + Fn(&mut D, TributarySpec), Pro: Proces

       if !SubstrateDb::<D>::handled_event(&db.0, hash, event_id) {
         log::info!("found fresh new set event {:?}", new_set);
-        handle_new_set(&mut db.0, key, create_new_tributary.clone(), processors, serai, &block, set)
-          .await?;
+        handle_new_set(&mut db.0, key, create_new_tributary.clone(), serai, &block, set).await?;
         let mut txn = db.0.txn();
         SubstrateDb::<D>::handle_event(&mut txn, hash, event_id);
         txn.commit();
@@ -259,7 +256,7 @@ async fn handle_block<D: Db, CNT: Clone + Fn(&mut D, TributarySpec), Pro: Proces
         TributaryDb::<D>::set_key_pair(&mut txn, set, &key_pair);
         txn.commit();

-        handle_key_gen(key, processors, serai, &block, set, key_pair).await?;
+        handle_key_gen(processors, serai, &block, set, key_pair).await?;
       } else {
         panic!("KeyGen event wasn't KeyGen: {key_gen:?}");
       }
2 changes: 1 addition & 1 deletion coordinator/src/tests/tributary/dkg.rs
@@ -281,7 +281,7 @@ async fn dkg_test() {
   let key_pair = (serai_client::Public(substrate_key), network_key.try_into().unwrap());

   let mut txs = vec![];
-  for (k, key) in keys.iter().enumerate() {
+  for key in keys.iter() {
     let attempt = 0;
     // This is fine to re-use the one DB as such, due to exactly how this specific call is coded,
     // albeit poor
2 changes: 1 addition & 1 deletion coordinator/src/tests/tributary/mod.rs
@@ -66,7 +66,7 @@ fn serialize_transaction() {
     // Create a valid vec of shares
     let mut shares = vec![];
     // Create up to 512 participants
-    for i in 0 .. (OsRng.next_u64() % 512) {
+    for _ in 0 .. (OsRng.next_u64() % 512) {
       let mut share = vec![0; share_len];
       OsRng.fill_bytes(&mut share);
       shares.push(share);
4 changes: 2 additions & 2 deletions coordinator/src/tributary/mod.rs
@@ -297,7 +297,7 @@ impl ReadWrite for Transaction {
         let share_len = usize::from(u16::from_le_bytes(share_len));

         let mut shares = vec![];
-        for i in 0 .. u16::from_le_bytes(share_quantity) {
+        for _ in 0 .. u16::from_le_bytes(share_quantity) {
           let mut share = vec![0; share_len];
           reader.read_exact(&mut share)?;
           shares.push(share);
@@ -490,7 +490,7 @@ impl TransactionTrait for Transaction {
       }
     }

-    if let Transaction::SignCompleted { plan, tx_hash, first_signer, signature } = self {
+    if let Transaction::SignCompleted { first_signer, signature, .. } = self {
       if !signature.verify(*first_signer, self.sign_completed_challenge()) {
         Err(TransactionError::InvalidContent)?;
       }
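With the crate-wide `#![allow(unused_variables)]` suppression removed, each unused binding now has to be handled at its own site, which is what the remaining hunks do. A small stand-alone sketch of the three patterns this commit leans on (a `_` loop variable, a `field: _` struct pattern, and a `..` rest pattern); the struct here is a toy stand-in, not the coordinator's `ActiveTributary`.

// Toy struct standing in for something like the coordinator's ActiveTributary.
#[derive(Clone, Copy)]
struct Record {
  spec: u32,
  tributary: u32,
}

fn main() {
  // 1. `_` as a loop variable: run the body a number of times without an unused index binding.
  for _ in 0 .. 3 {
    // ...
  }

  let record = Record { spec: 1, tributary: 2 };

  // 2. `field: _` in a struct pattern: name the field but explicitly discard its value.
  let Record { spec: _, tributary } = record;
  println!("tributary = {tributary}");

  // 3. `..` in a struct pattern: ignore every field not listed.
  let Record { tributary, .. } = record;
  println!("tributary = {tributary} (spec = {})", record.spec);
}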
