From 74f405acbe880ee9fe602fa101e5775eb810f84c Mon Sep 17 00:00:00 2001 From: David Bell <17103917+davidjohnbell@users.noreply.github.com> Date: Wed, 25 Oct 2023 16:46:51 +0400 Subject: [PATCH 1/9] db_macro --- common/db/src/create_db.rs | 59 ++++++++++++++++++++++++++++++++++++++ common/db/src/lib.rs | 3 +- 2 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 common/db/src/create_db.rs diff --git a/common/db/src/create_db.rs b/common/db/src/create_db.rs new file mode 100644 index 000000000..fa842b270 --- /dev/null +++ b/common/db/src/create_db.rs @@ -0,0 +1,59 @@ +pub fn db_key(db_dst: &'static [u8], item_dst: &'static [u8], key: impl AsRef<[u8]>) -> Vec { + let db_len = u8::try_from(db_dst.len()).unwrap(); + let dst_len = u8::try_from(item_dst.len()).unwrap(); + [[db_len].as_ref(), db_dst, [dst_len].as_ref(), item_dst, key.as_ref()].concat() +} + +/// Creates a series of structs which provide namespacing for keys +/// +/// # Description +/// +/// Creates a unit struct and a default implementation for the `key`, `get`, and `set`. The macro +/// uses a syntax similar to defining a function. Parameters are concatenated to produce a key, +/// they must be `scale` encodable. The return type is used to auto encode and decode the database +/// value bytes using `bincode`. +/// +/// # Arguments +/// +/// * `db_name` - A database name +/// * `field_name` - An item name +/// * `args` - Comma seperated list of key arguments +/// * `field_type` - The return type +/// +/// # Example +/// +/// ``` +/// create_db!({ +/// TrubutariesDb { +/// AttemptsDb: (key_bytes: &[u8], attempt_id: u32) -> u64, +/// ExpiredDb: (genesis: [u8; 32]) -> Vec +/// } +/// }) +/// ``` +#[macro_export] +macro_rules! 
create_db { + ($db_name: ident + { $($field_name: ident: ($($arg: ident: $arg_type: ty),*) -> $field_type: ty),*} + ) => { + $( + #[derive(Clone, Debug)] + pub struct $field_name; + impl $field_name { + pub fn key($($arg: $arg_type),*) -> Vec { + $crate::db_key(stringify!($db_name).as_bytes(), stringify!($field_name).as_bytes(), (vec![] as Vec, $($arg),*).encode()) + } + #[allow(dead_code)] + pub fn set(txn: &mut impl DbTxn $(, $arg: $arg_type)*, data: &impl serde::Serialize) { + let key = $field_name::key($($arg),*); + txn.put(&key, bincode::serialize(data).unwrap()); + } + #[allow(dead_code)] + pub fn get(getter: &impl Get, $($arg: $arg_type),*) -> Option<$field_type> { + getter.get($field_name::key($($arg),*)).map(|data| { + bincode::deserialize(data.as_ref()).unwrap() + }) + } + } + )* + }; +} diff --git a/common/db/src/lib.rs b/common/db/src/lib.rs index 0bcf9810a..ad38a4e54 100644 --- a/common/db/src/lib.rs +++ b/common/db/src/lib.rs @@ -5,7 +5,8 @@ pub use mem::*; mod rocks; #[cfg(feature = "rocksdb")] pub use rocks::{RocksDB, new_rocksdb}; - +mod create_db; +pub use create_db::*; /// An object implementing get. 
pub trait Get { fn get(&self, key: impl AsRef<[u8]>) -> Option>; From 5b59cd801f838851e47915913301cfb7f5840d67 Mon Sep 17 00:00:00 2001 From: David Bell <17103917+davidjohnbell@users.noreply.github.com> Date: Fri, 27 Oct 2023 16:00:10 +0400 Subject: [PATCH 2/9] wip: converted prcessor/key_gen to use create_db macro --- processor/src/key_gen.rs | 132 +++++++++++++-------------------------- 1 file changed, 44 insertions(+), 88 deletions(-) diff --git a/processor/src/key_gen.rs b/processor/src/key_gen.rs index 8788cd222..ab666fe31 100644 --- a/processor/src/key_gen.rs +++ b/processor/src/key_gen.rs @@ -1,6 +1,6 @@ -use core::marker::PhantomData; use std::collections::HashMap; +use serai_db::create_db; use zeroize::Zeroizing; use rand_core::SeedableRng; @@ -27,80 +27,22 @@ pub struct KeyConfirmed { pub network_keys: Vec>, } -#[derive(Clone, Debug)] -struct KeyGenDb(PhantomData, PhantomData); -impl KeyGenDb { - fn key_gen_key(dst: &'static [u8], key: impl AsRef<[u8]>) -> Vec { - D::key(b"KEY_GEN", dst, key) +create_db!( + KeyGenDb { + ParamsDb: (key: &ValidatorSet) -> (ThresholdParams, u16), + // Not scoped to the set since that'd have latter attempts overwrite former + // A former attempt may become the finalized attempt, even if it doesn't in a timely manner + // Overwriting its commitments would be accordingly poor + CommitmentsDb: (key: &KeyGenId) -> HashMap>, + GeneratedKeysDb: (set: &ValidatorSet, substrate_key: &[u8; 32], network_key: &[u8]) -> Vec, + KeysDb: (key: &Vec) -> Vec } +); - fn params_key(set: &ValidatorSet) -> Vec { - Self::key_gen_key(b"params", set.encode()) - } - fn save_params( - txn: &mut D::Transaction<'_>, - set: &ValidatorSet, - params: &ThresholdParams, - shares: u16, - ) { - txn.put(Self::params_key(set), bincode::serialize(&(params, shares)).unwrap()); - } - fn params(getter: &G, set: &ValidatorSet) -> Option<(ThresholdParams, u16)> { - getter.get(Self::params_key(set)).map(|bytes| bincode::deserialize(&bytes).unwrap()) - } - - // Not 
scoped to the set since that'd have latter attempts overwrite former - // A former attempt may become the finalized attempt, even if it doesn't in a timely manner - // Overwriting its commitments would be accordingly poor - fn commitments_key(id: &KeyGenId) -> Vec { - Self::key_gen_key(b"commitments", id.encode()) - } - fn save_commitments( - txn: &mut D::Transaction<'_>, - id: &KeyGenId, - commitments: &HashMap>, - ) { - txn.put(Self::commitments_key(id), bincode::serialize(commitments).unwrap()); - } - fn commitments(getter: &G, id: &KeyGenId) -> HashMap> { - bincode::deserialize::>>( - &getter.get(Self::commitments_key(id)).unwrap(), - ) - .unwrap() - } - - fn generated_keys_key(set: ValidatorSet, key_pair: (&[u8; 32], &[u8])) -> Vec { - Self::key_gen_key(b"generated_keys", (set, key_pair).encode()) - } - fn save_keys( - txn: &mut D::Transaction<'_>, - id: &KeyGenId, - substrate_keys: &[ThresholdCore], - network_keys: &[ThresholdKeys], - ) { - let mut keys = Zeroizing::new(vec![]); - for (substrate_keys, network_keys) in substrate_keys.iter().zip(network_keys) { - keys.extend(substrate_keys.serialize().as_slice()); - keys.extend(network_keys.serialize().as_slice()); - } - txn.put( - Self::generated_keys_key( - id.set, - ( - &substrate_keys[0].group_key().to_bytes(), - network_keys[0].group_key().to_bytes().as_ref(), - ), - ), - &keys, - ); - } - - fn keys_key(key: &::G) -> Vec { - Self::key_gen_key(b"keys", key.to_bytes()) - } +impl KeysDb { #[allow(clippy::type_complexity)] - fn read_keys( - getter: &G, + fn read_keys( + getter: &impl Get, key: &[u8], ) -> Option<(Vec, (Vec>, Vec>))> { let keys_vec = getter.get(key)?; @@ -116,8 +58,9 @@ impl KeyGenDb { } Some((keys_vec, (substrate_keys, network_keys))) } - fn confirm_keys( - txn: &mut D::Transaction<'_>, + + fn confirm_keys( + txn: &mut impl DbTxn, set: ValidatorSet, key_pair: KeyPair, ) -> (Vec>, Vec>) { @@ -132,19 +75,32 @@ impl KeyGenDb { }, keys.1[0].group_key().to_bytes().as_ref(), ); - 
txn.put(Self::keys_key(&keys.1[0].group_key()), keys_vec); + txn.put(KeysDb::key(&keys.1[0].group_key().to_bytes().as_ref().into()), keys_vec); keys } + #[allow(clippy::type_complexity)] - fn keys( - getter: &G, + fn keys( + getter: &impl Get, key: &::G, - ) -> Option<(Vec>, Vec>)> { - let res = Self::read_keys(getter, &Self::keys_key(key))?.1; + ) -> Option<(ThresholdKeys, ThresholdKeys)> { + let res = Self::read_keys::(getter, &KeysDb::key(&key.to_bytes().as_ref().into()))?.1; assert_eq!(&res.1[0].group_key(), key); Some(res) } } +impl GeneratedKeysDb { + fn save_keys( + txn: &mut impl DbTxn, + id: &KeyGenId, + substrate_keys: &ThresholdCore, + network_keys: &ThresholdKeys, + ) { + let mut keys = substrate_keys.serialize(); + keys.extend(network_keys.serialize().iter()); + txn.put(Self::key(&id.set, &substrate_keys.group_key().to_bytes(), network_keys.group_key().to_bytes().as_ref()), keys); + } +} type SecretShareMachines = Vec<(SecretShareMachine, SecretShareMachine<::Curve>)>; @@ -168,7 +124,7 @@ impl KeyGen { pub fn in_set(&self, set: &ValidatorSet) -> bool { // We determine if we're in set using if we have the parameters for a set's key generation - KeyGenDb::::params(&self.db, set).is_some() + ParamsDb::get(&self.db, set).is_some() } #[allow(clippy::type_complexity)] @@ -185,7 +141,7 @@ impl KeyGen { // The only other concern is if it's set when it's not safe to use // The keys are only written on confirmation, and the transaction writing them is atomic to // every associated operation - KeyGenDb::::keys(&self.db, key) + KeysDb::keys::(&self.db, key) } pub async fn handle( @@ -313,7 +269,7 @@ impl KeyGen { self.active_share.remove(&id.set).is_none() { // If we haven't handled this set before, save the params - KeyGenDb::::save_params(txn, &id.set, ¶ms, shares); + ParamsDb::set(txn, &id.set, ¶ms, shares); } let (machines, commitments) = key_gen_machines(id, params, shares); @@ -332,7 +288,7 @@ impl KeyGen { panic!("commitments when already handled 
commitments"); } - let (params, share_quantity) = KeyGenDb::::params(txn, &id.set).unwrap(); + let (params, share_quantity) = ParamsDb::get(txn, &id.set).unwrap(); // Unwrap the machines, rebuilding them if we didn't have them in our cache // We won't if the processor rebooted @@ -344,7 +300,7 @@ impl KeyGen { .remove(&id.set) .unwrap_or_else(|| key_gen_machines(id, params, share_quantity)); - KeyGenDb::::save_commitments(txn, &id, &commitments); + CommitmentsDb::set(txn, &id, &commitments); let (machines, shares) = secret_share_machines(id, params, prior, commitments); self.active_share.insert(id.set, (machines, shares.clone())); @@ -355,12 +311,12 @@ impl KeyGen { CoordinatorMessage::Shares { id, shares } => { info!("Received shares for {:?}", id); - let (params, share_quantity) = KeyGenDb::::params(txn, &id.set).unwrap(); + let (params, share_quantity) = ParamsDb::get(txn, &id.set).unwrap(); // Same commentary on inconsistency as above exists let (machines, our_shares) = self.active_share.remove(&id.set).unwrap_or_else(|| { let prior = key_gen_machines(id, params, share_quantity); - secret_share_machines(id, params, prior, KeyGenDb::::commitments(txn, &id)) + secret_share_machines(id, params, prior, CommitmentsDb::get::(txn, &id)) }); let mut rng = share_rng(id); @@ -437,7 +393,7 @@ impl KeyGen { } } - KeyGenDb::::save_keys(txn, &id, &substrate_keys, &network_keys); + GeneratedKeysDb::save_keys::(txn, &id, &substrate_keys, &network_keys); ProcessorMessage::GeneratedKeyPair { id, @@ -454,7 +410,7 @@ impl KeyGen { set: ValidatorSet, key_pair: KeyPair, ) -> KeyConfirmed { - let (substrate_keys, network_keys) = KeyGenDb::::confirm_keys(txn, set, key_pair.clone()); + let (substrate_keys, network_keys) = KeysDb::confirm_keys::(txn, set, key_pair); info!( "Confirmed key pair {} {} for set {:?}", From 42d6e581d5ebd760b134080ddc5aaecd56e81bf9 Mon Sep 17 00:00:00 2001 From: David Bell <17103917+davidjohnbell@users.noreply.github.com> Date: Fri, 27 Oct 2023 16:00:10 +0400 
Subject: [PATCH 3/9] wip: converted prcessor/key_gen to use create_db macro --- processor/src/key_gen.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/processor/src/key_gen.rs b/processor/src/key_gen.rs index ab666fe31..e4700ce21 100644 --- a/processor/src/key_gen.rs +++ b/processor/src/key_gen.rs @@ -58,6 +58,7 @@ impl KeysDb { } Some((keys_vec, (substrate_keys, network_keys))) } +); fn confirm_keys( txn: &mut impl DbTxn, @@ -101,6 +102,18 @@ impl GeneratedKeysDb { txn.put(Self::key(&id.set, &substrate_keys.group_key().to_bytes(), network_keys.group_key().to_bytes().as_ref()), keys); } } +impl GeneratedKeysDb { + fn save_keys( + txn: &mut impl DbTxn, + id: &KeyGenId, + substrate_keys: &ThresholdCore, + network_keys: &ThresholdKeys, + ) { + let mut keys = substrate_keys.serialize(); + keys.extend(network_keys.serialize().iter()); + txn.put(Self::key(&id.set, &substrate_keys.group_key().to_bytes(), network_keys.group_key().to_bytes().as_ref()), keys); + } +} type SecretShareMachines = Vec<(SecretShareMachine, SecretShareMachine<::Curve>)>; From a39b3cc32ce6e07455370e3aa5d2e777778101bb Mon Sep 17 00:00:00 2001 From: David Bell <17103917+davidjohnbell@users.noreply.github.com> Date: Fri, 3 Nov 2023 11:27:02 +0400 Subject: [PATCH 4/9] wip: formatting --- common/db/src/create_db.rs | 26 +++++++++++++++----------- processor/src/key_gen.rs | 12 +++++++++--- 2 files changed, 24 insertions(+), 14 deletions(-) diff --git a/common/db/src/create_db.rs b/common/db/src/create_db.rs index fa842b270..f532ae964 100644 --- a/common/db/src/create_db.rs +++ b/common/db/src/create_db.rs @@ -1,31 +1,31 @@ pub fn db_key(db_dst: &'static [u8], item_dst: &'static [u8], key: impl AsRef<[u8]>) -> Vec { - let db_len = u8::try_from(db_dst.len()).unwrap(); - let dst_len = u8::try_from(item_dst.len()).unwrap(); - [[db_len].as_ref(), db_dst, [dst_len].as_ref(), item_dst, key.as_ref()].concat() + let db_len = u8::try_from(db_dst.len()).unwrap(); + let dst_len = 
u8::try_from(item_dst.len()).unwrap(); + [[db_len].as_ref(), db_dst, [dst_len].as_ref(), item_dst, key.as_ref()].concat() } /// Creates a series of structs which provide namespacing for keys -/// +/// /// # Description -/// +/// /// Creates a unit struct and a default implementation for the `key`, `get`, and `set`. The macro /// uses a syntax similar to defining a function. Parameters are concatenated to produce a key, /// they must be `scale` encodable. The return type is used to auto encode and decode the database /// value bytes using `bincode`. -/// +/// /// # Arguments -/// +/// /// * `db_name` - A database name /// * `field_name` - An item name /// * `args` - Comma seperated list of key arguments /// * `field_type` - The return type -/// +/// /// # Example -/// +/// /// ``` /// create_db!({ /// TrubutariesDb { -/// AttemptsDb: (key_bytes: &[u8], attempt_id: u32) -> u64, +/// AttemptsDb: (key_bytes: &[u8], attempt_id: u32) -> u64, /// ExpiredDb: (genesis: [u8; 32]) -> Vec /// } /// }) @@ -40,7 +40,11 @@ macro_rules! 
create_db { pub struct $field_name; impl $field_name { pub fn key($($arg: $arg_type),*) -> Vec { - $crate::db_key(stringify!($db_name).as_bytes(), stringify!($field_name).as_bytes(), (vec![] as Vec, $($arg),*).encode()) + $crate::db_key( + stringify!($db_name).as_bytes(), + stringify!($field_name).as_bytes(), + (vec![] as Vec, $($arg),*).encode() + ) } #[allow(dead_code)] pub fn set(txn: &mut impl DbTxn $(, $arg: $arg_type)*, data: &impl serde::Serialize) { diff --git a/processor/src/key_gen.rs b/processor/src/key_gen.rs index e4700ce21..7605ec16d 100644 --- a/processor/src/key_gen.rs +++ b/processor/src/key_gen.rs @@ -58,7 +58,6 @@ impl KeysDb { } Some((keys_vec, (substrate_keys, network_keys))) } -); fn confirm_keys( txn: &mut impl DbTxn, @@ -84,7 +83,7 @@ impl KeysDb { fn keys( getter: &impl Get, key: &::G, - ) -> Option<(ThresholdKeys, ThresholdKeys)> { + ) -> Option<(Vec>, Vec>)> { let res = Self::read_keys::(getter, &KeysDb::key(&key.to_bytes().as_ref().into()))?.1; assert_eq!(&res.1[0].group_key(), key); Some(res) @@ -99,7 +98,14 @@ impl GeneratedKeysDb { ) { let mut keys = substrate_keys.serialize(); keys.extend(network_keys.serialize().iter()); - txn.put(Self::key(&id.set, &substrate_keys.group_key().to_bytes(), network_keys.group_key().to_bytes().as_ref()), keys); + txn.put( + Self::key( + &id.set, + &substrate_keys.group_key().to_bytes(), + network_keys.group_key().to_bytes().as_ref(), + ), + keys, + ); } } impl GeneratedKeysDb { From a3835f409f90f7974489921c50e985dd43c08cee Mon Sep 17 00:00:00 2001 From: David Bell <17103917+davidjohnbell@users.noreply.github.com> Date: Fri, 3 Nov 2023 14:23:14 +0400 Subject: [PATCH 5/9] fix: added no_run to doc --- common/db/src/create_db.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/db/src/create_db.rs b/common/db/src/create_db.rs index f532ae964..1b9721b57 100644 --- a/common/db/src/create_db.rs +++ b/common/db/src/create_db.rs @@ -22,7 +22,7 @@ pub fn db_key(db_dst: &'static [u8], 
item_dst: &'static [u8], key: impl AsRef<[u /// /// # Example /// -/// ``` +/// ```no_run /// create_db!({ /// TrubutariesDb { /// AttemptsDb: (key_bytes: &[u8], attempt_id: u32) -> u64, From 32718cc1e5706fb665ba3114508cb2a68ee73f53 Mon Sep 17 00:00:00 2001 From: David Bell <17103917+davidjohnbell@users.noreply.github.com> Date: Fri, 3 Nov 2023 14:28:36 +0400 Subject: [PATCH 6/9] fix: documentation example had extra parentheses --- common/db/src/create_db.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/common/db/src/create_db.rs b/common/db/src/create_db.rs index 1b9721b57..64ae3d698 100644 --- a/common/db/src/create_db.rs +++ b/common/db/src/create_db.rs @@ -23,12 +23,12 @@ pub fn db_key(db_dst: &'static [u8], item_dst: &'static [u8], key: impl AsRef<[u /// # Example /// /// ```no_run -/// create_db!({ +/// create_db!( /// TrubutariesDb { /// AttemptsDb: (key_bytes: &[u8], attempt_id: u32) -> u64, /// ExpiredDb: (genesis: [u8; 32]) -> Vec /// } -/// }) +/// ) /// ``` #[macro_export] macro_rules! 
create_db { From 6177e6010c9a9ac148523aafc332ecd1e80e050b Mon Sep 17 00:00:00 2001 From: David Bell <17103917+davidjohnbell@users.noreply.github.com> Date: Fri, 3 Nov 2023 14:48:29 +0400 Subject: [PATCH 7/9] fix: ignore doc test entirely --- common/db/src/create_db.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/db/src/create_db.rs b/common/db/src/create_db.rs index 64ae3d698..1d09107e3 100644 --- a/common/db/src/create_db.rs +++ b/common/db/src/create_db.rs @@ -22,7 +22,7 @@ pub fn db_key(db_dst: &'static [u8], item_dst: &'static [u8], key: impl AsRef<[u /// /// # Example /// -/// ```no_run +/// ```ignore /// create_db!( /// TrubutariesDb { /// AttemptsDb: (key_bytes: &[u8], attempt_id: u32) -> u64, From 7c2ec649220bc52b074dd269588505c1c5c506ae Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 4 Nov 2023 23:39:25 -0400 Subject: [PATCH 8/9] Corrections from rebasing --- processor/src/key_gen.rs | 86 +++++++++++++++++----------------------- 1 file changed, 37 insertions(+), 49 deletions(-) diff --git a/processor/src/key_gen.rs b/processor/src/key_gen.rs index 7605ec16d..6ac468b15 100644 --- a/processor/src/key_gen.rs +++ b/processor/src/key_gen.rs @@ -39,7 +39,7 @@ create_db!( } ); -impl KeysDb { +impl GeneratedKeysDb { #[allow(clippy::type_complexity)] fn read_keys( getter: &impl Get, @@ -59,14 +59,39 @@ impl KeysDb { Some((keys_vec, (substrate_keys, network_keys))) } + fn save_keys( + txn: &mut impl DbTxn, + id: &KeyGenId, + substrate_keys: &[ThresholdCore], + network_keys: &[ThresholdKeys], + ) { + let mut keys = Zeroizing::new(vec![]); + for (substrate_keys, network_keys) in substrate_keys.iter().zip(network_keys) { + keys.extend(substrate_keys.serialize().as_slice()); + keys.extend(network_keys.serialize().as_slice()); + } + txn.put( + Self::key( + &id.set, + &substrate_keys[0].group_key().to_bytes(), + network_keys[0].group_key().to_bytes().as_ref(), + ), + keys, + ); + } +} + +impl KeysDb { fn confirm_keys( txn: &mut impl 
DbTxn, set: ValidatorSet, key_pair: KeyPair, ) -> (Vec>, Vec>) { - let (keys_vec, keys) = - Self::read_keys(txn, &Self::generated_keys_key(set, (&key_pair.0 .0, key_pair.1.as_ref()))) - .unwrap(); + let (keys_vec, keys) = GeneratedKeysDb::read_keys::( + txn, + &GeneratedKeysDb::key(&set, &key_pair.0 .0, key_pair.1.as_ref()), + ) + .unwrap(); assert_eq!(key_pair.0 .0, keys.0[0].group_key().to_bytes()); assert_eq!( { @@ -84,42 +109,12 @@ impl KeysDb { getter: &impl Get, key: &::G, ) -> Option<(Vec>, Vec>)> { - let res = Self::read_keys::(getter, &KeysDb::key(&key.to_bytes().as_ref().into()))?.1; + let res = + GeneratedKeysDb::read_keys::(getter, &Self::key(&key.to_bytes().as_ref().into()))?.1; assert_eq!(&res.1[0].group_key(), key); Some(res) } } -impl GeneratedKeysDb { - fn save_keys( - txn: &mut impl DbTxn, - id: &KeyGenId, - substrate_keys: &ThresholdCore, - network_keys: &ThresholdKeys, - ) { - let mut keys = substrate_keys.serialize(); - keys.extend(network_keys.serialize().iter()); - txn.put( - Self::key( - &id.set, - &substrate_keys.group_key().to_bytes(), - network_keys.group_key().to_bytes().as_ref(), - ), - keys, - ); - } -} -impl GeneratedKeysDb { - fn save_keys( - txn: &mut impl DbTxn, - id: &KeyGenId, - substrate_keys: &ThresholdCore, - network_keys: &ThresholdKeys, - ) { - let mut keys = substrate_keys.serialize(); - keys.extend(network_keys.serialize().iter()); - txn.put(Self::key(&id.set, &substrate_keys.group_key().to_bytes(), network_keys.group_key().to_bytes().as_ref()), keys); - } -} type SecretShareMachines = Vec<(SecretShareMachine, SecretShareMachine<::Curve>)>; @@ -152,14 +147,7 @@ impl KeyGen { key: &::G, ) -> Option<(Vec>, Vec>)> { // This is safe, despite not having a txn, since it's a static value - // The only concern is it may not be set when expected, or it may be set unexpectedly - // - // They're only expected to be set on boot, if confirmed. 
If they were confirmed yet the - // transaction wasn't committed, their confirmation will be re-handled - // - // The only other concern is if it's set when it's not safe to use - // The keys are only written on confirmation, and the transaction writing them is atomic to - // every associated operation + // It doesn't change over time/in relation to other operations KeysDb::keys::(&self.db, key) } @@ -288,7 +276,7 @@ impl KeyGen { self.active_share.remove(&id.set).is_none() { // If we haven't handled this set before, save the params - ParamsDb::set(txn, &id.set, ¶ms, shares); + ParamsDb::set(txn, &id.set, &(params, shares)); } let (machines, commitments) = key_gen_machines(id, params, shares); @@ -335,7 +323,7 @@ impl KeyGen { // Same commentary on inconsistency as above exists let (machines, our_shares) = self.active_share.remove(&id.set).unwrap_or_else(|| { let prior = key_gen_machines(id, params, share_quantity); - secret_share_machines(id, params, prior, CommitmentsDb::get::(txn, &id)) + secret_share_machines(id, params, prior, CommitmentsDb::get(txn, &id).unwrap()) }); let mut rng = share_rng(id); @@ -429,15 +417,15 @@ impl KeyGen { set: ValidatorSet, key_pair: KeyPair, ) -> KeyConfirmed { - let (substrate_keys, network_keys) = KeysDb::confirm_keys::(txn, set, key_pair); - info!( "Confirmed key pair {} {} for set {:?}", hex::encode(key_pair.0), - hex::encode(key_pair.1), + hex::encode(&key_pair.1), set, ); + let (substrate_keys, network_keys) = KeysDb::confirm_keys::(txn, set, key_pair); + KeyConfirmed { substrate_keys, network_keys } } } From 576057cd4bff4167184dd72cc5a9d07be66c4900 Mon Sep 17 00:00:00 2001 From: Luke Parker Date: Sat, 4 Nov 2023 23:51:53 -0400 Subject: [PATCH 9/9] Misc lint --- common/db/src/create_db.rs | 19 ++++++++++++------- common/db/src/lib.rs | 6 ++++-- processor/src/key_gen.rs | 13 ++++++------- 3 files changed, 22 insertions(+), 16 deletions(-) diff --git a/common/db/src/create_db.rs b/common/db/src/create_db.rs index 
1d09107e3..101448b98 100644 --- a/common/db/src/create_db.rs +++ b/common/db/src/create_db.rs @@ -1,4 +1,9 @@ -pub fn db_key(db_dst: &'static [u8], item_dst: &'static [u8], key: impl AsRef<[u8]>) -> Vec { +#[doc(hidden)] +pub fn serai_db_key( + db_dst: &'static [u8], + item_dst: &'static [u8], + key: impl AsRef<[u8]>, +) -> Vec { let db_len = u8::try_from(db_dst.len()).unwrap(); let dst_len = u8::try_from(item_dst.len()).unwrap(); [[db_len].as_ref(), db_dst, [dst_len].as_ref(), item_dst, key.as_ref()].concat() @@ -32,22 +37,22 @@ pub fn db_key(db_dst: &'static [u8], item_dst: &'static [u8], key: impl AsRef<[u /// ``` #[macro_export] macro_rules! create_db { - ($db_name: ident - { $($field_name: ident: ($($arg: ident: $arg_type: ty),*) -> $field_type: ty),*} - ) => { + ($db_name: ident { + $($field_name: ident: ($($arg: ident: $arg_type: ty),*) -> $field_type: ty),* + }) => { $( #[derive(Clone, Debug)] pub struct $field_name; impl $field_name { pub fn key($($arg: $arg_type),*) -> Vec { - $crate::db_key( + $crate::serai_db_key( stringify!($db_name).as_bytes(), stringify!($field_name).as_bytes(), - (vec![] as Vec, $($arg),*).encode() + ($($arg),*).encode() ) } #[allow(dead_code)] - pub fn set(txn: &mut impl DbTxn $(, $arg: $arg_type)*, data: &impl serde::Serialize) { + pub fn set(txn: &mut impl DbTxn $(, $arg: $arg_type)*, data: &impl serde::Serialize) { let key = $field_name::key($($arg),*); txn.put(&key, bincode::serialize(data).unwrap()); } diff --git a/common/db/src/lib.rs b/common/db/src/lib.rs index ad38a4e54..031bcd4ed 100644 --- a/common/db/src/lib.rs +++ b/common/db/src/lib.rs @@ -1,3 +1,6 @@ +mod create_db; +pub use create_db::*; + mod mem; pub use mem::*; @@ -5,8 +8,7 @@ pub use mem::*; mod rocks; #[cfg(feature = "rocksdb")] pub use rocks::{RocksDB, new_rocksdb}; -mod create_db; -pub use create_db::*; + /// An object implementing get. 
pub trait Get { fn get(&self, key: impl AsRef<[u8]>) -> Option>; diff --git a/processor/src/key_gen.rs b/processor/src/key_gen.rs index 6ac468b15..9e20f57e4 100644 --- a/processor/src/key_gen.rs +++ b/processor/src/key_gen.rs @@ -1,6 +1,5 @@ use std::collections::HashMap; -use serai_db::create_db; use zeroize::Zeroizing; use rand_core::SeedableRng; @@ -19,7 +18,7 @@ use scale::Encode; use serai_client::validator_sets::primitives::{ValidatorSet, KeyPair}; use messages::key_gen::*; -use crate::{Get, DbTxn, Db, networks::Network}; +use crate::{Get, DbTxn, Db, create_db, networks::Network}; #[derive(Debug)] pub struct KeyConfirmed { @@ -35,7 +34,7 @@ create_db!( // Overwriting its commitments would be accordingly poor CommitmentsDb: (key: &KeyGenId) -> HashMap>, GeneratedKeysDb: (set: &ValidatorSet, substrate_key: &[u8; 32], network_key: &[u8]) -> Vec, - KeysDb: (key: &Vec) -> Vec + KeysDb: (network_key: &[u8]) -> Vec } ); @@ -100,18 +99,18 @@ impl KeysDb { }, keys.1[0].group_key().to_bytes().as_ref(), ); - txn.put(KeysDb::key(&keys.1[0].group_key().to_bytes().as_ref().into()), keys_vec); + txn.put(KeysDb::key(keys.1[0].group_key().to_bytes().as_ref()), keys_vec); keys } #[allow(clippy::type_complexity)] fn keys( getter: &impl Get, - key: &::G, + network_key: &::G, ) -> Option<(Vec>, Vec>)> { let res = - GeneratedKeysDb::read_keys::(getter, &Self::key(&key.to_bytes().as_ref().into()))?.1; - assert_eq!(&res.1[0].group_key(), key); + GeneratedKeysDb::read_keys::(getter, &Self::key(network_key.to_bytes().as_ref()))?.1; + assert_eq!(&res.1[0].group_key(), network_key); Some(res) } }