Database Macro (#408)

* db_macro

* wip: converted processor/key_gen to use create_db macro

* wip: converted processor/key_gen to use create_db macro

* wip: formatting

* fix: added no_run to doc

* fix: documentation example had extra parenths

* fix: ignore doc test entirely

* Corrections from rebasing

* Misc lint

---------

Co-authored-by: Luke Parker <[email protected]>
davidjohnbell and kayabaNerve authored Nov 5, 2023
1 parent 97fedf6 commit facb581
Showing 3 changed files with 135 additions and 102 deletions.
68 changes: 68 additions & 0 deletions common/db/src/create_db.rs
@@ -0,0 +1,68 @@
#[doc(hidden)]
pub fn serai_db_key(
  db_dst: &'static [u8],
  item_dst: &'static [u8],
  key: impl AsRef<[u8]>,
) -> Vec<u8> {
  // Length-prefix the database and item namespaces so distinct (db, item) pairs cannot
  // produce colliding key prefixes
  let db_len = u8::try_from(db_dst.len()).unwrap();
  let dst_len = u8::try_from(item_dst.len()).unwrap();
  [[db_len].as_ref(), db_dst, [dst_len].as_ref(), item_dst, key.as_ref()].concat()
}

/// Creates a series of structs which provide namespacing for keys
///
/// # Description
///
/// For each item, creates a unit struct with `key`, `get`, and `set` functions. The macro uses a
/// syntax similar to defining a function. Parameters are concatenated to produce a key; they must
/// be `scale`-encodable. The return type is used to automatically encode and decode the database
/// value bytes using `bincode`.
///
/// # Arguments
///
/// * `db_name` - A database name
/// * `field_name` - An item name
/// * `args` - Comma-separated list of key arguments
/// * `field_type` - The return type
///
/// # Example
///
/// ```ignore
/// create_db!(
///   TrubutariesDb {
///     AttemptsDb: (key_bytes: &[u8], attempt_id: u32) -> u64,
///     ExpiredDb: (genesis: [u8; 32]) -> Vec<u8>
///   }
/// )
/// ```
#[macro_export]
macro_rules! create_db {
  ($db_name: ident {
    $($field_name: ident: ($($arg: ident: $arg_type: ty),*) -> $field_type: ty),*
  }) => {
    $(
      #[derive(Clone, Debug)]
      pub struct $field_name;
      impl $field_name {
        pub fn key($($arg: $arg_type),*) -> Vec<u8> {
          $crate::serai_db_key(
            stringify!($db_name).as_bytes(),
            stringify!($field_name).as_bytes(),
            ($($arg),*).encode()
          )
        }
        #[allow(dead_code)]
        pub fn set(txn: &mut impl DbTxn $(, $arg: $arg_type)*, data: &impl serde::Serialize) {
          let key = $field_name::key($($arg),*);
          txn.put(&key, bincode::serialize(data).unwrap());
        }
        #[allow(dead_code)]
        pub fn get(getter: &impl Get, $($arg: $arg_type),*) -> Option<$field_type> {
          getter.get($field_name::key($($arg),*)).map(|data| {
            bincode::deserialize(data.as_ref()).unwrap()
          })
        }
      }
    )*
  };
}
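For orientation, a minimal usage sketch (not part of this commit) of the structs the macro generates. It assumes the crate is depended on as `serai_db`, that its `MemDb` backs storage, and that the caller has `scale`, `serde`, and `bincode` as dependencies; `ExampleDb`, `AttemptsDb`, and `example_usage` are hypothetical names used only for illustration.

use scale::Encode;
use serai_db::{Get, DbTxn, Db, MemDb, create_db};

create_db!(
  ExampleDb {
    AttemptsDb: (key_bytes: &[u8], attempt_id: u32) -> u64
  }
);

fn example_usage() {
  let mut db = MemDb::new();
  let mut txn = db.txn();
  // set bincode-serializes the value and writes it under the namespaced key
  AttemptsDb::set(&mut txn, b"plan".as_ref(), 0, &5u64);
  // get reads through any Get implementor, such as the open transaction
  assert_eq!(AttemptsDb::get(&txn, b"plan".as_ref(), 0), Some(5));
  txn.commit();
}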
3 changes: 3 additions & 0 deletions common/db/src/lib.rs
@@ -1,3 +1,6 @@
mod create_db;
pub use create_db::*;

mod mem;
pub use mem::*;

166 changes: 64 additions & 102 deletions processor/src/key_gen.rs
@@ -1,4 +1,3 @@
use core::marker::PhantomData;
use std::collections::HashMap;

use zeroize::Zeroizing;
@@ -19,61 +18,48 @@ use scale::Encode;
use serai_client::validator_sets::primitives::{ValidatorSet, KeyPair};
use messages::key_gen::*;

use crate::{Get, DbTxn, Db, networks::Network};
use crate::{Get, DbTxn, Db, create_db, networks::Network};

#[derive(Debug)]
pub struct KeyConfirmed<C: Ciphersuite> {
pub substrate_keys: Vec<ThresholdKeys<Ristretto>>,
pub network_keys: Vec<ThresholdKeys<C>>,
}

#[derive(Clone, Debug)]
struct KeyGenDb<N: Network, D: Db>(PhantomData<D>, PhantomData<N>);
impl<N: Network, D: Db> KeyGenDb<N, D> {
fn key_gen_key(dst: &'static [u8], key: impl AsRef<[u8]>) -> Vec<u8> {
D::key(b"KEY_GEN", dst, key)
create_db!(
KeyGenDb {
ParamsDb: (key: &ValidatorSet) -> (ThresholdParams, u16),
// Not scoped to the set since that'd have latter attempts overwrite former
// A former attempt may become the finalized attempt, even if it doesn't in a timely manner
// Overwriting its commitments would be accordingly poor
CommitmentsDb: (key: &KeyGenId) -> HashMap<Participant, Vec<u8>>,
GeneratedKeysDb: (set: &ValidatorSet, substrate_key: &[u8; 32], network_key: &[u8]) -> Vec<u8>,
KeysDb: (network_key: &[u8]) -> Vec<u8>
}
);

fn params_key(set: &ValidatorSet) -> Vec<u8> {
Self::key_gen_key(b"params", set.encode())
}
fn save_params(
txn: &mut D::Transaction<'_>,
set: &ValidatorSet,
params: &ThresholdParams,
shares: u16,
) {
txn.put(Self::params_key(set), bincode::serialize(&(params, shares)).unwrap());
}
fn params<G: Get>(getter: &G, set: &ValidatorSet) -> Option<(ThresholdParams, u16)> {
getter.get(Self::params_key(set)).map(|bytes| bincode::deserialize(&bytes).unwrap())
}
impl GeneratedKeysDb {
#[allow(clippy::type_complexity)]
fn read_keys<N: Network>(
getter: &impl Get,
key: &[u8],
) -> Option<(Vec<u8>, (Vec<ThresholdKeys<Ristretto>>, Vec<ThresholdKeys<N::Curve>>))> {
let keys_vec = getter.get(key)?;
let mut keys_ref: &[u8] = keys_vec.as_ref();

// Not scoped to the set since that'd have latter attempts overwrite former
// A former attempt may become the finalized attempt, even if it doesn't in a timely manner
// Overwriting its commitments would be accordingly poor
fn commitments_key(id: &KeyGenId) -> Vec<u8> {
Self::key_gen_key(b"commitments", id.encode())
}
fn save_commitments(
txn: &mut D::Transaction<'_>,
id: &KeyGenId,
commitments: &HashMap<Participant, Vec<u8>>,
) {
txn.put(Self::commitments_key(id), bincode::serialize(commitments).unwrap());
}
fn commitments<G: Get>(getter: &G, id: &KeyGenId) -> HashMap<Participant, Vec<u8>> {
bincode::deserialize::<HashMap<Participant, Vec<u8>>>(
&getter.get(Self::commitments_key(id)).unwrap(),
)
.unwrap()
let mut substrate_keys = vec![];
let mut network_keys = vec![];
while !keys_ref.is_empty() {
substrate_keys.push(ThresholdKeys::new(ThresholdCore::read(&mut keys_ref).unwrap()));
let mut these_network_keys = ThresholdKeys::new(ThresholdCore::read(&mut keys_ref).unwrap());
N::tweak_keys(&mut these_network_keys);
network_keys.push(these_network_keys);
}
Some((keys_vec, (substrate_keys, network_keys)))
}

fn generated_keys_key(set: ValidatorSet, key_pair: (&[u8; 32], &[u8])) -> Vec<u8> {
Self::key_gen_key(b"generated_keys", (set, key_pair).encode())
}
fn save_keys(
txn: &mut D::Transaction<'_>,
fn save_keys<N: Network>(
txn: &mut impl DbTxn,
id: &KeyGenId,
substrate_keys: &[ThresholdCore<Ristretto>],
network_keys: &[ThresholdKeys<N::Curve>],
@@ -84,46 +70,27 @@ impl<N: Network, D: Db> KeyGenDb<N, D> {
keys.extend(network_keys.serialize().as_slice());
}
txn.put(
Self::generated_keys_key(
id.set,
(
&substrate_keys[0].group_key().to_bytes(),
network_keys[0].group_key().to_bytes().as_ref(),
),
Self::key(
&id.set,
&substrate_keys[0].group_key().to_bytes(),
network_keys[0].group_key().to_bytes().as_ref(),
),
&keys,
keys,
);
}
}

fn keys_key(key: &<N::Curve as Ciphersuite>::G) -> Vec<u8> {
Self::key_gen_key(b"keys", key.to_bytes())
}
#[allow(clippy::type_complexity)]
fn read_keys<G: Get>(
getter: &G,
key: &[u8],
) -> Option<(Vec<u8>, (Vec<ThresholdKeys<Ristretto>>, Vec<ThresholdKeys<N::Curve>>))> {
let keys_vec = getter.get(key)?;
let mut keys_ref: &[u8] = keys_vec.as_ref();

let mut substrate_keys = vec![];
let mut network_keys = vec![];
while !keys_ref.is_empty() {
substrate_keys.push(ThresholdKeys::new(ThresholdCore::read(&mut keys_ref).unwrap()));
let mut these_network_keys = ThresholdKeys::new(ThresholdCore::read(&mut keys_ref).unwrap());
N::tweak_keys(&mut these_network_keys);
network_keys.push(these_network_keys);
}
Some((keys_vec, (substrate_keys, network_keys)))
}
fn confirm_keys(
txn: &mut D::Transaction<'_>,
impl KeysDb {
fn confirm_keys<N: Network>(
txn: &mut impl DbTxn,
set: ValidatorSet,
key_pair: KeyPair,
) -> (Vec<ThresholdKeys<Ristretto>>, Vec<ThresholdKeys<N::Curve>>) {
let (keys_vec, keys) =
Self::read_keys(txn, &Self::generated_keys_key(set, (&key_pair.0 .0, key_pair.1.as_ref())))
.unwrap();
let (keys_vec, keys) = GeneratedKeysDb::read_keys::<N>(
txn,
&GeneratedKeysDb::key(&set, &key_pair.0 .0, key_pair.1.as_ref()),
)
.unwrap();
assert_eq!(key_pair.0 .0, keys.0[0].group_key().to_bytes());
assert_eq!(
{
@@ -132,16 +99,18 @@ impl<N: Network, D: Db> KeyGenDb<N, D> {
},
keys.1[0].group_key().to_bytes().as_ref(),
);
txn.put(Self::keys_key(&keys.1[0].group_key()), keys_vec);
txn.put(KeysDb::key(keys.1[0].group_key().to_bytes().as_ref()), keys_vec);
keys
}

#[allow(clippy::type_complexity)]
fn keys<G: Get>(
getter: &G,
key: &<N::Curve as Ciphersuite>::G,
fn keys<N: Network>(
getter: &impl Get,
network_key: &<N::Curve as Ciphersuite>::G,
) -> Option<(Vec<ThresholdKeys<Ristretto>>, Vec<ThresholdKeys<N::Curve>>)> {
let res = Self::read_keys(getter, &Self::keys_key(key))?.1;
assert_eq!(&res.1[0].group_key(), key);
let res =
GeneratedKeysDb::read_keys::<N>(getter, &Self::key(network_key.to_bytes().as_ref()))?.1;
assert_eq!(&res.1[0].group_key(), network_key);
Some(res)
}
}
@@ -168,7 +137,7 @@ impl<N: Network, D: Db> KeyGen<N, D> {

pub fn in_set(&self, set: &ValidatorSet) -> bool {
// We determine if we're in set using if we have the parameters for a set's key generation
KeyGenDb::<N, D>::params(&self.db, set).is_some()
ParamsDb::get(&self.db, set).is_some()
}

#[allow(clippy::type_complexity)]
@@ -177,15 +146,8 @@ impl<N: Network, D: Db> KeyGen<N, D> {
key: &<N::Curve as Ciphersuite>::G,
) -> Option<(Vec<ThresholdKeys<Ristretto>>, Vec<ThresholdKeys<N::Curve>>)> {
// This is safe, despite not having a txn, since it's a static value
// The only concern is it may not be set when expected, or it may be set unexpectedly
//
// They're only expected to be set on boot, if confirmed. If they were confirmed yet the
// transaction wasn't committed, their confirmation will be re-handled
//
// The only other concern is if it's set when it's not safe to use
// The keys are only written on confirmation, and the transaction writing them is atomic to
// every associated operation
KeyGenDb::<N, D>::keys(&self.db, key)
// It doesn't change over time/in relation to other operations
KeysDb::keys::<N>(&self.db, key)
}

pub async fn handle(
@@ -313,7 +275,7 @@ impl<N: Network, D: Db> KeyGen<N, D> {
self.active_share.remove(&id.set).is_none()
{
// If we haven't handled this set before, save the params
KeyGenDb::<N, D>::save_params(txn, &id.set, &params, shares);
ParamsDb::set(txn, &id.set, &(params, shares));
}

let (machines, commitments) = key_gen_machines(id, params, shares);
@@ -332,7 +294,7 @@ impl<N: Network, D: Db> KeyGen<N, D> {
panic!("commitments when already handled commitments");
}

let (params, share_quantity) = KeyGenDb::<N, D>::params(txn, &id.set).unwrap();
let (params, share_quantity) = ParamsDb::get(txn, &id.set).unwrap();

// Unwrap the machines, rebuilding them if we didn't have them in our cache
// We won't if the processor rebooted
@@ -344,7 +306,7 @@ impl<N: Network, D: Db> KeyGen<N, D> {
.remove(&id.set)
.unwrap_or_else(|| key_gen_machines(id, params, share_quantity));

KeyGenDb::<N, D>::save_commitments(txn, &id, &commitments);
CommitmentsDb::set(txn, &id, &commitments);
let (machines, shares) = secret_share_machines(id, params, prior, commitments);

self.active_share.insert(id.set, (machines, shares.clone()));
@@ -355,12 +317,12 @@ impl<N: Network, D: Db> KeyGen<N, D> {
CoordinatorMessage::Shares { id, shares } => {
info!("Received shares for {:?}", id);

let (params, share_quantity) = KeyGenDb::<N, D>::params(txn, &id.set).unwrap();
let (params, share_quantity) = ParamsDb::get(txn, &id.set).unwrap();

// Same commentary on inconsistency as above exists
let (machines, our_shares) = self.active_share.remove(&id.set).unwrap_or_else(|| {
let prior = key_gen_machines(id, params, share_quantity);
secret_share_machines(id, params, prior, KeyGenDb::<N, D>::commitments(txn, &id))
secret_share_machines(id, params, prior, CommitmentsDb::get(txn, &id).unwrap())
});

let mut rng = share_rng(id);
@@ -437,7 +399,7 @@ impl<N: Network, D: Db> KeyGen<N, D> {
}
}

KeyGenDb::<N, D>::save_keys(txn, &id, &substrate_keys, &network_keys);
GeneratedKeysDb::save_keys::<N>(txn, &id, &substrate_keys, &network_keys);

ProcessorMessage::GeneratedKeyPair {
id,
@@ -454,15 +416,15 @@ impl<N: Network, D: Db> KeyGen<N, D> {
set: ValidatorSet,
key_pair: KeyPair,
) -> KeyConfirmed<N::Curve> {
let (substrate_keys, network_keys) = KeyGenDb::<N, D>::confirm_keys(txn, set, key_pair.clone());

info!(
"Confirmed key pair {} {} for set {:?}",
hex::encode(key_pair.0),
hex::encode(key_pair.1),
hex::encode(&key_pair.1),
set,
);

let (substrate_keys, network_keys) = KeysDb::confirm_keys::<N>(txn, set, key_pair);

KeyConfirmed { substrate_keys, network_keys }
}
}
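As an orientation aid for the interleaved hunks above: the refactor replaces the PhantomData-carrying KeyGenDb<N, D> helpers with the macro-generated unit structs, keeping network-generic logic on inherent impls. Roughly, the call sites change as follows (a summary of the diff, not additional code in the commit):

// Old (KeyGenDb::<N, D>::...)                     New (macro-generated structs)
// save_params(txn, &id.set, &params, shares)  ->  ParamsDb::set(txn, &id.set, &(params, shares))
// params(&self.db, set)                       ->  ParamsDb::get(&self.db, set)
// save_commitments(txn, &id, &commitments)    ->  CommitmentsDb::set(txn, &id, &commitments)
// commitments(txn, &id)                       ->  CommitmentsDb::get(txn, &id).unwrap()
// save_keys(txn, &id, ...)                    ->  GeneratedKeysDb::save_keys::<N>(txn, &id, ...)
// confirm_keys(txn, set, key_pair)            ->  KeysDb::confirm_keys::<N>(txn, set, key_pair)
// keys(&self.db, key)                         ->  KeysDb::keys::<N>(&self.db, key)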
