Refactor public param config
Unify public param config with config.rs, spin out CLI config

Address feedback and refactor CLI overrides

Simplify global initialization

Add configurable config file location
samuelburnham committed Oct 24, 2023
1 parent 3292852 commit 962f220
Showing 31 changed files with 851 additions and 628 deletions.
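Taken together, the commit replaces per-call-site public parameter paths with a single config entry point unified with config.rs. The sketch below shows how a caller now initializes it; the call shape is taken from the benches/common/mod.rs addition further down, the helper name init_lurk_config is illustrative, and the once-only caching behavior of repeated calls is assumed from the "Simplify global initialization" note rather than confirmed by the diff.

use camino::Utf8PathBuf;
use lurk::cli::paths::lurk_default_dir;
use lurk::config::lurk_config;

// Illustrative helper: point the global Lurk settings at a TOML file once,
// early in the program, before anything that needs public parameters runs.
fn init_lurk_config() {
    // Default location used by the benches below; any Utf8PathBuf works here.
    let config_path: Utf8PathBuf = lurk_default_dir().join("lurk.toml");

    // The second argument is assumed to carry optional CLI-style overrides;
    // passing None relies on the file (and built-in defaults) alone.
    lurk_config(Some(&config_path), None);
}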
22 changes: 10 additions & 12 deletions Cargo.toml
@@ -21,7 +21,7 @@ bellpepper-core = { workspace = true }
bincode = { workspace = true }
blstrs = { workspace = true }
bytecount = "=0.6.4"
camino = { workspace = true }
camino = { workspace = true, features = ["serde1"] }
clap = { workspace = true, features = ["derive"] }
config = "0.13.3"
dashmap = "5.5.0"
@@ -34,7 +34,7 @@ itertools = "0.9"
lurk-macros = { path = "lurk-macros" }
lurk-metrics = { path = "lurk-metrics" }
metrics = { workspace = true }
neptune = { workspace = true, features = ["arity2","arity4","arity8","arity16","pasta","bls"] }
neptune = { workspace = true, features = ["arity2", "arity4", "arity8", "arity16", "pasta", "bls"] }
nom = "7.1.3"
nom_locate = "4.1.0"
nova = { workspace = true }
@@ -58,7 +58,7 @@ serde_repr = "0.1.14"
tap = "1.0.1"
stable_deref_trait = "1.2.0"
thiserror = { workspace = true }
abomonation = { workspace = true}
abomonation = { workspace = true }
abomonation_derive = { git = "https://github.com/lurk-lab/abomonation_derive.git" }
crossbeam = "0.8.2"
byteorder = "1.4.3"
@@ -69,7 +69,7 @@ ansi_term = "0.12.1"
tracing = { workspace = true }
tracing-texray = { workspace = true }
tracing-subscriber = { workspace = true, features = ["env-filter"] }
elsa = { version = "1.9.0", git="https://github.com/lurk-lab/elsa", branch = "sync_index_map", features = ["indexmap"] }
elsa = { version = "1.9.0", git = "https://github.com/lurk-lab/elsa", branch = "sync_index_map", features = ["indexmap"] }
arc-swap = "1.6.0"

[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
@@ -78,7 +78,7 @@ pasta-msm = { workspace = true }
proptest = { workspace = true }
proptest-derive = { workspace = true }
rand = "0.8.5"
rustyline = { version = "11.0", features = ["derive", "with-file-history"], default-features = false }
rustyline = { version = "11.0", features = [
"derive",
"with-file-history",
], default-features = false }
home = "0.5.5"

[target.'cfg(target_arch = "wasm32")'.dependencies]
@@ -110,12 +113,7 @@ vergen = { version = "8", features = ["build", "git", "gitcl"] }

[workspace]
resolver = "2"
members = [
"clutch",
"fcomm",
"lurk-macros",
"lurk-metrics"
]
members = ["clutch", "fcomm", "lurk-macros", "lurk-metrics"]

# Dependencies that should be kept in sync through the whole workspace
[workspace.dependencies]
@@ -199,4 +197,4 @@ harness = false

[patch.crates-io]
# This is needed to ensure halo2curves, which imports pasta-curves, uses the *same* traits in bn256_grumpkin
pasta_curves = { git="https://github.com/lurk-lab/pasta_curves", branch="dev" }
pasta_curves = { git = "https://github.com/lurk-lab/pasta_curves", branch = "dev" }
14 changes: 14 additions & 0 deletions benches/common/mod.rs
@@ -0,0 +1,14 @@
use camino::Utf8PathBuf;
use lurk::cli::paths::lurk_default_dir;
use lurk::config::lurk_config;
use once_cell::sync::Lazy;

/// Edit this path to use a config file specific to benchmarking
/// E.g. `Utf8PathBuf::from("/home/<user>/lurk-rs/lurk-bench.toml");`
pub static BENCH_CONFIG_PATH: Lazy<Utf8PathBuf> =
Lazy::new(|| lurk_default_dir().join("lurk.toml"));

/// Sets the config settings with the given file
pub fn set_bench_config() {
lurk_config(Some(&BENCH_CONFIG_PATH), None);
}
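Benchmarks opt in by declaring mod common;, importing common::set_bench_config, and calling it once at the top of each benchmark function before any public parameters are built; the end2end.rs changes below apply exactly that pattern to every benchmark. Pointing the benches at a dedicated file only requires editing the BENCH_CONFIG_PATH initializer above, for example to the lurk-bench.toml path suggested in the doc comment.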
48 changes: 21 additions & 27 deletions benches/end2end.rs
@@ -1,4 +1,3 @@
use camino::Utf8Path;
use criterion::{
black_box, criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion, SamplingMode,
};
@@ -25,7 +24,9 @@ use lurk::{
use std::time::Duration;
use std::{cell::RefCell, rc::Rc, sync::Arc};

const PUBLIC_PARAMS_PATH: &str = "/var/tmp/lurk_benches/public_params";
mod common;
use common::set_bench_config;

const DEFAULT_REDUCTION_COUNT: usize = 10;

fn go_base<F: LurkField>(store: &Store<F>, state: Rc<RefCell<State>>, a: u64, b: u64) -> Ptr<F> {
@@ -57,6 +58,7 @@ fn end2end_benchmark(c: &mut Criterion) {
.measurement_time(Duration::from_secs(120))
.sample_size(10);

set_bench_config();
let limit = 1_000_000_000;
let lang_pallas = Lang::<Fq, Coproc<Fq>>::new();
let lang_pallas_rc = Arc::new(lang_pallas.clone());
@@ -74,11 +76,7 @@ fn end2end_benchmark(c: &mut Criterion) {
true,
Kind::NovaPublicParams,
);
let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(
&instance,
Utf8Path::new(PUBLIC_PARAMS_PATH),
)
.unwrap();
let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap();

let size = (10, 0);
let benchmark_id = BenchmarkId::new("end2end_go_base_nova", format!("_{}_{}", size.0, size.1));
@@ -241,6 +239,7 @@ fn prove_benchmark(c: &mut Criterion) {
.measurement_time(Duration::from_secs(120))
.sample_size(10);

set_bench_config();
let limit = 1_000_000_000;
let lang_pallas = Lang::<Fq, Coproc<Fq>>::new();
let lang_pallas_rc = Arc::new(lang_pallas.clone());
@@ -251,17 +250,15 @@ fn prove_benchmark(c: &mut Criterion) {
let benchmark_id = BenchmarkId::new("prove_go_base_nova", format!("_{}_{}", size.0, size.1));

let state = State::init_lurk_state().rccell();

// use cached public params
let instance = Instance::new(
reduction_count,
lang_pallas_rc.clone(),
true,
Kind::NovaPublicParams,
);
let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(
&instance,
Utf8Path::new(PUBLIC_PARAMS_PATH),
)
.unwrap();
let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap();

group.bench_with_input(benchmark_id, &size, |b, &s| {
let ptr = go_base::<Fq>(&store, state.clone(), s.0, s.1);
@@ -293,6 +290,7 @@ fn prove_compressed_benchmark(c: &mut Criterion) {
.measurement_time(Duration::from_secs(120))
.sample_size(10);

set_bench_config();
let limit = 1_000_000_000;
let lang_pallas = Lang::<Fq, Coproc<Fq>>::new();
let lang_pallas_rc = Arc::new(lang_pallas.clone());
@@ -306,17 +304,15 @@ fn prove_compressed_benchmark(
);

let state = State::init_lurk_state().rccell();

// use cached public params
let instance = Instance::new(
reduction_count,
lang_pallas_rc.clone(),
true,
Kind::NovaPublicParams,
);
let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(
&instance,
Utf8Path::new(PUBLIC_PARAMS_PATH),
)
.unwrap();
let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap();

group.bench_with_input(benchmark_id, &size, |b, &s| {
let ptr = go_base::<Fq>(&store, state.clone(), s.0, s.1);
@@ -349,24 +345,23 @@ fn verify_benchmark(c: &mut Criterion) {
.measurement_time(Duration::from_secs(10))
.sample_size(10);

set_bench_config();
let limit = 1_000_000_000;
let lang_pallas = Lang::<Fq, Coproc<Fq>>::new();
let lang_pallas_rc = Arc::new(lang_pallas.clone());
let store = Store::default();
let reduction_count = DEFAULT_REDUCTION_COUNT;

let state = State::init_lurk_state().rccell();

// use cached public params
let instance = Instance::new(
reduction_count,
lang_pallas_rc.clone(),
true,
Kind::NovaPublicParams,
);
let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(
&instance,
Utf8Path::new(PUBLIC_PARAMS_PATH),
)
.unwrap();
let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap();

let sizes = vec![(10, 0)];
for size in sizes {
@@ -411,24 +406,23 @@ fn verify_compressed_benchmark(c: &mut Criterion) {
.measurement_time(Duration::from_secs(10))
.sample_size(10);

set_bench_config();
let limit = 1_000_000_000;
let lang_pallas = Lang::<Fq, Coproc<Fq>>::new();
let lang_pallas_rc = Arc::new(lang_pallas.clone());
let store = Store::default();
let reduction_count = DEFAULT_REDUCTION_COUNT;

let state = State::init_lurk_state().rccell();

// use cached public params
let instance = Instance::new(
reduction_count,
lang_pallas_rc.clone(),
true,
Kind::NovaPublicParams,
);
let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(
&instance,
Utf8Path::new(PUBLIC_PARAMS_PATH),
)
.unwrap();
let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap();

let sizes = vec![(10, 0)];
for size in sizes {
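Every end2end.rs hunk above makes the same substitution: the hard-coded PUBLIC_PARAMS_PATH constant and the explicit path argument to public_params are removed in favor of a one-time set_bench_config() call, after which public_params resolves its cache location from the loaded config. Condensed into one before/after view (the commented lines are the removed form):

// Removed: a fixed cache directory threaded through every call site.
//   const PUBLIC_PARAMS_PATH: &str = "/var/tmp/lurk_benches/public_params";
//   let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(
//       &instance,
//       Utf8Path::new(PUBLIC_PARAMS_PATH),
//   )
//   .unwrap();

// Added: configure once per benchmark, then call public_params without a path.
set_bench_config();
let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap();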
