From d9c81c1c2d0c3220884d2c983259721bfd17ecd3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Fran=C3=A7ois=20Garillot?= <4142+huitseeker@users.noreply.github.com>
Date: Wed, 8 Nov 2023 17:12:29 -0500
Subject: [PATCH] chore: Remove 'fcomm' package and associated files (#871)

- Removed the entire `fcomm` module.
- Deleted the numerous `fcomm`-related source and snapshot files from the project.
- Removed `test_make_fcomm_examples` from the excluded tests of the Linux Tests phase in the GitHub Actions workflow.
- Updated the README link under the 'Proofs' section from `fcomm` to the `demo` directory.
- Removed `fcomm` as a workspace member in `Cargo.toml` and from the GitHub Actions CI/CD setup.
---
 .github/workflows/merge-tests.yml | 2 +-
 Cargo.toml | 2 +-
 README.md | 2 +-
 fcomm/.clippy.toml | 1 -
 fcomm/.gitignore | 2 -
 fcomm/Cargo.toml | 46 -
 fcomm/README.md | 69 -
 fcomm/examples/.gitignore | 4 -
 fcomm/examples/Makefile | 49 -
 fcomm/examples/chained-function.json | 1 -
 fcomm/examples/chained-input.lurk | 1 -
 fcomm/examples/chained-request.json | 1 -
 fcomm/examples/eval-input.lurk | 1 -
 fcomm/examples/fcomm | 1 -
 fcomm/examples/fibonacci.json | 2 -
 fcomm/examples/fibonacci.lurk | 11 -
 fcomm/examples/num-list-function.json | 1 -
 fcomm/examples/num-list-input.lurk | 7 -
 fcomm/src/bin/fcomm.rs | 546 -------
 fcomm/src/error.rs | 35 -
 fcomm/src/file_map.rs | 96 --
 fcomm/src/lib.rs | 1331 -----------------
 ...st__snapshot_serialized_expressions-2.snap | 9 -
 ...st__snapshot_serialized_expressions-3.snap | 5 -
 ...st__snapshot_serialized_expressions-4.snap | 13 -
 ...st__snapshot_serialized_expressions-5.snap | 11 -
 ...test__snapshot_serialized_expressions.snap | 11 -
 fcomm/tests/makefile_tests.rs | 47 -
 fcomm/tests/proof_tests.rs | 331 ----
 29 files changed, 3 insertions(+), 2635 deletions(-)
 delete mode 120000 fcomm/.clippy.toml
 delete mode 100644 fcomm/.gitignore
 delete mode 100644 fcomm/Cargo.toml
 delete mode 100644 fcomm/README.md
 delete mode 100644 fcomm/examples/.gitignore
 delete mode 100644 fcomm/examples/Makefile
 delete mode 100644 fcomm/examples/chained-function.json
 delete mode 100644 fcomm/examples/chained-input.lurk
 delete mode 100644 fcomm/examples/chained-request.json
 delete mode 100644 fcomm/examples/eval-input.lurk
 delete mode 120000 fcomm/examples/fcomm
 delete mode 100644 fcomm/examples/fibonacci.json
 delete mode 100644 fcomm/examples/fibonacci.lurk
 delete mode 100644 fcomm/examples/num-list-function.json
 delete mode 100644 fcomm/examples/num-list-input.lurk
 delete mode 100644 fcomm/src/bin/fcomm.rs
 delete mode 100644 fcomm/src/error.rs
 delete mode 100644 fcomm/src/file_map.rs
 delete mode 100644 fcomm/src/lib.rs
 delete mode 100644 fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-2.snap
 delete mode 100644 fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-3.snap
 delete mode 100644 fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-4.snap
 delete mode 100644 fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-5.snap
 delete mode 100644 fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions.snap
 delete mode 100644 fcomm/tests/makefile_tests.rs
 delete mode 100644 fcomm/tests/proof_tests.rs

diff --git a/.github/workflows/merge-tests.yml b/.github/workflows/merge-tests.yml
index c7e487c5f8..25cbd8ee42 100644
--- a/.github/workflows/merge-tests.yml
+++ b/.github/workflows/merge-tests.yml
@@ -26,7 +26,7 @@ jobs:
       - uses: Swatinem/rust-cache@v2
       - name: Linux Tests
         run: |
-          cargo nextest run --profile ci --workspace --cargo-profile dev-ci --run-ignored ignored-only -E 'all() - test(test_make_fcomm_examples) - test(test_demo)'
+          cargo nextest run --profile ci --workspace --cargo-profile dev-ci --run-ignored ignored-only -E 'all() - test(test_demo)'
 
   linux-arm:
     if: github.event_name != 'pull_request' || github.event.action == 'enqueued'
diff --git a/Cargo.toml b/Cargo.toml
index 4ab4a98654..7c480ebccc 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -112,7 +112,7 @@ vergen = { version = "8", features = ["build", "git", "gitcl"] }
 
 [workspace]
 resolver = "2"
-members = ["fcomm", "lurk-macros", "lurk-metrics"]
+members = ["lurk-macros", "lurk-metrics"]
 
 # Dependencies that should be kept in sync through the whole workspace
 [workspace.dependencies]
diff --git a/README.md b/README.md
index 7e4545b220..2e517b278e 100644
--- a/README.md
+++ b/README.md
@@ -26,7 +26,7 @@ Lurk's distinguishing feature relative to most zk-SNARK authoring languages is t
 
 # Proofs
 
-Integration with backend proving systems and tooling for proof generation are both still very early. Performance and user experience still have room for significant optimization and improvement, but simple examples can be found in the [fcomm example directory](fcomm/README.md).
+Integration with backend proving systems and tooling for proof generation are both still very early. Performance and user experience still have room for significant optimization and improvement, but simple examples can be found in the [demo example directory](demo/).
 
 # Backends
 - Nova is Lurk's officially-supported IVC backend. It uses Lurk Lab's Arecibo fork of the [Nova proving system](https://github.com/lurk-lab/arecibo) and the Pasta Curves.
diff --git a/fcomm/.clippy.toml b/fcomm/.clippy.toml
deleted file mode 120000
index c5ecbcbfac..0000000000
--- a/fcomm/.clippy.toml
+++ /dev/null
@@ -1 +0,0 @@
-../.clippy.toml
\ No newline at end of file
diff --git a/fcomm/.gitignore b/fcomm/.gitignore
deleted file mode 100644
index 20c4986e01..0000000000
--- a/fcomm/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-/target
-params
diff --git a/fcomm/Cargo.toml b/fcomm/Cargo.toml
deleted file mode 100644
index c912ea4edf..0000000000
--- a/fcomm/Cargo.toml
+++ /dev/null
@@ -1,46 +0,0 @@
-[package]
-name = "fcomm"
-version = "0.1.1"
-authors = ["porcuquine "]
-license = "MIT OR Apache-2.0"
-description = "Functional commitments with Lurk"
-edition = "2021"
-repository = "https://github.com/lurk-lab/lurk-rs"
-
-[[bin]]
-name = "fcomm"
-path = "src/bin/fcomm.rs"
-
-[dependencies]
-abomonation = { workspace = true }
-anyhow = { workspace = true }
-base64 = { workspace = true }
-bellpepper-core = { workspace = true }
-bincode = { workspace = true }
-camino = { workspace = true }
-clap = { workspace = true, features = ["derive"] }
-clap-verbosity-flag = "2.0"
-ff = { workspace = true }
-hex = { version = "0.4.3", features = ["serde"] }
-lurk = { path = "../", package = "lurk" }
-lurk-macros = { path = "../lurk-macros" }
-nova = { workspace = true }
-once_cell = { workspace = true }
-pairing = { workspace = true }
-pasta_curves = { workspace = true, features = ["repr-c", "serde"] }
-proptest = { workspace = true }
-proptest-derive = { workspace = true }
-rand = { workspace = true }
-serde = { workspace = true, features = ["derive"] }
-serde_json = { workspace = true }
-thiserror = { workspace = true }
-tracing = { workspace = true }
-tracing-texray = { workspace = true }
-tracing-subscriber = { workspace = true, features = ["env-filter"] }
-
-[dev-dependencies]
-assert_cmd = "2.0.12"
-insta = { version = "1.31.0", features = ["json"] }
-num_cpus = "1.16.0"
-predicates = "3.0.4"
-tempfile = { workspace = true }
diff --git a/fcomm/README.md b/fcomm/README.md
deleted file mode 100644
index 2b4811cb4c..0000000000
--- a/fcomm/README.md
+++ /dev/null
@@ -1,69 +0,0 @@
-# fcomm: Functional Commitments
-
-## This example is a work in progress, for demonstration purposes only, and subject to change.
-
-The `fcomm` CLI exposes an interface for creating and verifying Lurk proofs, and for manipulating functional commitments.
-
-# Functional Commitments
-TODO: Explanation of functional commitment interface.
-
-# Creating and Verifying Evaluation Proofs
-
-To see how proofs work, first navigate to the `fcomm/examples` directory. From the `lurk-rs` project root,
-```bash
-➜ lurk-rs git:(master) ✗ cd fcomm/examples
-➜ examples git:(master) ✗
-```
-
-To generate a very simple proof, type the following command (this will be surprisingly slow):
-
-```bash
-> make fibonacci-proof
-```
-
-To see the program whose evaluation was proved, see its [source](examples/fibonacci.lurk).
-
-To see what the generated proof object claims to attest, see the claim section of the generated JSON. This can be viewed more legibly if you have a JSON formatter like `jq` installed:
-
-```bash
-➜ examples ✗ cat fibonacci-proof.json | jq | more
-```
-
-Yielding something like:
-```json
-{
-  "claim": {
-    "Evaluation": {
-      "expr": "(LETREC ((NEXT (LAMBDA (A B N TARGET) (IF (EQ N TARGET) A (NEXT B (+ A B) (+ 1 N) TARGET)))) (FIB (NEXT 0 1 0))) (FIB 1))",
-      "env": "NIL",
-      "cont": "Outermost",
-      "expr_out": "1",
-      "env_out": "NIL",
-      "cont_out": "Terminal",
-      "status": "Terminal",
-      "iterations": null
-    }
-  },
-  "proof": {
-    "Recursive": {
-      ...
-    }
-  }
-}
-```
-
-To verify the generated proof:
-
-```bash
-> make verify-fibonacci-proof
-```
-
-Please note the following limitations:
-- Proofs as serialized here are not optimized for size.
-- To simplify reproducibility in development and for example purposes, these parameters are deterministically generated on-demand.
-- The parameters are currently uncached.
-- This adds time to both proving and verification.
-- For larger values of the `ReductionCount` option (see: [lib.rs](src/lib.rs)), this can be significant.
-- Even for the smallest circuits used in the default examples, this leads to deceptively slow verification.
-
-To see the commands that were used, see the [Makefile](examples/Makefile).
diff --git a/fcomm/examples/.gitignore b/fcomm/examples/.gitignore deleted file mode 100644 index 2ad77497d7..0000000000 --- a/fcomm/examples/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -*proof*.json -*opening*.json -*commitment*.json -*claim*.json diff --git a/fcomm/examples/Makefile b/fcomm/examples/Makefile deleted file mode 100644 index 0d2d9c3d87..0000000000 --- a/fcomm/examples/Makefile +++ /dev/null @@ -1,49 +0,0 @@ -all: build eval \ - eval-fibonacci verify-fibonacci-proof verify-fibonacci2-proof \ - verify-num-list-opening \ - verify-chained-opening verify-chained2-opening verify-chained2-from_req-opening - -clean: - rm -f *proof*.json *opening*.json *commitment*.json *claim*.json - -build : - [ -f ../../target/release/fcomm ] || cargo build --release - -%-commitment %-commitment.json : %-function.json - cargo run --release -- commit --function $*-function.json --commitment $*-commitment.json - -%-opening %-opening.json : %-function.json %-input.lurk %-commitment.json - cargo run --release -- open --function $*-function.json --input $*-input.lurk --proof $*-opening.json - -verify-%-proof verify-%-proof.json : %-proof.json - cargo run --release -- verify --proof $*-proof.json - -verify-%-opening verify-%-opening.json : %-opening.json - cargo run --release -- verify --proof $*-opening.json - -eval : - cargo run --release eval --expression eval-input.lurk --lurk - -eval-fibonacci : - cargo run --release -- eval --expression fibonacci.lurk --lurk - -fibonacci-claim.json : fibonacci.lurk - cargo run --release -- eval --expression fibonacci.lurk --claim fibonacci-claim.json --lurk - -fibonacci-proof fibonacci-proof.json : fibonacci.lurk - cargo run --release -- prove --expression fibonacci.lurk --proof fibonacci-proof.json --lurk - -fibonacci2-proof fibonacci2-proof.json : fibonacci-claim.json - cargo run --release -- prove --claim fibonacci-claim.json --proof fibonacci2-proof.json - -chained-opening.json : chained-function.json chained-input.lurk chained-commitment - cargo run --release open --function chained-function.json --input chained-input.lurk --proof chained-opening.json --chain - -chained2-opening chained2-opening.json : chained-input.lurk chained-opening - # NOTE: This concrete commitment (1d52c2a9a97ed0fd4755e89b4310f1edc7d2c0ef3e250c4e5b51dacefc76caf5) can be found - # in chained-opening.json, but its identity is determistic based on the secret and 'second return value' (cdr) - # of the function specified in chained-function.json. 
- cargo run --release open --commitment 1d52c2a9a97ed0fd4755e89b4310f1edc7d2c0ef3e250c4e5b51dacefc76caf5 --input chained-input.lurk --proof chained2-opening.json --chain - -chained2-from_req-opening chained2-from_req-opening.json : chained2-opening chained-request.json - cargo run --release open --request chained-request.json --proof chained2-from_req-opening.json diff --git a/fcomm/examples/chained-function.json b/fcomm/examples/chained-function.json deleted file mode 100644 index 954dff1b65..0000000000 --- a/fcomm/examples/chained-function.json +++ /dev/null @@ -1 +0,0 @@ -{"expr":{"Source":"(letrec ((secret 12345) (a (lambda (acc x) (let ((acc (+ acc x))) (cons acc (hide secret (a acc))))))) (a 0))"}} diff --git a/fcomm/examples/chained-input.lurk b/fcomm/examples/chained-input.lurk deleted file mode 100644 index 7ed6ff82de..0000000000 --- a/fcomm/examples/chained-input.lurk +++ /dev/null @@ -1 +0,0 @@ -5 diff --git a/fcomm/examples/chained-request.json b/fcomm/examples/chained-request.json deleted file mode 100644 index 107a7025c4..0000000000 --- a/fcomm/examples/chained-request.json +++ /dev/null @@ -1 +0,0 @@ -{"commitment":"1d52c2a9a97ed0fd4755e89b4310f1edc7d2c0ef3e250c4e5b51dacefc76caf5","input":{"expr":{"Source":"9"}},"chain":true} diff --git a/fcomm/examples/eval-input.lurk b/fcomm/examples/eval-input.lurk deleted file mode 100644 index 4d956a64c4..0000000000 --- a/fcomm/examples/eval-input.lurk +++ /dev/null @@ -1 +0,0 @@ -(+ 1 2) diff --git a/fcomm/examples/fcomm b/fcomm/examples/fcomm deleted file mode 120000 index a656ab820c..0000000000 --- a/fcomm/examples/fcomm +++ /dev/null @@ -1 +0,0 @@ -../../target/release/fcomm \ No newline at end of file diff --git a/fcomm/examples/fibonacci.json b/fcomm/examples/fibonacci.json deleted file mode 100644 index 718076f9aa..0000000000 --- a/fcomm/examples/fibonacci.json +++ /dev/null @@ -1,2 +0,0 @@ -{"expr":{"Source":"(letrec ((next (lambda (a b n target) (if (eq n target) a (next b (+ a b) (+ 1 n) target)))) (fib (next 0 1 0))) (fib 1))"}} - diff --git a/fcomm/examples/fibonacci.lurk b/fcomm/examples/fibonacci.lurk deleted file mode 100644 index 1616715a80..0000000000 --- a/fcomm/examples/fibonacci.lurk +++ /dev/null @@ -1,11 +0,0 @@ -;; (FIB TARGET) computes the element of the Fibonacci sequence at TARGET (zero-indexed). 
-(letrec ((next (lambda (a b n target) - (if (eq n target) - a - (next b - (+ a b) - (+ 1 n) - target)))) - (fib (next 0 1 0))) - (fib 5)) - diff --git a/fcomm/examples/num-list-function.json b/fcomm/examples/num-list-function.json deleted file mode 100644 index c99ceeb799..0000000000 --- a/fcomm/examples/num-list-function.json +++ /dev/null @@ -1 +0,0 @@ -{"expr":{"Source":"(let ((nums '(1 2 3 4 5))) (lambda (f) (f nums)))"}} diff --git a/fcomm/examples/num-list-input.lurk b/fcomm/examples/num-list-input.lurk deleted file mode 100644 index 573c1adb3d..0000000000 --- a/fcomm/examples/num-list-input.lurk +++ /dev/null @@ -1,7 +0,0 @@ -(letrec ((sum-aux (lambda (acc nums) - (if nums - (sum-aux (+ acc (car nums)) (cdr nums)) - acc))) - (sum (sum-aux 0))) - (lambda (nums) - (sum nums))) diff --git a/fcomm/src/bin/fcomm.rs b/fcomm/src/bin/fcomm.rs deleted file mode 100644 index 1d9bbdc130..0000000000 --- a/fcomm/src/bin/fcomm.rs +++ /dev/null @@ -1,546 +0,0 @@ -use abomonation::Abomonation; -use fcomm::file_map::data_dir; -use lurk::circuit::circuit_frame::MultiFrame; -use lurk::lurk_sym_ptr; -use lurk::proof::nova::{CurveCycleEquipped, G1, G2}; -use lurk::public_parameters::instance::{Instance, Kind}; -use nova::traits::Group; -use std::convert::TryFrom; -use std::env; -use std::fs::read_to_string; -use std::io; -use std::path::{Path, PathBuf}; -use std::sync::Arc; -use tracing::info; -use tracing_subscriber::{fmt, prelude::*, EnvFilter, Registry}; - -use hex::FromHex; -use serde::de::DeserializeOwned; -use serde::{Deserialize, Serialize}; - -use lurk::eval::{ - lang::{Coproc, Lang}, - IO, -}; -use lurk::field::LurkField; -use lurk::proof::{nova::NovaProver, Prover}; -use lurk::ptr::{Ptr, TypePredicates}; -use lurk::public_parameters::error; -use lurk::store::Store; - -use clap::{Args, Parser, Subcommand}; -use clap_verbosity_flag::{Verbosity, WarnLevel}; - -use fcomm::{ - committed_expression_store, error::Error, evaluate, file_map::FileStore, Claim, Commitment, - CommittedExpression, Evaluation, Expression, LurkPtr, Opening, OpeningRequest, Proof, - ReductionCount, S1, -}; - -use lurk::public_parameters::public_params; - -/// Functional commitments -#[derive(Parser, Debug)] -#[clap(version, about, long_about = None)] -struct Cli { - /// Evaluate inputs before passing to function (outside the proof) when opening. Otherwise inputs are unevaluated. - #[clap(long, value_parser)] - eval_input: bool, - - /// Iteration limit - #[allow(deprecated)] - #[clap(short, long, default_value = "1000", value_parser)] - limit: usize, - - /// Exit with error on failed verification - #[clap(short, long, value_parser)] - error: bool, - - /// Be verbose - #[clap(flatten)] - verbose: Verbosity, - - #[clap(subcommand)] - command: Command, -} - -#[derive(Subcommand, Debug)] -enum Command { - /// Creates a hiding commitment to a function - Commit(Commit), - - /// Creates an opening - Open(Open), - - /// Evaluates an expression - Eval(Eval), - - /// Generates a proof for the given expression - Prove(Prove), - - /// Verifies a proof - Verify(Verify), -} - -#[derive(Args, Debug)] -struct Commit { - /// Path to function - #[clap(short, long, value_parser)] - function: PathBuf, - - /// Path to functional commitment - #[clap(short, long, value_parser)] - commitment: Option, - - // Function is lurk source. 
- #[clap(long, value_parser)] - lurk: bool, -} - -#[derive(Args, Debug)] -struct Open { - /// Path to function input - #[clap(short, long, value_parser)] - input: Option, - - /// Path to proof output if prove requested - #[clap(short, long, value_parser)] - proof: Option, - - /// Number of circuit reductions per step - #[clap(short = 'r', long, default_value = "10", value_parser)] - reduction_count: usize, - - /// Optional commitment value (hex string). Function will be looked-up by commitment if supplied. - #[clap(short, long, value_parser)] - commitment: Option, - - /// Optional path to function used if commitment is not supplied. - #[clap(short, long, value_parser)] - function: Option, - - /// Optional path to OpeningRequest -- which subsumes commitment, function, and input if supplied. - #[clap(long, value_parser)] - request: Option, - - /// Function is lurk source. - #[clap(long, value_parser)] - lurk: bool, - - /// Chain commitment openings. Opening includes commitment to new function along with output. - #[clap(long, value_parser)] - chain: bool, - - /// Quote input before passing to function when opening. Otherwise input will be passed unevaluated and unquoted. --quote-input and --eval-input would cancel each other out if used in conjunction, so is probably not what is desired. - #[clap(long, value_parser)] - quote_input: bool, -} - -#[derive(Args, Debug)] -struct Eval { - /// Path to expression source - #[clap(short = 'x', long, value_parser)] - expression: PathBuf, - - /// Wrap evaluation result in a claim - #[clap(long, value_parser)] - claim: Option, - - // Expression is lurk source. - #[clap(long, value_parser)] - lurk: bool, -} - -#[derive(Args, Debug)] -struct Prove { - /// Path to expression source - #[clap(short = 'x', long, value_parser)] - expression: Option, - - /// Path to proof output - #[clap(short, long, value_parser)] - proof: PathBuf, - - /// Number of circuit reductions per step - #[clap(short = 'r', long, default_value = "10", value_parser)] - reduction_count: usize, - - /// Path to claim to prove - #[clap(long, value_parser)] - claim: Option, - - // Expression is lurk source. 
- #[clap(long, value_parser)] - lurk: bool, -} - -#[derive(Args, Debug)] -struct Verify { - /// Path to proof input - #[clap(short, long, value_parser)] - proof: PathBuf, -} - -impl Commit { - fn commit(&self, limit: usize, lang: &Lang>) { - let s = &mut Store::::default(); - - let mut function = if self.lurk { - let path = env::current_dir() - .expect("env current dir") - .join(&self.function); - let src = read_to_string(path).expect("src read_to_string"); - - CommittedExpression { - expr: LurkPtr::Source(src), - secret: None, - commitment: None, - } - } else { - CommittedExpression::read_from_json_path(&self.function) - .expect("committed expression read_from_path") - }; - let fun_ptr = function.expr_ptr(s, limit, lang).expect("fun_ptr"); - let function_map = committed_expression_store(); - - let commitment = if let Some(secret) = function.secret { - Commitment::from_ptr_and_secret(s, &fun_ptr, secret).unwrap() - } else { - let (commitment, secret) = Commitment::from_ptr_with_hiding(s, &fun_ptr).unwrap(); - function.secret = Some(secret); - commitment - }; - function.commitment = Some(commitment); - - function_map - .set(&commitment, &function) - .expect("function_map set"); - function.write_to_json_path(&self.function); - - if let Some(commitment_path) = &self.commitment { - commitment.write_to_json_path(commitment_path); - } else { - serde_json::to_writer(io::stdout(), &commitment).expect("serde_json to_writer"); - } - } -} - -impl Open { - fn open(&self, limit: usize, eval_input: bool, lang: &Lang>) { - assert!( - !(self.commitment.is_some() && self.function.is_some()), - "commitment and function must not both be supplied" - ); - - let s = &mut Store::::default(); - let rc = ReductionCount::try_from(self.reduction_count).expect("reduction count"); - let prover = NovaProver::<'_, S1, Coproc, MultiFrame<'_, S1, Coproc>>::new( - rc.count(), - lang.clone(), - ); - let lang_rc = Arc::new(lang.clone()); - let instance = Instance::new(rc.count(), lang_rc, true, Kind::NovaPublicParams); - let pp = public_params(&instance).expect("public params"); - let function_map = committed_expression_store(); - - let handle_proof = |out_path, proof: Proof<'_, S1>| { - proof.write_to_json_path(out_path); - proof - .verify(&pp, lang) - .expect("created opening doesn't verify"); - }; - - let handle_claim = |claim: Claim| serde_json::to_writer(io::stdout(), &claim); - - let lang_rc = Arc::new(lang.clone()); - if let Some(request_path) = &self.request { - assert!(!self.chain, "chain and request may not both be specified"); - let request = opening_request(request_path).expect("failed to read opening request"); - - if let Some(out_path) = &self.proof { - let proof = - Opening::open_and_prove(s, &request, limit, false, &prover, &pp, &lang_rc) - .expect("proof opening"); - - handle_proof(out_path, proof); - } else { - let function = function_map - .get(&request.commitment) - .expect("committed function not found"); - let input = request.input.eval(s, limit, lang).unwrap(); - - let claim = Opening::apply(s, input, &function, limit, self.chain, lang) - .expect("claim apply"); - handle_claim(claim).expect("handle claim") - } - } else { - let function = if let Some(comm_string) = &self.commitment { - let commitment = Commitment::from_hex(comm_string) - .map_err(Error::CommitmentParseError) - .unwrap(); - - function_map - .get(&commitment) - .expect("committed function not found") - } else { - let function_path = self.function.as_ref().expect("function missing"); - if self.lurk { - let path = 
env::current_dir().unwrap().join(function_path); - let src = read_to_string(path).unwrap(); - CommittedExpression { - expr: LurkPtr::Source(src), - secret: None, - commitment: None, - } - } else { - CommittedExpression::read_from_json_path(function_path).unwrap() - } - }; - - let input_path = self.input.as_ref().expect("input missing"); - let input = - input(s, input_path, eval_input, limit, self.quote_input, lang).expect("input"); - - if let Some(out_path) = &self.proof { - let proof = Opening::apply_and_prove( - s, input, &function, limit, self.chain, false, &prover, &pp, &lang_rc, - ) - .expect("apply and prove"); - - handle_proof(out_path, proof); - } else { - let claim = Opening::apply(s, input, &function, limit, self.chain, lang).unwrap(); - - handle_claim(claim).unwrap(); - } - }; - } -} - -impl Eval { - fn eval(&self, limit: usize, lang: &Lang>) { - let s = &mut Store::::default(); - - let expr = expression(s, &self.expression, self.lurk, limit, lang).unwrap(); - - let evaluation = Evaluation::eval(s, expr, limit).unwrap(); - - match &self.claim { - Some(out_path) => { - let claim = Claim::::Evaluation(evaluation); - claim.write_to_json_path(out_path); - } - None => { - serde_json::to_writer(io::stdout(), &evaluation).unwrap(); - } - } - } -} - -impl Prove { - fn prove(&self, limit: usize, lang: &Lang>) { - let s = &mut Store::::default(); - let rc = ReductionCount::try_from(self.reduction_count).unwrap(); - let prover = NovaProver::<'_, S1, Coproc, MultiFrame<'_, S1, Coproc>>::new( - rc.count(), - lang.clone(), - ); - let lang_rc = Arc::new(lang.clone()); - let instance = Instance::new(rc.count(), lang_rc.clone(), true, Kind::NovaPublicParams); - let pp = public_params(&instance).unwrap(); - - let proof = match &self.claim { - Some(claim) => { - assert!( - self.expression.is_none(), - "claim and expression must not both be supplied" - ); - Proof::prove_claim( - s, - &Claim::read_from_json_path(claim).unwrap(), - limit, - false, - &prover, - &pp, - &lang_rc, - ) - .unwrap() - } - - None => { - let expr = expression( - s, - self.expression.as_ref().expect("expression missing"), - self.lurk, - limit, - lang, - ) - .unwrap(); - - Proof::eval_and_prove(s, expr, None, limit, false, &prover, &pp, &lang_rc).unwrap() - } - }; - - // Write first, so prover can debug if proof doesn't verify (it should). 
- proof.write_to_json_path(&self.proof); - proof - .verify(&pp, lang) - .expect("created proof doesn't verify"); - } -} - -impl Verify { - fn verify(&self, cli_error: bool, lang: &Lang>) { - let proof = proof(Some(&self.proof)).unwrap(); - let lang_rc = Arc::new(lang.clone()); - let instance = Instance::new( - proof.reduction_count.count(), - lang_rc, - true, - Kind::NovaPublicParams, - ); - let pp = public_params(&instance).unwrap(); - let result = proof.verify(&pp, lang).unwrap(); - - serde_json::to_writer(io::stdout(), &result).unwrap(); - - if result.verified { - info!("Verification succeeded."); - } else if cli_error { - serde_json::to_writer(io::stderr(), &result).unwrap(); - std::process::exit(1); - }; - } -} - -fn read_from_path, F: LurkField + Serialize>(store: &Store, path: P) -> Ptr { - let path = env::current_dir().unwrap().join(path); - let input = read_to_string(path).unwrap(); - store.read(&input).unwrap() -} - -fn read_eval_from_path, F: LurkField + Serialize>( - store: &Store, - path: P, - limit: usize, - lang: &Lang>, -) -> Result<(Ptr, Ptr), Error> { - let src = read_from_path(store, path); - let ( - IO { - expr, - env: _, - cont: _, - }, - _iterations, - ) = evaluate(store, src, None, limit, lang)?; - - Ok((expr, src)) -} - -fn read_no_eval_from_path, F: LurkField + Serialize>( - store: &Store, - path: P, -) -> (Ptr, Ptr) { - let src = read_from_path(store, path); - - let quote = lurk_sym_ptr!(store, quote); - let quoted = store.list(&[quote, src]); - (quoted, src) -} - -fn _lurk_function, F: LurkField + Serialize>( - store: &Store, - function_path: P, - limit: usize, - lang: &Lang>, -) -> (Ptr, Ptr) { - let (function, src) = - read_eval_from_path(store, function_path, limit, lang).expect("failed to read function"); - assert!(function.is_fun(), "FComm can only commit to functions."); - - (function, src) -} - -fn input, F: LurkField + Serialize>( - store: &Store, - input_path: P, - eval_input: bool, - limit: usize, - quote_input: bool, - lang: &Lang>, -) -> Result, Error> { - let input = if eval_input { - let (evaled_input, _src) = read_eval_from_path(store, input_path, limit, lang)?; - evaled_input - } else { - let (quoted, src) = read_no_eval_from_path(store, input_path); - if quote_input { - quoted - } else { - src - } - }; - - Ok(input) -} - -fn expression, F: LurkField + Serialize + DeserializeOwned>( - store: &Store, - expression_path: P, - lurk: bool, - limit: usize, - lang: &Lang>, -) -> Result, Error> { - if lurk { - Ok(read_from_path(store, expression_path)) - } else { - let expression = Expression::read_from_json_path(expression_path)?; - let expr = expression.expr.ptr(store, limit, lang); - Ok(expr) - } -} - -fn opening_request, F: LurkField + Serialize + DeserializeOwned>( - request_path: P, -) -> Result, error::Error> { - OpeningRequest::read_from_json_path(request_path) -} - -// Get proof from supplied path or else from stdin. -fn proof<'a, P: AsRef, F: CurveCycleEquipped>( - proof_path: Option

, -) -> Result, error::Error> -where - F: Serialize + for<'de> Deserialize<'de>, - < as Group>::Scalar as ff::PrimeField>::Repr: Abomonation, - < as Group>::Scalar as ff::PrimeField>::Repr: Abomonation, -{ - match proof_path { - Some(path) => Proof::read_from_json_path(path), - None => Proof::read_from_stdin(), - } -} - -fn main() { - let cli = Cli::parse(); - - let subscriber = Registry::default() - // TODO: correctly filter log level with `clap_verbosity_flag` - .with(fmt::layer().pretty()) - .with(EnvFilter::from_default_env()); - tracing::subscriber::set_global_default(subscriber).unwrap(); - - std::env::set_var("LURK_PUBLIC_PARAMS_DIR", data_dir().join("public_params")); - - // TODO: make this properly configurable, e.g. allowing coprocessors - let lang = Lang::new(); - - match &cli.command { - Command::Commit(c) => c.commit(cli.limit, &lang), - Command::Open(o) => o.open(cli.limit, cli.eval_input, &lang), - Command::Eval(e) => e.eval(cli.limit, &lang), - Command::Prove(p) => p.prove(cli.limit, &lang), - Command::Verify(v) => v.verify(cli.error, &lang), - } -} diff --git a/fcomm/src/error.rs b/fcomm/src/error.rs deleted file mode 100644 index e9c47d31d5..0000000000 --- a/fcomm/src/error.rs +++ /dev/null @@ -1,35 +0,0 @@ -use anyhow; -use bellpepper_core::SynthesisError; -use lurk::error::ReductionError; -use lurk::public_parameters::error; -use lurk::store; -use std::io; -use thiserror::Error; - -#[derive(Error, Debug)] -pub enum Error { - #[error("Verification error: {0}")] - VerificationError(String), - #[error("Unsupported reduction count: {0}")] - UnsupportedReductionCount(usize), - #[error("IO error: {0}")] - IOError(#[from] io::Error), - #[error("Synthesis error: {0}")] - SynthesisError(#[from] SynthesisError), - #[error("Commitment parser error: {0}")] - CommitmentParseError(#[from] hex::FromHexError), - #[error("Unknown commitment")] - UnknownCommitment, - #[error("Opening Failure: {0}")] - OpeningFailure(String), - #[error("Evaluation Failure")] - EvaluationFailure(ReductionError), - #[error("Store error: {0}")] - StoreError(#[from] store::Error), - #[error("Serde error: {0}")] - SerdeError(#[from] lurk::z_data::serde::SerdeError), - #[error("Anyhow error: {0}")] - AnyhowError(#[from] anyhow::Error), - #[error("Cache error: {0}")] - CacheError(#[from] error::Error), -} diff --git a/fcomm/src/file_map.rs b/fcomm/src/file_map.rs deleted file mode 100644 index 21768f0c3e..0000000000 --- a/fcomm/src/file_map.rs +++ /dev/null @@ -1,96 +0,0 @@ -use std::fs::create_dir_all; -use std::fs::File; -use std::io::{self, BufReader, BufWriter}; -use std::marker::PhantomData; -use std::path::Path; - -use lurk::public_parameters::error::Error; - -use camino::Utf8PathBuf; -use serde::{Deserialize, Serialize}; - -pub fn data_dir() -> Utf8PathBuf { - match std::env::var("FCOMM_DATA_PATH") { - Ok(name) => name.into(), - Err(_) => Utf8PathBuf::from("/var/tmp/fcomm_data/"), - } -} - -pub trait FileStore -where - Self: Sized, -{ - fn write_to_path>(&self, path: P); - fn write_to_json_path>(&self, path: P); - fn read_from_path>(path: P) -> Result; - fn read_from_json_path>(path: P) -> Result; - fn read_from_stdin() -> Result; -} - -impl FileStore for T -where - for<'de> T: Deserialize<'de>, -{ - fn write_to_path>(&self, path: P) { - let file = File::create(path).expect("failed to create file"); - let writer = BufWriter::new(&file); - bincode::serialize_into(writer, &self).expect("failed to write file"); - } - - fn write_to_json_path>(&self, path: P) { - let file = File::create(path).expect("failed to 
create file"); - let writer = BufWriter::new(&file); - serde_json::to_writer(writer, &self).expect("failed to write file"); - } - - fn read_from_path>(path: P) -> Result { - let file = File::open(path)?; - let reader = BufReader::new(file); - bincode::deserialize_from(reader) - .map_err(|e| Error::CacheError(format!("Cache deserialization error: {}", e))) - } - - fn read_from_json_path>(path: P) -> Result { - let file = File::open(path)?; - let reader = BufReader::new(file); - Ok(serde_json::from_reader(reader)?) - } - - fn read_from_stdin() -> Result { - let reader = BufReader::new(io::stdin()); - Ok(serde_json::from_reader(reader).expect("failed to read from stdin")) - } -} - -#[derive(Debug)] -pub struct FileMap { - dir: Utf8PathBuf, - _t: PhantomData<(K, V)>, -} - -impl FileMap { - pub fn new>(name: P) -> Result { - let data_dir = data_dir().as_std_path().join(name); - let dir = Utf8PathBuf::from_path_buf(data_dir).expect("path contains invalid Unicode"); - create_dir_all(&dir)?; - - Ok(Self { - dir, - _t: Default::default(), - }) - } - - fn key_path(&self, key: &K) -> Utf8PathBuf { - self.dir.join(Utf8PathBuf::from(key.to_string())) - } - - pub fn get(&self, key: &K) -> Option { - self.key_path(key); - V::read_from_path(self.key_path(key)).ok() - } - - pub fn set(&self, key: &K, data: &V) -> Result<(), Error> { - data.write_to_path(self.key_path(key)); - Ok(()) - } -} diff --git a/fcomm/src/lib.rs b/fcomm/src/lib.rs deleted file mode 100644 index f04d087ed6..0000000000 --- a/fcomm/src/lib.rs +++ /dev/null @@ -1,1331 +0,0 @@ -use ::nova::traits::Group; -use abomonation::Abomonation; -use std::convert::TryFrom; -use std::sync::Arc; -use tracing::info; - -#[cfg(not(target_arch = "wasm32"))] -use proptest::prelude::*; -#[cfg(not(target_arch = "wasm32"))] -use proptest_derive::Arbitrary; - -use ff::PrimeField; -use hex::FromHex; -#[cfg(not(target_arch = "wasm32"))] -use lurk::field::FWrap; -use lurk::{ - circuit::{circuit_frame::MultiFrame, ToInputs}, - eval::{ - empty_sym_env, - lang::{Coproc, Lang}, - Evaluable, Evaluator, Status, Witness, IO, - }, - field::LurkField, - hash::PoseidonCache, - lurk_sym_ptr, - proof::nova::{self, NovaProver, PublicParams, G1, G2}, - proof::Prover, - ptr::{ContPtr, Ptr}, - state::initial_lurk_state, - store::Store, - tag::ExprTag, - writer::Write, - z_expr::ZExpr, - z_ptr::ZExprPtr, - z_store::ZStore, -}; -#[cfg(not(target_arch = "wasm32"))] -use lurk_macros::serde_test; - -#[allow(unused_imports)] // this is used in the serde_test macro -#[cfg(not(target_arch = "wasm32"))] -use lurk::z_data; - -use lurk::{error::ReductionError, proof::nova::CurveCycleEquipped}; -use once_cell::sync::OnceCell; -use pasta_curves::pallas; -use rand::rngs::OsRng; -use serde::de::DeserializeOwned; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; - -use crate::file_map::FileMap; - -pub mod error; -pub mod file_map; - -use error::Error; - -pub const DEFAULT_REDUCTION_COUNT: ReductionCount = ReductionCount::Ten; -pub static VERBOSE: OnceCell = OnceCell::new(); - -pub type S1 = pallas::Scalar; - -mod base64 { - use serde::{Deserialize, Serialize}; - use serde::{Deserializer, Serializer}; - - pub(crate) fn serialize(v: &Vec, s: S) -> Result { - let base64 = base64::encode(v); - String::serialize(&base64, s) - } - - pub(crate) fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result, D::Error> { - let base64 = String::deserialize(d)?; - base64::decode(base64.as_bytes()).map_err(serde::de::Error::custom) - } -} - -pub type NovaProofCache = FileMap>; -#[must_use] -pub 
fn nova_proof_cache(reduction_count: usize) -> NovaProofCache { - FileMap::>::new(format!("nova_proofs.{reduction_count}")).unwrap() -} - -pub type CommittedExpressionMap = FileMap, CommittedExpression>; -#[must_use] -pub fn committed_expression_store() -> CommittedExpressionMap { - FileMap::, CommittedExpression>::new("committed_expressions").unwrap() -} - -// Number of circuit reductions per step, equivalent to `chunk_frame_count` -#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)] -pub enum ReductionCount { - One, - Five, - Ten, - OneHundred, -} - -#[cfg_attr(not(target_arch = "wasm32"), derive(Arbitrary))] -#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)] -pub struct Evaluation { - pub expr: String, - pub env: String, - pub cont: String, - pub expr_out: String, - pub env_out: String, - pub cont_out: String, - pub status: Status, - pub iterations: Option, -} - -#[cfg_attr(not(target_arch = "wasm32"), derive(Arbitrary))] -#[cfg_attr(not(target_arch = "wasm32"), proptest(no_bound))] -#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)] -#[cfg_attr(not(target_arch = "wasm32"), serde_test(types(S1), zdata(true)))] -pub struct PtrEvaluation { - pub expr: LurkPtr, - pub env: LurkPtr, - pub cont: LurkCont, - pub expr_out: LurkPtr, - pub env_out: LurkPtr, - pub cont_out: LurkCont, - pub status: Status, - pub iterations: Option, -} - -#[cfg_attr(not(target_arch = "wasm32"), derive(Arbitrary))] -#[derive(Clone, Copy, Debug, Eq, PartialEq)] -pub struct Commitment { - #[cfg_attr( - not(target_arch = "wasm32"), - proptest(strategy = "any::>().prop_map(|x| x.0)") - )] - pub comm: F, -} - -#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] -pub struct OpeningRequest { - pub commitment: Commitment, - pub input: Expression, - pub chain: bool, -} - -impl ToString for Commitment { - fn to_string(&self) -> String { - let s = serde_json::to_string(&self).unwrap(); - // Remove quotation marks. Yes, dumb hacks are happening. - s[1..s.len() - 1].to_string() - } -} - -impl Serialize for Commitment { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - // Use be_bytes for consistency with PrimeField printed representation. 
- let be_bytes: Vec = self - .comm - .to_repr() - .as_ref() - .iter() - .rev() - .map(|x| x.to_owned()) - .collect(); - - hex::serde::serialize(be_bytes, serializer) - } -} - -impl<'de, F: LurkField> Deserialize<'de> for Commitment { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - hex::serde::deserialize(deserializer) - } -} - -impl FromHex for Commitment { - type Error = hex::FromHexError; - - fn from_hex(s: T) -> Result::Error> - where - T: AsRef<[u8]>, - { - let mut v = Vec::from_hex(s)?; - v.reverse(); - let mut repr = ::Repr::default(); - repr.as_mut()[..32].copy_from_slice(&v[..]); - - Ok(Commitment { - comm: F::from_repr(repr).unwrap(), - }) - } -} - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -pub struct Expression { - pub expr: LurkPtr, -} - -#[cfg_attr(not(target_arch = "wasm32"), derive(Arbitrary))] -#[cfg_attr(not(target_arch = "wasm32"), proptest(no_bound))] -#[cfg_attr(not(target_arch = "wasm32"), serde_test(types(S1), zdata(true)))] -#[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)] -pub struct Opening { - pub input: String, - pub output: String, - pub status: Status, - pub commitment: Commitment, - pub new_commitment: Option>, -} - -#[cfg_attr(not(target_arch = "wasm32"), derive(Arbitrary))] -#[cfg_attr(not(target_arch = "wasm32"), serde_test(zdata(true)))] -#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)] -pub struct ZBytes { - #[serde(with = "base64")] - z_store: Vec, - #[serde(with = "base64")] - z_ptr: Vec, // can also be a scalar_cont_ptr -} - -#[cfg_attr(not(target_arch = "wasm32"), derive(Arbitrary))] -#[cfg_attr(not(target_arch = "wasm32"), proptest(no_bound))] -#[cfg_attr(not(target_arch = "wasm32"), serde_test(types(S1), zdata(true)))] -#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] -pub struct ZStorePtr { - z_store: ZStore, - z_ptr: ZExprPtr, -} - -#[cfg_attr(not(target_arch = "wasm32"), derive(Arbitrary))] -#[cfg_attr(not(target_arch = "wasm32"), proptest(no_bound))] -#[cfg_attr(not(target_arch = "wasm32"), serde_test(types(S1), zdata(true)))] -#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)] -pub enum LurkPtr { - Source(String), - ZStorePtr(ZStorePtr), -} - -#[cfg_attr(not(target_arch = "wasm32"), derive(Arbitrary))] -#[derive(Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq)] -pub enum LurkCont { - #[default] - Outermost, - Terminal, - Error, -} - -impl Default for LurkPtr { - fn default() -> Self { - Self::Source("nil".to_string()) - } -} - -impl Eq for LurkPtr {} - -#[cfg_attr(not(target_arch = "wasm32"), derive(Arbitrary))] -#[cfg_attr(not(target_arch = "wasm32"), proptest(no_bound))] -#[cfg_attr(not(target_arch = "wasm32"), serde_test(types(S1), zdata(true)))] -#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] -pub struct CommittedExpression { - pub expr: LurkPtr, - #[cfg_attr( - not(target_arch = "wasm32"), - proptest(strategy = "any::>().prop_map(|x| Some(x.0))") - )] - pub secret: Option, - pub commitment: Option>, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct VerificationResult { - pub verified: bool, -} - -#[derive(Serialize, Deserialize)] -pub struct Proof<'a, F: CurveCycleEquipped> -where - < as Group>::Scalar as ff::PrimeField>::Repr: Abomonation, - < as Group>::Scalar as ff::PrimeField>::Repr: Abomonation, -{ - pub claim: Claim, - pub proof: nova::Proof<'a, F, Coproc, MultiFrame<'a, F, Coproc>>, - pub num_steps: usize, - pub reduction_count: ReductionCount, -} - -#[cfg_attr(not(target_arch = "wasm32"), 
derive(Arbitrary))] -#[cfg_attr(not(target_arch = "wasm32"), proptest(no_bound))] -#[cfg_attr(not(target_arch = "wasm32"), serde_test(types(S1), zdata(true)))] -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] -pub enum Claim { - Evaluation(Evaluation), - // TODO: Add Expression type - PtrEvaluation(PtrEvaluation), - Opening(Opening), -} - -impl Deserialize<'de>> Claim { - // Returns the ZPtr corresponding to the claim - pub fn proof_key(&self) -> Result, Error> { - match self { - Claim::Evaluation(eval) => { - // Only keying on input and output for now - let expr_in = ZExprPtr::::from_lurk_str(&eval.expr)?; - let expr_out = ZExprPtr::::from_lurk_str(&eval.expr_out)?; - let expr = ZExpr::Cons(expr_in, expr_out); - Ok(expr.z_ptr(&PoseidonCache::default())) - } - Claim::PtrEvaluation(ptr_eval) => { - let expr_in: ZExprPtr = match &ptr_eval.expr { - LurkPtr::Source(source) => ZExprPtr::::from_lurk_str(source)?, - LurkPtr::ZStorePtr(zsp) => zsp.z_ptr, - }; - let expr_out = match &ptr_eval.expr_out { - LurkPtr::Source(source) => ZExprPtr::::from_lurk_str(source)?, - LurkPtr::ZStorePtr(zsp) => zsp.z_ptr, - }; - let expr = ZExpr::Cons(expr_in, expr_out); - Ok(expr.z_ptr(&PoseidonCache::default())) - } - // TODO: Is this an appropriate key for commitments? - Claim::Opening(open) => { - let expr_in = ZExprPtr::::from_lurk_str(&open.input)?; - let expr_out = ZExprPtr::::from_lurk_str(&open.output)?; - let expr = ZExpr::Cons(expr_in, expr_out); - Ok(expr.z_ptr(&PoseidonCache::default())) - } - } - } -} - -// This is just a rough idea, mostly here so we can plumb it elsewhere. The idea is that a verifier can sign an -// attestation that a given claim's proof was verified. It motivates the use of an online verifier for demo purposes. -// Although real proofs should be fast to verify, they will still be large relative to a small (auditable) bundle like -// this. Even if not entirely realistic, something with this general *shape* is likely to play a role in a recursive -// system where the ability to aggregate proof verification more soundly is possible. -//#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] -pub struct Cert { - pub claim_cid: ZExprPtr, - pub proof_cid: ZExprPtr, - pub verified: bool, - pub verifier_id: String, - pub signature: String, -} - -impl Claim { - pub(crate) fn is_opening(&self) -> bool { - self.opening().is_some() - } - pub fn evaluation(&self) -> Option { - match self { - Self::Evaluation(e) => Some(e.clone()), - _ => None, - } - } - pub(crate) fn ptr_evaluation(&self) -> Option> { - match self { - Self::PtrEvaluation(e) => Some(e.clone()), - _ => None, - } - } - pub fn opening(&self) -> Option> { - match self { - Self::Opening(o) => Some(o.clone()), - _ => None, - } - } -} - -type E = Error; -impl TryFrom for ReductionCount { - type Error = E; - - fn try_from(count: usize) -> Result>::Error> { - match count { - 1 => Ok(ReductionCount::One), - 5 => Ok(ReductionCount::Five), - 10 => Ok(ReductionCount::Ten), - 100 => Ok(ReductionCount::OneHundred), - c => Err(Error::UnsupportedReductionCount(c)), - } - } -} -impl ReductionCount { - pub fn count(&self) -> usize { - match self { - Self::One => 1, - Self::Five => 5, - Self::Ten => 10, - Self::OneHundred => 100, - } - } -} - -impl Evaluation { - fn new( - s: &Store, - input: IO, - output: IO, - iterations: Option, // This might be padded, so is not quite 'iterations' in the sense of number of actual reduction steps required - // to evaluate. 
- ) -> Self { - let status: Status = output.cont.into(); - let terminal = status.is_terminal(); - - // For now, conservatively hide all outputs unless output is terminal. TODO: let evaluator configure this in a - // more fine-grained way, including no hiding. - // NOTE: If anything is hidden, a proof won't be possible. - macro_rules! maybe_hide { - ($x:expr) => { - if terminal { - $x - } else { - "".to_string() - } - }; - } - - let expr = input.expr.fmt_to_string(s, initial_lurk_state()); - let env = input.env.fmt_to_string(s, initial_lurk_state()); - let cont = input.cont.fmt_to_string(s, initial_lurk_state()); - - let expr_out = maybe_hide!(output.expr.fmt_to_string(s, initial_lurk_state())); - let env_out = maybe_hide!(output.env.fmt_to_string(s, initial_lurk_state())); - let cont_out = maybe_hide!(output.cont.fmt_to_string(s, initial_lurk_state())); - - Self { - expr, - env, - cont, - expr_out, - env_out, - cont_out, - status, - iterations, - } - } - - pub fn eval( - store: &Store, - expr: Ptr, - limit: usize, - ) -> Result { - let env = empty_sym_env(store); - let lang = &Lang::>::new(); - let mut evaluator = Evaluator::new(expr, env, store, limit, lang); - - let input = evaluator.initial(); - - let (output, iterations, _) = evaluator.eval().map_err(Error::EvaluationFailure)?; - - Ok(Self::new(store, input, output, Some(iterations))) - } -} - -impl PtrEvaluation { - fn new( - s: &Store, - input: IO, - output: IO, - iterations: Option, // This might be padded, so is not quite 'iterations' in the sense of number of actual reduction steps required - // to evaluate. - ) -> Self { - let status: Status = output.cont.into(); - - // NOTE: We do not implement the `maybe_hide!` logic found in `Evaluation::new()`. That was a speculative design - // unsupported by this patch. In ny case, `Evaluation` and `PtrEvaluation` should be unified in the future, and - // an appropriate hiding mechanism/configuration can be added then. - Self { - expr: LurkPtr::from_ptr(s, &input.expr), - env: LurkPtr::from_ptr(s, &input.env), - cont: LurkCont::from_cont_ptr(s, &input.cont), - expr_out: LurkPtr::from_ptr(s, &output.expr), - env_out: LurkPtr::from_ptr(s, &output.env), - cont_out: LurkCont::from_cont_ptr(s, &output.cont), - status, - iterations, - } - } -} - -impl Commitment { - pub fn from_comm(s: &Store, ptr: &Ptr) -> Result { - assert_eq!(ExprTag::Comm, ptr.tag); - - let digest = *s - .hash_expr(ptr) - .ok_or_else(|| Error::UnknownCommitment)? - .value(); - - Ok(Commitment { comm: digest }) - } - - pub(crate) fn ptr(&self, s: &Store) -> Ptr { - s.intern_opaque_comm(self.comm) - } - - pub fn from_ptr_with_hiding(s: &Store, ptr: &Ptr) -> Result<(Self, F), Error> { - let secret = F::random(OsRng); - - let commitment = Self::from_ptr_and_secret(s, ptr, secret)?; - - Ok((commitment, secret)) - } - - pub fn from_ptr_and_secret(s: &Store, ptr: &Ptr, secret: F) -> Result { - let hidden = s.hide(secret, *ptr); - - Self::from_comm(s, &hidden) - } - - // Importantly, this ensures the function and secret are in the Store, s. 
- fn construct_with_fun_application( - s: &Store, - function: &CommittedExpression, - input: Ptr, - limit: usize, - lang: &Lang>, - ) -> Result<(Self, Ptr), Error> { - let fun_ptr = function.expr_ptr(s, limit, lang)?; - let secret = function.secret.expect("CommittedExpression secret missing"); - - let commitment = Self::from_ptr_and_secret(s, &fun_ptr, secret)?; - - let open = lurk_sym_ptr!(s, open); - let comm_ptr = s.hide(secret, fun_ptr); - - // (open ) - let fun_expr = s.list(&[open, comm_ptr]); - - // ((open ) input) - let expression = s.list(&[fun_expr, input]); - - Ok((commitment, expression)) - } - - fn fun_application(&self, s: &Store, input: Ptr) -> Ptr { - let open = lurk_sym_ptr!(s, open); - let comm_ptr = self.ptr(s); - - // (open ) - let fun_expr = s.list(&[open, comm_ptr]); - - // ((open commitment) input) - s.list(&[fun_expr, input]) - } -} - -impl CommittedExpression { - pub fn expr_ptr( - &self, - s: &Store, - limit: usize, - lang: &Lang>, - ) -> Result, Error> { - let source_ptr = self.expr.ptr(s, limit, lang); - - Ok(source_ptr) - } -} - -impl LurkPtr { - pub fn ptr(&self, s: &Store, limit: usize, lang: &Lang>) -> Ptr { - match self { - LurkPtr::Source(source) => { - let ptr = s.read(source).expect("could not read source"); - assert!(!ptr.raw.is_opaque()); - let (out, _) = evaluate(s, ptr, None, limit, lang).unwrap(); - - out.expr - } - LurkPtr::ZStorePtr(z_store_ptr) => { - let z_store = &z_store_ptr.z_store; - let z_ptr = z_store_ptr.z_ptr; - s.intern_z_expr_ptr(&z_ptr, z_store) - .expect("failed to intern z_ptr") - } - } - } - - pub fn from_ptr(s: &Store, ptr: &Ptr) -> Self { - let (z_store, z_ptr) = ZStore::new_with_expr(s, ptr); - let z_ptr = z_ptr.unwrap(); - Self::ZStorePtr(ZStorePtr { z_store, z_ptr }) - } -} - -impl LurkCont { - pub fn cont_ptr( - &self, - s: &Store, - ) -> ContPtr { - match self { - Self::Outermost => s.get_cont_outermost(), - Self::Terminal => s.get_cont_terminal(), - Self::Error => s.get_cont_error(), - } - } - - pub fn from_cont_ptr(_s: &Store, cont_ptr: &ContPtr) -> Self { - use lurk::tag::ContTag; - - match cont_ptr.tag { - ContTag::Outermost => Self::Outermost, - ContTag::Terminal => Self::Terminal, - ContTag::Error => Self::Error, - _ => panic!("unsupported continuation"), - } - } -} - -impl Expression { - pub fn eval( - &self, - s: &Store, - limit: usize, - lang: &Lang>, - ) -> Result, Error> { - let expr = self.expr.ptr(s, limit, lang); - let (io, _iterations) = evaluate(s, expr, None, limit, lang)?; - - Ok(io.expr) - } -} - -impl<'a> Opening { - pub fn apply_and_prove( - s: &'a mut Store, - input: Ptr, - function: &CommittedExpression, - limit: usize, - chain: bool, - only_use_cached_proofs: bool, - nova_prover: &'a NovaProver<'_, S1, Coproc, MultiFrame<'a, S1, Coproc>>, - pp: &'a PublicParams>>, - lang: &Arc>>, - ) -> Result, Error> { - let claim = Self::apply(s, input, function, limit, chain, lang)?; - Proof::prove_claim( - s, - &claim, - limit, - only_use_cached_proofs, - nova_prover, - pp, - lang, - ) - } - - pub fn open_and_prove( - s: &'a mut Store, - request: &OpeningRequest, - limit: usize, - only_use_cached_proofs: bool, - nova_prover: &'a NovaProver<'_, S1, Coproc, MultiFrame<'a, S1, Coproc>>, - pp: &'a PublicParams>>, - lang: &Arc>>, - ) -> Result, Error> { - let input = request.input.expr.ptr(s, limit, lang); - let commitment = request.commitment; - - let function_map = committed_expression_store(); - let function = function_map - .get(&commitment) - .ok_or(Error::UnknownCommitment)?; - - Self::apply_and_prove( - s, - 
input, - &function, - limit, - request.chain, - only_use_cached_proofs, - nova_prover, - pp, - lang, - ) - } - - pub fn open( - s: &Store, - request: &OpeningRequest, - limit: usize, - chain: bool, - lang: &Lang>, - ) -> Result, Error> { - let input = request.input.expr.ptr(s, limit, lang); - let commitment = request.commitment; - - let function_map = committed_expression_store(); - let function = function_map - .get(&commitment) - .ok_or(Error::UnknownCommitment)?; - - Self::apply(s, input, &function, limit, chain, lang) - } - - fn _is_chained(&self) -> bool { - self.new_commitment.is_some() - } - - fn public_output_expression(&self, s: &Store) -> Ptr { - let result = s.read(&self.output).expect("unreadable result"); - - if let Some(commitment) = self.new_commitment { - let c = commitment.ptr(s); - - s.cons(result, c) - } else { - result - } - } - - pub fn apply( - s: &Store, - input: Ptr, - function: &CommittedExpression, - limit: usize, - chain: bool, - lang: &Lang>, - ) -> Result, Error> { - let (commitment, expression) = - Commitment::construct_with_fun_application(s, function, input, limit, lang)?; - let (public_output, _iterations) = evaluate(s, expression, None, limit, lang)?; - - let (new_commitment, output_expr) = if chain { - let cons = public_output.expr; - let result_expr = s.car(&cons)?; - let new_comm = s.cdr(&cons)?; - - let new_secret0 = s.secret(new_comm).expect("secret missing"); - let new_secret = *s.hash_expr(&new_secret0).expect("hash missing").value(); - - let (_, new_fun) = s.open(new_comm).expect("opening missing"); - let new_commitment = Commitment::from_comm(s, &new_comm)?; - - s.hydrate_scalar_cache(); - - let expr = LurkPtr::from_ptr(s, &new_fun); - - let new_function = CommittedExpression:: { - expr, - secret: Some(new_secret), - commitment: Some(new_commitment), - }; - - let function_map = committed_expression_store(); - function_map.set(&new_commitment, &new_function)?; - assert_eq!(new_function, function_map.get(&new_commitment).unwrap()); - - (Some(new_commitment), result_expr) - } else { - (None, public_output.expr) - }; - - let input_string = input.fmt_to_string(s, initial_lurk_state()); - let status = - as Evaluable, Coproc>>::status(&public_output); - let output_string = if status.is_terminal() { - // Only actual output if result is terminal. - output_expr.fmt_to_string(s, initial_lurk_state()) - } else { - // We don't want to leak any internal information in the case of incomplete computations. - // Provers might want to expose results in the case of explicit errors. - // For now, don't -- but consider allowing it as an option. - "".to_string() - }; - - let claim = Claim::Opening(Opening { - commitment, - new_commitment, - input: input_string, - output: output_string, - status, - }); - - Ok(claim) - } -} - -impl<'a> Proof<'a, S1> { - pub fn eval_and_prove( - s: &'a Store, - expr: Ptr, - supplied_env: Option>, - limit: usize, - only_use_cached_proofs: bool, - nova_prover: &'a NovaProver<'_, S1, Coproc, MultiFrame<'a, S1, Coproc>>, - pp: &'a PublicParams>>, - lang: &Arc>>, - ) -> Result { - let env = supplied_env.unwrap_or_else(|| empty_sym_env(s)); - let cont = s.intern_cont_outermost(); - let input = IO { expr, env, cont }; - - // TODO: It's a little silly that we evaluate here, but evaluation is also repeated in `NovaProver::evaluate_and_prove()`. - // Refactor to avoid that. 
- let (public_output, _iterations) = evaluate(s, expr, supplied_env, limit, lang)?; - - let claim = if supplied_env.is_some() { - // This is a bit of a hack, but the idea is that if the env was supplied it's likely to contain a literal function, - // which we will not be able to read. Therefore, we should not produce a string-based claim. - let ptr_evaluation = PtrEvaluation::new(s, input, public_output, None); - Claim::PtrEvaluation(ptr_evaluation) - } else { - let evaluation = Evaluation::new(s, input, public_output, None); - Claim::Evaluation(evaluation) - }; - - Self::prove_claim( - s, - &claim, - limit, - only_use_cached_proofs, - nova_prover, - pp, - lang, - ) - } - - pub fn prove_claim( - s: &'a Store, - claim: &Claim, - limit: usize, - only_use_cached_proofs: bool, - nova_prover: &'a NovaProver<'_, S1, Coproc, MultiFrame<'a, S1, Coproc>>, - pp: &'a PublicParams>>, - lang: &Arc>>, - ) -> Result { - let reduction_count = nova_prover.reduction_count(); - - let proof_map = nova_proof_cache(reduction_count); - let function_map = committed_expression_store(); - - let key = claim.proof_key()?.to_base32(); - - if let Some(proof) = proof_map.get(&key) { - return Ok(proof); - } - - // FIXME: Error handling. - assert!(!only_use_cached_proofs, "no cached proof"); - - info!("Starting Proving"); - - let (expr, env) = match &claim { - Claim::Evaluation(e) => ( - s.read(&e.expr).expect("bad expression"), - s.read(&e.env).expect("bad env"), - ), - Claim::PtrEvaluation(e) => (e.expr.ptr(s, limit, lang), e.env.ptr(s, limit, lang)), - Claim::Opening(o) => { - let commitment = o.commitment; - - // In order to prove the opening, we need access to the original function. - let function = function_map - .get(&commitment) - .expect("function for commitment missing"); - - let input = s.read(&o.input).expect("bad expression"); - let (c, expression) = - Commitment::construct_with_fun_application(s, &function, input, limit, lang)?; - - assert_eq!(commitment, c); - (expression, empty_sym_env(s)) - } - }; - - let (proof, _public_input, _public_output, num_steps) = nova_prover - .evaluate_and_prove(pp, expr, env, s, limit, lang) - .expect("Nova proof failed"); - - let proof = Self { - claim: claim.clone(), - proof, - num_steps, - reduction_count: ReductionCount::try_from(reduction_count)?, - }; - - match &claim { - Claim::Opening(o) => { - if o.status != Status::Terminal { - return Err(Error::OpeningFailure("Claim status is not Terminal".into())); - }; - } - Claim::Evaluation(e) => { - if e.status != Status::Terminal { - return Err(Error::EvaluationFailure(ReductionError::Misc( - "nonterminal status".into(), - ))); - }; - } - Claim::PtrEvaluation(e) => { - if e.status != Status::Terminal { - return Err(Error::EvaluationFailure(ReductionError::Misc( - "nonterminal status".into(), - ))); - } - } - }; - - proof.verify(pp, lang).expect("Nova verification failed"); - - proof_map.set(&key, &proof).unwrap(); - - Ok(proof) - } - - pub fn verify( - &self, - pp: &PublicParams>>, - lang: &Lang>, - ) -> Result { - let (public_inputs, public_outputs) = self.io_vecs(lang)?; - - let claim_iterations_and_num_steps_are_consistent = if let Claim::Evaluation(Evaluation { - iterations: Some(iterations), - .. - }) = self.claim - { - // Currently, claims created by fcomm don't include the iteration count. If they do, then it should be - // possible to verify correctness. This may require making the iteration count explicit in the public - // output. 
That will allow maintaining iteration count without incrementing during frames added as - // padding; and it will also allow explicitly masking the count when desired for zero-knowledge. - // Meanwhile, since Nova currently requires the number of steps to be provided by the verifier, we have - // to provide it. For now, we should at least be able to calculate this value based on number of real - // iterations and number of frames per circuit. This is untested and mostly a placeholder to remind us - // that all of this will need to be handled in a more principled way eventually. (#282) - - let num_steps = self.num_steps; - - let chunk_frame_count = self.reduction_count.count(); - let expected_steps = - (iterations / chunk_frame_count) + usize::from(iterations % chunk_frame_count != 0); - - expected_steps == num_steps - } else { - true - }; - - let verified = claim_iterations_and_num_steps_are_consistent - && self - .proof - .verify(pp, self.num_steps, &public_inputs, &public_outputs) - .expect("error verifying"); - - let result = VerificationResult::new(verified); - - Ok(result) - } - - pub fn evaluation_io(&self, s: &Store) -> Result<(IO, IO), Error> { - let evaluation = &self.claim.evaluation().expect("expected evaluation claim"); - - let input_io = { - let expr = s - .read(&evaluation.expr) - .map_err(|e| Error::VerificationError(format!("failed to read expr: {}", e)))?; - - let env = s - .read(&evaluation.env) - .map_err(|e| Error::VerificationError(format!("failed to read env: {}", e)))?; - - // FIXME: We ignore cont and assume Outermost, since we can't read a Cont. - let cont = s.intern_cont_outermost(); - - IO:: { expr, env, cont } - }; - - let output_io = { - let expr = s - .read(&evaluation.expr_out) - .map_err(|e| Error::VerificationError(format!("failed to read expr out: {}", e)))?; - - let env = s - .read(&evaluation.env_out) - .map_err(|e| Error::VerificationError(format!("failed to read env out: {}", e)))?; - let cont = evaluation - .status - .to_cont(s) - .ok_or_else(|| Error::VerificationError("continuation cannot be proved".into()))?; - - IO:: { expr, env, cont } - }; - - Ok((input_io, output_io)) - } - - pub fn ptr_evaluation_io( - &self, - s: &Store, - lang: &Lang>, - ) -> Result<(IO, IO), Error> { - let ptr_evaluation = &self - .claim - .ptr_evaluation() - .expect("expected PtrEvaluation claim"); - - let input_io = { - let expr = ptr_evaluation.expr.ptr(s, 0, lang); // limit is unneeded because we will not eval. we already have the ptr. 
- let env = ptr_evaluation.env.ptr(s, 0, lang); - let cont = ptr_evaluation.cont.cont_ptr(s); - - IO:: { expr, env, cont } - }; - - let output_io = { - let expr = ptr_evaluation.expr_out.ptr(s, 0, lang); - let env = ptr_evaluation.env_out.ptr(s, 0, lang); - let cont = ptr_evaluation.cont_out.cont_ptr(s); - - IO:: { expr, env, cont } - }; - - Ok((input_io, output_io)) - } - - pub fn opening_io(&self, s: &Store) -> Result<(IO, IO), Error> { - assert!(self.claim.is_opening()); - - let opening = self.claim.opening().expect("expected opening claim"); - let output = opening.public_output_expression(s); - let input = s.read(&opening.input).expect("could not read input"); - - let expression = opening.commitment.fun_application(s, input); - let outermost = s.intern_cont_outermost(); - - let input_io = IO:: { - expr: expression, - env: empty_sym_env(s), - cont: outermost, - }; - - let output_io = IO:: { - expr: output, - env: empty_sym_env(s), - cont: s.intern_cont_terminal(), - }; - - Ok((input_io, output_io)) - } - - pub fn io( - &self, - s: &Store, - lang: &Lang>, - ) -> Result<(IO, IO), Error> { - match self.claim { - Claim::Evaluation(_) => self.evaluation_io(s), - Claim::PtrEvaluation(_) => self.ptr_evaluation_io(s, lang), - Claim::Opening(_) => self.opening_io(s), - } - } - - fn io_vecs(&self, lang: &Lang>) -> Result<(Vec, Vec), Error> { - let s = &mut Store::::default(); - - self.io(s, lang) - .map(|(i, o)| (i.to_inputs(s), o.to_inputs(s))) - } -} - -impl VerificationResult { - fn new(verified: bool) -> Self { - Self { verified } - } -} - -pub fn evaluate( - store: &Store, - expr: Ptr, - supplied_env: Option>, - limit: usize, - lang: &Lang>, -) -> Result<(IO, usize), Error> { - let env = supplied_env.unwrap_or_else(|| empty_sym_env(store)); - let mut evaluator = Evaluator::new(expr, env, store, limit, lang); - - let (io, iterations, _) = evaluator.eval().map_err(Error::EvaluationFailure)?; - - assert!( as Evaluable, Coproc>>::is_terminal(&io)); - Ok((io, iterations)) -} - -#[cfg(test)] -mod test { - use super::*; - use crate::file_map::FileStore; - use camino::Utf8Path; - use insta::assert_json_snapshot; - use lurk::public_parameters::instance::{Instance, Kind}; - use std::path::Path; - use std::sync::Arc; - use tempfile::Builder; - - use lurk::eval::lang::{Coproc, Lang}; - use lurk::proof::{nova::NovaProver, Prover}; - use lurk::public_parameters::public_params; - - // ## Intent - // - // Those tests are intended as a trip-wire for changes that modify the serialized format of the data structures - // involved in fcomm tests. Fcomm already has tests depending on those files (see tests/makefile_tests.rs), but - // those tests are expensive to run. Those snapshot tests instead are cheap to run, and will fail if the format - // of serialized data structures changes. They will not detect a change in the file location of the Makefile tests. - // - // ## If you broke this test - // - // You have broken a snapshot test. Unlike round-trip tests, those tests check the actual format of serialized Lurk expressions, - // and since you broke one, it's probable that you have changed that format, which will break at least the fcomm examples.
- // Please read the documentation on snapshot tests `https://insta.rs/docs/quickstart/`, fix the snapshot **AND** - // make sure `cargo nextest run --run-ignored all -E 'test(test_make_fcomm_examples)'` passes - #[test] - fn test_snapshot_serialized_expressions() { - let function_source: &str = "(letrec ((secret 12345) (a (lambda (acc x) (let ((acc (+ acc x))) (cons acc (hide secret (a acc))))))) (a 0))"; - let function_inputs: &str = "(+ 1 2)"; - let committed_expression = CommittedExpression:: { - expr: LurkPtr::Source(function_source.into()), - secret: None, - commitment: None, - }; - assert_json_snapshot!(committed_expression); - - let input = Expression:: { - expr: LurkPtr::Source(function_inputs.into()), - }; - assert_json_snapshot!(input); - - let c = Commitment { - comm: S1::from(123), - }; - assert_json_snapshot!(c); - - let req = OpeningRequest { - input, - commitment: c, - chain: true, - }; - assert_json_snapshot!(req); - - let opening = Opening { - input: function_inputs.to_owned(), - output: function_inputs.to_owned(), - status: Status::Error, - commitment: c, - new_commitment: None, - }; - assert_json_snapshot!(opening); - } - - #[test] - fn test_cert_serialization() { - use serde_json::json; - - let c = Commitment { - comm: S1::from(123), - }; - - let cid = ZExprPtr::from_parts(ExprTag::Comm, c.comm); - let cert = Cert { - claim_cid: cid, - proof_cid: cid, - verified: true, - verifier_id: "asdf".to_string(), - signature: "fdsa".to_string(), - }; - let json = json!(cert); - - let string = json.to_string(); - - let cert_again: Cert = serde_json::from_str(&string).unwrap(); - assert_eq!(cert, cert_again); - } - - // Minimal chained functional commitment test - #[test] - fn lurk_chained_functional_commitment() { - let fcomm_path_key = "FCOMM_DATA_PATH"; - let tmp_dir = Builder::new().prefix("tmp").tempdir().expect("tmp dir"); - let tmp_dir_path = Utf8Path::from_path(tmp_dir.path()).unwrap(); - let fcomm_path_val = tmp_dir_path.join("fcomm_data"); - - std::env::set_var(fcomm_path_key, &fcomm_path_val); - std::env::set_var( - "LURK_PUBLIC_PARAMS_DIR", - fcomm_path_val.join("public_params"), - ); - assert_eq!( - std::env::var(fcomm_path_key).unwrap(), - fcomm_path_val.as_str() - ); - - let function_source = "(letrec ((secret 12345) (a (lambda (acc x) (let ((acc (+ acc x))) (cons acc (hide secret (a acc))))))) (a 0))"; - let expected_io = [("5", "5"), ("3", "8")]; - - let mut function = CommittedExpression:: { - expr: LurkPtr::Source(function_source.into()), - secret: None, - commitment: None, - }; - - let limit = 1000; - let lang = Lang::new(); - let lang_rc = Arc::new(lang.clone()); - let rc = ReductionCount::One; - let instance = Instance::new(rc.count(), lang_rc.clone(), true, Kind::NovaPublicParams); - let pp = public_params(&instance).expect("public params"); - let chained = true; - let s = &mut Store::::default(); - - let io = expected_io.iter(); - - let fun_ptr = function.expr_ptr(s, limit, &lang).expect("fun_ptr"); - - let (mut commitment, secret) = Commitment::from_ptr_with_hiding(s, &fun_ptr).unwrap(); - - function.secret = Some(secret); - function.commitment = Some(commitment); - - let function_map = committed_expression_store(); - function_map - .set(&commitment, &function) - .expect("function_map set"); - - for (function_input, _expected_output) in io { - let prover = NovaProver::<'_, S1, Coproc, MultiFrame<'_, S1, Coproc>>::new( - rc.count(), - lang.clone(), - ); - - let input = s.read(function_input).expect("Read error"); - - let proof = Opening::apply_and_prove( - 
s, input, &function, limit, chained, false, &prover, &pp, &lang_rc, - ) - .expect("apply and prove"); - - proof.verify(&pp, &lang_rc).expect("Failed to verify"); - - let opening = proof.claim.opening().expect("expected opening claim"); - - match opening.new_commitment { - Some(c) => commitment = c, - _ => panic!("new commitment missing"), - } - println!("Commitment: {commitment:?}"); - } - } - - proptest! { - #[test] - fn prop_z_bytes(x in any::()) { - let tmp_dir = Builder::new().prefix("tmp").tempdir().expect("tmp dir"); - let tmp_dir_path = Path::new(tmp_dir.path()); - let z_bytes_path = tmp_dir_path.join("zbytes.json"); - x.write_to_path(&z_bytes_path); - assert_eq!(x, ZBytes::read_from_path(&z_bytes_path).unwrap()); - } - } - - proptest! { - #[test] - fn prop_z_store_ptr(x in any::>()) { - let tmp_dir = Builder::new().prefix("tmp").tempdir().expect("tmp dir"); - let tmp_dir_path = Path::new(tmp_dir.path()); - let z_store_ptr_path = tmp_dir_path.join("zstoreptr.json"); - x.write_to_path(&z_store_ptr_path); - assert_eq!(x, ZStorePtr::::read_from_path(&z_store_ptr_path).unwrap()); - } - } - - proptest! { - #[test] - fn prop_lurk_ptr(x in any::>()) { - let tmp_dir = Builder::new().prefix("tmp").tempdir().expect("tmp dir"); - let tmp_dir_path = Path::new(tmp_dir.path()); - let lurk_ptr_path = tmp_dir_path.join("lurkptr.json"); - x.write_to_path(&lurk_ptr_path); - assert_eq!(x, LurkPtr::::read_from_path(&lurk_ptr_path).unwrap()); - } - } - - proptest! { - #[test] - fn prop_ptr_evaluation(x in any::>()) { - let tmp_dir = Builder::new().prefix("tmp").tempdir().expect("tmp dir"); - let tmp_dir_path = Path::new(tmp_dir.path()); - let ptr_evaluation_path = tmp_dir_path.join("ptrevaluation.json"); - x.write_to_path(&ptr_evaluation_path); - assert_eq!(x, PtrEvaluation::::read_from_path(&ptr_evaluation_path).unwrap()); - } - } - - proptest! { - #[test] - fn prop_committed_expr(x in any::>()) { - let tmp_dir = Builder::new().prefix("tmp").tempdir().expect("tmp dir"); - let tmp_dir_path = Path::new(tmp_dir.path()); - let committed_expr_path = tmp_dir_path.join("committedexpr.json"); - x.write_to_path(&committed_expr_path); - assert_eq!(x, CommittedExpression::::read_from_path(&committed_expr_path).unwrap()); - } - } - - proptest! { - #[test] - fn prop_opening(x in any::>()) { - let tmp_dir = Builder::new().prefix("tmp").tempdir().expect("tmp dir"); - let tmp_dir_path = Path::new(tmp_dir.path()); - let opening_path = tmp_dir_path.join("opening.json"); - x.write_to_path(&opening_path); - assert_eq!(x, Opening::::read_from_path(&opening_path).unwrap()); - } - } - - proptest! 
{ - #[test] - fn prop_claim(x in any::>()) { - let tmp_dir = Builder::new().prefix("tmp").tempdir().expect("tmp dir"); - let tmp_dir_path = Path::new(tmp_dir.path()); - let claim_path = tmp_dir_path.join("claim.json"); - x.write_to_path(&claim_path); - assert_eq!(x, Claim::::read_from_path(&claim_path).unwrap()); - } - } -} diff --git a/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-2.snap b/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-2.snap deleted file mode 100644 index 3e20e5cd25..0000000000 --- a/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-2.snap +++ /dev/null @@ -1,9 +0,0 @@ ---- -source: fcomm/src/lib.rs -expression: input ---- -{ - "expr": { - "Source": "(+ 1 2)" - } -} diff --git a/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-3.snap b/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-3.snap deleted file mode 100644 index a100cb801c..0000000000 --- a/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-3.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: fcomm/src/lib.rs -expression: c ---- -"000000000000000000000000000000000000000000000000000000000000007b" diff --git a/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-4.snap b/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-4.snap deleted file mode 100644 index 424e842d04..0000000000 --- a/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-4.snap +++ /dev/null @@ -1,13 +0,0 @@ ---- -source: fcomm/src/lib.rs -expression: req ---- -{ - "commitment": "000000000000000000000000000000000000000000000000000000000000007b", - "input": { - "expr": { - "Source": "(+ 1 2)" - } - }, - "chain": true -} diff --git a/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-5.snap b/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-5.snap deleted file mode 100644 index 40265b6ad3..0000000000 --- a/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions-5.snap +++ /dev/null @@ -1,11 +0,0 @@ ---- -source: fcomm/src/lib.rs -expression: opening ---- -{ - "input": "(+ 1 2)", - "output": "(+ 1 2)", - "status": "Error", - "commitment": "000000000000000000000000000000000000000000000000000000000000007b", - "new_commitment": null -} diff --git a/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions.snap b/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions.snap deleted file mode 100644 index 69c6b22f01..0000000000 --- a/fcomm/src/snapshots/fcomm__test__snapshot_serialized_expressions.snap +++ /dev/null @@ -1,11 +0,0 @@ ---- -source: fcomm/src/lib.rs -expression: committed_expression ---- -{ - "expr": { - "Source": "(letrec ((secret 12345) (a (lambda (acc x) (let ((acc (+ acc x))) (cons acc (hide secret (a acc))))))) (a 0))" - }, - "secret": null, - "commitment": null -} diff --git a/fcomm/tests/makefile_tests.rs b/fcomm/tests/makefile_tests.rs deleted file mode 100644 index 9d23e755a1..0000000000 --- a/fcomm/tests/makefile_tests.rs +++ /dev/null @@ -1,47 +0,0 @@ -use std::env; -use std::path::PathBuf; -use std::process::Command; - -#[ignore] -#[test] -fn test_make_fcomm_examples() { - // Get the current working directory - let mut current_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - - // Find the examples directory - current_dir.push("examples"); - let examples_dir = current_dir.as_path(); - assert!( - examples_dir.exists() && examples_dir.is_dir(), - "Failed to find the fcomm examples directory" - ); - - // Make clean - let make_clean_output = 
Command::new("make") - .arg("clean") - .current_dir(examples_dir) - .output() - .expect("Failed to run the make command, is make installed?"); - - assert!( - make_clean_output.status.success(), - "Make command exited with an error: {}", - String::from_utf8_lossy(&make_clean_output.stderr) - ); - - // Run the make command in the examples directory - let cpus = num_cpus::get(); - - let make_output = Command::new("make") - .current_dir(examples_dir) - .arg(format!("-j{cpus}")) - .output() - .expect("Failed to run the make command, is make installed?"); - - // Check the exit status - assert!( - make_output.status.success(), - "Make command exited with an error: {}", - String::from_utf8_lossy(&make_output.stderr) - ); -} diff --git a/fcomm/tests/proof_tests.rs b/fcomm/tests/proof_tests.rs deleted file mode 100644 index d8a34a55ac..0000000000 --- a/fcomm/tests/proof_tests.rs +++ /dev/null @@ -1,331 +0,0 @@ -use assert_cmd::prelude::*; -use lurk::state::initial_lurk_state; -use predicates::prelude::*; -use std::fs::File; -use std::io::Write; -use std::process::Command; -use tempfile::{Builder, TempDir}; - -use pasta_curves::pallas; - -use fcomm::{file_map::FileStore, Commitment, CommittedExpression, LurkPtr, Proof}; -use lurk::store::Store; - -use camino::Utf8Path; - -pub type S1 = pallas::Scalar; - -fn fcomm_cmd() -> std::process::Command { - Command::cargo_bin("fcomm").unwrap() -} - -#[test] -fn test_bad_command() { - let mut cmd = fcomm_cmd(); - - cmd.arg("uiop"); - cmd.assert() - .failure() - .stderr(predicate::str::contains("unrecognized subcommand \'uiop\'")); -} - -#[test] -fn test_eval_expression() { - let mut cmd = fcomm_cmd(); - - let expression = "((lambda (a b) (+ (* a 3) b)) 9 7)"; - - let tmp_dir = Builder::new().prefix("tmp").tempdir().unwrap(); - let expression_path = tmp_dir.path().join("expression.lurk"); - - let mut expression_file = File::create(&expression_path).unwrap(); - write!(expression_file, "{expression}").unwrap(); - - cmd.arg("eval") - .arg("--expression") - .arg(expression_path) - .arg("--lurk"); - - cmd.assert() - .success() - .stdout("{\"expr\":\"((lambda (.lurk.user.a .lurk.user.b) (+ (* .lurk.user.a 3) .lurk.user.b)) 9 7)\",\"env\":\"nil\",\"cont\":\"Outermost\",\"expr_out\":\"34\",\"env_out\":\"nil\",\"cont_out\":\"Terminal\",\"status\":\"Terminal\",\"iterations\":17}"); -} - -fn test_prove_expression( - cmd: &mut Command, - expression_path: &Utf8Path, - proof_path: &Utf8Path, - data_path: &Utf8Path, -) { - cmd.env("fcomm_data_path", data_path) - .arg("prove") - .arg("--expression") - .arg(expression_path) - .arg("--proof") - .arg(proof_path) - .arg("--lurk"); - - cmd.assert().success(); -} - -fn test_open_commitment( - mut cmd: Command, - commitment: String, - input_path: &Utf8Path, - proof_path: &Utf8Path, - data_path: &Utf8Path, - chained: bool, -) { - cmd.env("fcomm_data_path", data_path) - .arg("open") - .arg("--commitment") - .arg(commitment) - .arg("--input") - .arg(input_path) - .arg("--proof") - .arg(proof_path); - - if chained { - cmd.arg("--chain"); - }; - - cmd.assert().success(); -} - -fn test_verify_expression_proof(mut cmd: Command, proof_path: &Utf8Path, _data_path: &Utf8Path) { - cmd.arg("verify").arg("--proof").arg(proof_path); - - cmd.assert().success().stdout("{\"verified\":true}"); -} - -fn test_verify_opening(mut cmd: Command, proof_path: &Utf8Path, _data_path: &Utf8Path) { - cmd.arg("verify").arg("--proof").arg(proof_path); - - cmd.assert().success().stdout("{\"verified\":true}"); -} - -#[test] -#[ignore] -fn 
test_prove_and_verify_expression() { - let expression = "(* 9 7)"; - let expected = "63"; - - let tmp_dir = Builder::new().prefix("tmp").tempdir().unwrap(); - let tmp_dir_path = Utf8Path::from_path(tmp_dir.path()).unwrap(); - let proof_path = tmp_dir_path.join("proof.json"); - let fcomm_data_path = tmp_dir_path.join("fcomm_data"); - let expression_path = tmp_dir_path.join("expression.lurk"); - - let mut expression_file = File::create(&expression_path).unwrap(); - write!(expression_file, "{expression}").unwrap(); - - { - test_prove_expression( - &mut fcomm_cmd(), - &expression_path, - &proof_path, - &fcomm_data_path, - ); - - let proof = Proof::::read_from_json_path(&proof_path).unwrap(); - - assert_eq!( - proof - .claim - .evaluation() - .expect("expected evaluation claim") - .expr_out, - expected - ); - } - - test_verify_expression_proof(fcomm_cmd(), &proof_path, &fcomm_data_path); -} - -fn commit(function_path: &Utf8Path, commitment_path: &Utf8Path, data_path: &Utf8Path) { - let mut cmd = fcomm_cmd(); - cmd.env("fcomm_data_path", data_path) - .arg("commit") - .arg("--function") - .arg(function_path) - .arg("--commitment") - .arg(commitment_path) - .assert() - .success(); -} - -fn test_create_open_and_verify_functional_commitment_aux( - function_source: &str, - function_input: &str, - expected_output: &str, -) { - let tmp_dir = Builder::new().prefix("tmp").tempdir().unwrap(); - - test_aux( - function_source, - &[(function_input, expected_output)], - false, - &tmp_dir, - ); -} - -fn test_create_open_and_verify_chained_functional_commitment_aux( - function_source: &str, - expected_io: &[(&str, &str)], -) { - let tmp_dir = Builder::new().prefix("tmp").tempdir().expect("tmp dir"); - - test_aux(function_source, expected_io, true, &tmp_dir); -} - -fn test_aux(function_source: &str, expected_io: &[(&str, &str)], chained: bool, tmp_dir: &TempDir) { - let function = CommittedExpression:: { - expr: LurkPtr::Source(function_source.into()), - secret: None, - commitment: None, - }; - - test_function_aux(&function, expected_io, chained, tmp_dir) -} - -fn test_function_aux( - function: &CommittedExpression, - expected_io: &[(&str, &str)], - chained: bool, - tmp_dir: &TempDir, -) { - use lurk::writer::Write; - - let io = expected_io.iter(); - - let tmp_dir_path = Utf8Path::from_path(tmp_dir.path()).expect("tmp dir path"); - let proof_path = tmp_dir_path.join("proof.json"); - let function_path = tmp_dir_path.join("function.json"); - let input_path = tmp_dir_path.join("input.lurk"); - let commitment_path = tmp_dir_path.join("commitment.json"); - let fcomm_data_path = tmp_dir_path.join("fcomm_data"); - - function.write_to_json_path(&function_path); - - commit(&function_path, &commitment_path, &fcomm_data_path); - - let mut commitment: Commitment = - Commitment::read_from_json_path(&commitment_path).expect("read commitment"); - - for (function_input, expected_output) in io { - let mut input_file = File::create(&input_path).expect("create file"); - - write!(input_file, "{function_input}").expect("write file"); - - test_open_commitment( - fcomm_cmd(), - commitment.to_string(), - &input_path, - &proof_path, - &fcomm_data_path, - chained, - ); - - let proof = Proof::::read_from_json_path(&proof_path).expect("read proof"); - let opening = proof.claim.opening().expect("expected opening claim"); - - let store = Store::::default(); - - let input = store.read(function_input).expect("store read"); - let canonical_input = input.fmt_to_string(&store, initial_lurk_state()); - - let canonical_output = store - 
.read(expected_output) - .expect("store read") - .fmt_to_string(&store, initial_lurk_state()); - - assert_eq!(canonical_input, opening.input); - assert_eq!(*expected_output, canonical_output); - - test_verify_opening(fcomm_cmd(), &proof_path, &fcomm_data_path); - - if chained { - match opening.new_commitment { - Some(c) => commitment = c, - _ => panic!("new commitment missing"), - } - } - } -} - -#[test] -#[ignore] -fn test_create_open_and_verify_functional_commitment() { - let function_source = "(lambda (x) (+ x 3))"; - let function_input = "22"; - let expected_output = "25"; - test_create_open_and_verify_functional_commitment_aux( - function_source, - function_input, - expected_output, - ); -} - -#[test] -#[ignore] -fn test_create_open_and_verify_higher_order_functional_commitment() { - let function_source = "(lambda (f) (+ (f 3) 1))"; - let function_input = "(lambda (x) (* x 5))"; - let expected_output = "16"; - test_create_open_and_verify_functional_commitment_aux( - function_source, - function_input, - expected_output, - ); -} - -#[test] -#[ignore] -fn test_create_open_and_verify_chained_functional_commitment() { - let function_source = "(letrec ((secret 12345) (a (lambda (acc x) (let ((acc (+ acc x))) (cons acc (hide secret (a acc))))))) (a 0))"; - - let expected_io = [("5", "5"), ("3", "8")]; - - test_create_open_and_verify_chained_functional_commitment_aux(function_source, &expected_io); -} - -#[test] -#[ignore] -fn test_create_open_and_verify_complicated_higher_order_functional_commitment1() { - let function_source = "(let ((nums '(1 2 3 4 5))) (lambda (f) (f nums)))"; - let function_input = "(letrec ((sum-aux (lambda (acc nums) - (if nums - (sum-aux (+ acc (car nums)) (cdr nums)) - acc))) - (sum (sum-aux 0))) - (lambda (nums) - (sum nums)))"; - let expected_output = "15"; - - test_create_open_and_verify_functional_commitment_aux( - function_source, - function_input, - expected_output, - ); -} - -#[test] -#[ignore] -fn test_create_open_and_verify_complicated_higher_order_functional_commitment2() { - let function_source = "(letrec ((secret-data '((joe 4 3) (bill 10 2 3) (jane 8 7 6 10) (carol 3 5 8))) (filter (lambda (data predicate) (if data (if (predicate (cdr (car data))) (cons (car data) (filter (cdr data) predicate)) (filter (cdr data) predicate))))) (f (lambda (predicate) (car (car (filter secret-data predicate)))))) f)"; - - let function_input = "(letrec ((sum-aux (lambda (acc nums) - (if nums - (sum-aux (+ acc (car nums)) (cdr nums)) - acc))) - (sum (sum-aux 0))) - (lambda (nums) - (= (sum nums) 15)))"; - let expected_output = ".lurk.user.BILL"; - - test_create_open_and_verify_functional_commitment_aux( - function_source, - function_input, - expected_output, - ); -}
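
The two numeric invariants exercised by the removed code are small enough to restate standalone. The sketch below is illustrative only and is not part of the removed fcomm sources; the helper names `expected_nova_steps` and `chained_adder_outputs` are hypothetical. It mirrors the ceiling-division relation the removed `Proof::verify` used to reconcile a claim's iteration count with the number of Nova steps, and the running-accumulator behaviour behind the chained-commitment tests' expected I/O of ("5", "5") followed by ("3", "8").

/// Ceiling division mirroring the consistency check in the removed `Proof::verify`:
/// `iterations` evaluation frames packed into circuits of `chunk_frame_count` frames
/// each require this many Nova steps.
fn expected_nova_steps(iterations: usize, chunk_frame_count: usize) -> usize {
    (iterations / chunk_frame_count) + usize::from(iterations % chunk_frame_count != 0)
}

/// The chained function committed to in the removed tests keeps a hidden accumulator:
/// each opening on input `x` returns `acc + x` together with a commitment to the
/// continuation closed over the new accumulator.
fn chained_adder_outputs(inputs: &[u64]) -> Vec<u64> {
    inputs
        .iter()
        .scan(0u64, |acc, x| {
            *acc += x;
            Some(*acc)
        })
        .collect()
}

fn main() {
    // 17 frames at 10 frames per circuit need 2 steps; 20 frames also need exactly 2.
    assert_eq!(expected_nova_steps(17, 10), 2);
    assert_eq!(expected_nova_steps(20, 10), 2);
    // Matches the tests' expected_io: inputs 5 then 3 open to outputs 5 then 8.
    assert_eq!(chained_adder_outputs(&[5, 3]), vec![5, 8]);
}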