From a21ae22e3b817ccdd03b8c547cb697bd922d51e6 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Mon, 1 Jul 2024 03:53:50 +0700 Subject: [PATCH 01/34] potential limit fix --- grovedb/src/operations/proof/generate.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index fad64c84..7fecb4b2 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -194,7 +194,7 @@ impl GroveDb { return Ok(()).wrap_with_cost(cost); } - let reached_limit = query.query.limit.is_some() && query.query.limit.unwrap() == 0; + let reached_limit = current_limit.map_or(false, |limit| limit == 0); if reached_limit { if is_first_call { cost_return_on_error!( From 9f645708940efc3f404658a92b62dad60ad1105b Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 2 Jul 2024 06:20:49 +0700 Subject: [PATCH 02/34] removed offset from proofs --- grovedb/src/batch/mod.rs | 2 +- grovedb/src/operations/get/query.rs | 9 +- grovedb/src/operations/proof.rs | 3 + grovedb/src/operations/proof/generate.rs | 151 +++--- grovedb/src/operations/proof/util.rs | 34 +- grovedb/src/operations/proof/verify.rs | 63 +-- grovedb/src/query/mod.rs | 46 +- grovedb/src/query_result_type.rs | 17 + grovedb/src/reference_path.rs | 2 +- grovedb/src/tests/mod.rs | 485 +++++++++++++++++-- grovedb/src/tests/query_tests.rs | 213 +++++---- grovedb/src/tests/sum_tree_tests.rs | 4 +- merk/src/merk/mod.rs | 8 + merk/src/merk/prove.rs | 29 +- merk/src/proofs/query/mod.rs | 571 +++++++++-------------- merk/src/proofs/query/verify.rs | 35 +- tutorials/src/bin/proofs.rs | 2 +- tutorials/src/bin/replication.rs | 2 +- 18 files changed, 992 insertions(+), 684 deletions(-) diff --git a/grovedb/src/batch/mod.rs b/grovedb/src/batch/mod.rs index 8674672c..3e4d4275 100644 --- a/grovedb/src/batch/mod.rs +++ b/grovedb/src/batch/mod.rs @@ -3493,7 +3493,7 @@ mod tests { 
reference_key_query.insert_key(b"key1".to_vec()); let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], reference_key_query); let proof = db - .prove_query(&path_query) + .prove_query(&path_query, None) .unwrap() .expect("should generate proof"); let verification_result = GroveDb::verify_query_raw(&proof, &path_query); diff --git a/grovedb/src/operations/get/query.rs b/grovedb/src/operations/get/query.rs index 7e29b233..810a2f15 100644 --- a/grovedb/src/operations/get/query.rs +++ b/grovedb/src/operations/get/query.rs @@ -45,6 +45,7 @@ use crate::{ reference_path::ReferencePathType, Element, Error, GroveDb, PathQuery, TransactionArg, }; +use crate::operations::proof::ProveOptions; #[cfg(feature = "full")] #[derive(Debug, Eq, PartialEq, Clone)] @@ -152,7 +153,7 @@ where { pub fn get_proved_path_query( &self, path_query: &PathQuery, - is_verbose: bool, + prove_options: Option, transaction: TransactionArg, ) -> CostResult, Error> { if transaction.is_some() { @@ -160,10 +161,8 @@ where { "transactions are not currently supported".to_string(), )) .wrap_with_cost(Default::default()) - } else if is_verbose { - self.prove_verbose(path_query) } else { - self.prove_query(path_query) + self.prove_query(path_query, prove_options) } } @@ -1273,7 +1272,7 @@ mod tests { let path = vec![TEST_LEAF.to_vec()]; let path_query = PathQuery::new(path, SizedQuery::new(query, Some(1000), None)); - let raw_result = db + db .query_raw_keys_optional(&path_query, true, true, true, None) .unwrap() .expect_err( diff --git a/grovedb/src/operations/proof.rs b/grovedb/src/operations/proof.rs index 1734c6c6..88624f77 100644 --- a/grovedb/src/operations/proof.rs +++ b/grovedb/src/operations/proof.rs @@ -34,3 +34,6 @@ mod generate; pub mod util; #[cfg(any(feature = "full", feature = "verify"))] pub mod verify; + +#[cfg(feature = "full")] +pub use generate::ProveOptions; diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index 7fecb4b2..01b96b07 
100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -47,38 +47,52 @@ use grovedb_storage::StorageContext; use crate::{ element::helpers::raw_decode, operations::proof::util::{ - increase_limit_and_offset_by, reduce_limit_and_offset_by, write_slice_of_slice_to_slice, - write_slice_to_vec, write_to_vec, ProofTokenType, EMPTY_TREE_HASH, + increase_limit_by, reduce_limit_by, write_slice_of_slice_to_slice, + write_slice_to_vec, write_to_vec, ProofTokenType, }, reference_path::path_from_reference_path_type, versioning::{prepend_version_to_bytes, PROOF_VERSION}, Element, Error, GroveDb, PathQuery, Query, }; +use crate::query_result_type::QueryResultType; -type LimitOffset = (Option, Option); +#[derive(Debug, Clone, Copy)] +pub struct ProveOptions { + pub is_verbose: bool, + pub multilevel_results: bool, +} + +impl Default for ProveOptions { + fn default() -> Self { + ProveOptions { + is_verbose: false, + multilevel_results: false, + } + } +} impl GroveDb { /// Prove one or more path queries. /// If we more than one path query, we merge into a single path query before /// proving. - pub fn prove_query_many(&self, query: Vec<&PathQuery>) -> CostResult, Error> { + pub fn prove_query_many(&self, query: Vec<&PathQuery>, prove_options: Option) -> CostResult, Error> { if query.len() > 1 { let query = cost_return_on_error_default!(PathQuery::merge(query)); - self.prove_query(&query) + self.prove_query(&query, prove_options) } else { - self.prove_query(query[0]) + self.prove_query(query[0], prove_options) } } /// Prove one or more path queries verbose. /// If we more than one path query, we merge into a single path query before /// proving verbose. 
- pub fn prove_verbose_many(&self, query: Vec<&PathQuery>) -> CostResult, Error> { + pub fn prove_verbose_many(&self, query: Vec<&PathQuery>, prove_options: Option) -> CostResult, Error> { if query.len() > 1 { let query = cost_return_on_error_default!(PathQuery::merge(query)); - self.prove_verbose(&query) + self.prove_query(&query, prove_options) } else { - self.prove_verbose(query[0]) + self.prove_query(query[0], prove_options) } } @@ -86,38 +100,31 @@ impl GroveDb { /// doesn't allow for subset verification /// Proofs generated with this can only be verified by the path query used /// to generate them. - pub fn prove_query(&self, query: &PathQuery) -> CostResult, Error> { - self.prove_internal(query, false) + pub fn prove_query(&self, query: &PathQuery, prove_options: Option) -> CostResult, Error> { + self.prove_internal(query, prove_options) } - /// Generate a verbose proof for a given path query - /// Any path query that is a subset of the original proof generating path - /// query can be used to verify this (subset verification) - pub fn prove_verbose(&self, query: &PathQuery) -> CostResult, Error> { - // TODO: we need to solve the localized limit and offset problem. - // when using a path query that has a limit and offset value, - // to get the expected behaviour, you need to know exactly - // how the proving internals work and how your state looks. 
- self.prove_internal(query, true) - } - - /// Generates a verbose or non verbose proof based on a bool - fn prove_internal(&self, query: &PathQuery, is_verbose: bool) -> CostResult, Error> { + /// Generates a verbose or non-verbose proof based on a bool + fn prove_internal(&self, path_query: &PathQuery, prove_options: Option) -> CostResult, Error> { + let ProveOptions { + is_verbose, multilevel_results + } = prove_options.unwrap_or_default(); let mut cost = OperationCost::default(); + if path_query.query.offset.is_some() && path_query.query.offset != Some(0) { + return Err(Error::InvalidQuery("proved path queries can not have offsets")).wrap_with_cost(cost); + } + let mut proof_result = cost_return_on_error_default!(prepend_version_to_bytes(vec![], PROOF_VERSION)); - let mut limit: Option = query.query.limit; - let mut offset: Option = query.query.offset; - - let path_slices = query.path.iter().map(|x| x.as_slice()).collect::>(); + let path_slices = path_query.path.iter().map(|x| x.as_slice()).collect::>(); let subtree_exists = self .check_subtree_exists_path_not_found(path_slices.as_slice().into(), None) .unwrap_add_cost(&mut cost); - // if the subtree at the given path doesn't exists, prove that this path + // if the subtree at the given path doesn't exist, prove that this path // doesn't point to a valid subtree match subtree_exists { Ok(_) => { @@ -147,14 +154,24 @@ impl GroveDb { ); } + let mut limit: Option = path_query.query.limit; + + let precomputed_result_map = if !multilevel_results || limit.is_none() { + None + } else { + let result = cost_return_on_error!( + &mut cost, + self.query(path_query, false, true, false, QueryResultType::QueryPathKeyElementTrioResultType, None)).0; + Some(result.to_path_to_key_elements_btree_map()) + }; + cost_return_on_error!( &mut cost, self.prove_subqueries( &mut proof_result, path_slices.clone(), - query, + path_query, &mut limit, - &mut offset, true, is_verbose ) @@ -175,7 +192,6 @@ impl GroveDb { path: Vec<&[u8]>, 
query: &PathQuery, current_limit: &mut Option, - current_offset: &mut Option, is_first_call: bool, is_verbose: bool, ) -> CostResult<(), Error> { @@ -186,7 +202,7 @@ impl GroveDb { &mut cost, self.open_non_transactional_merk_at_path(path.as_slice().into(), None) ); - if subtree.root_hash().unwrap_add_cost(&mut cost) == EMPTY_TREE_HASH { + if !subtree.has_root_key() { cost_return_on_error_no_add!( &cost, write_to_vec(proofs, &[ProofTokenType::EmptyTree.into()]) @@ -203,7 +219,7 @@ impl GroveDb { &path.as_slice().into(), &subtree, &query.query.query, - (*current_limit, *current_offset), + *current_limit, ProofTokenType::SizedMerk, proofs, is_verbose, @@ -222,10 +238,13 @@ impl GroveDb { let mut kv_iterator = KVIterator::new(subtree.storage.raw_iter(), &query.query.query) .unwrap_add_cost(&mut cost); + //let mut elements_to_prove = vec![]; + while let Some((key, value_bytes)) = kv_iterator.next_kv().unwrap_add_cost(&mut cost) { let mut encountered_absence = false; let element = cost_return_on_error_no_add!(&cost, raw_decode(&value_bytes)); + println!("Element is {:?}", element); match element { Element::Tree(root_key, _) | Element::SumTree(root_key, ..) 
=> { let (mut subquery_path, subquery_value) = @@ -234,13 +253,8 @@ impl GroveDb { if subquery_value.is_none() && subquery_path.is_none() { // this element should be added to the result set // hence we have to update the limit and offset value - let reduced_offset = - reduce_limit_and_offset_by(current_limit, current_offset, 1); - if reduced_offset { - offset_inc += 1; - } else { + reduce_limit_by(current_limit, 1); limit_inc += 1; - } continue; } @@ -248,7 +262,7 @@ impl GroveDb { continue; } - // if the element is a non empty tree then current tree is not a leaf tree + // if the element is a non-empty tree then current tree is not a leaf tree if is_leaf_tree { is_leaf_tree = false; cost_return_on_error!( @@ -257,7 +271,7 @@ impl GroveDb { &path.as_slice().into(), &subtree, &query.query.query, - (None, None), + None, ProofTokenType::Merk, proofs, is_verbose, @@ -291,7 +305,7 @@ impl GroveDb { &new_path.as_slice().into(), &inner_subtree, &key_as_query, - (None, None), + None, ProofTokenType::Merk, proofs, is_verbose, @@ -345,7 +359,7 @@ impl GroveDb { &new_path.as_slice().into(), &inner_subtree, &key_as_query, - (None, None), + None, ProofTokenType::Merk, proofs, is_verbose, @@ -401,7 +415,6 @@ impl GroveDb { new_path, &new_path_query, current_limit, - current_offset, false, is_verbose, ) @@ -420,14 +433,14 @@ impl GroveDb { if is_leaf_tree { // if no useful subtree, then we care about the result set of this subtree. 
// apply the sized query - increase_limit_and_offset_by(current_limit, current_offset, limit_inc, offset_inc); + increase_limit_by(current_limit, limit_inc); let limit_offset = cost_return_on_error!( &mut cost, self.generate_and_store_merk_proof( &path.as_slice().into(), &subtree, &query.query.query, - (*current_limit, *current_offset), + *current_limit, ProofTokenType::SizedMerk, proofs, is_verbose, @@ -435,11 +448,10 @@ impl GroveDb { ) ); - // update limit and offset values - *current_limit = limit_offset.0; - *current_offset = limit_offset.1; + // update limit + *current_limit = limit_offset; } else { - reduce_limit_and_offset_by(current_limit, current_offset, to_add_to_result_set); + reduce_limit_by(current_limit, to_add_to_result_set); } Ok(()).wrap_with_cost(cost) @@ -471,7 +483,7 @@ impl GroveDb { &path_slice.into(), &subtree, &query, - (None, None), + None, ProofTokenType::Merk, proof_result, is_verbose, @@ -490,12 +502,12 @@ impl GroveDb { path: &SubtreePath, subtree: &'a Merk, query: &Query, - limit_offset: LimitOffset, + limit: Option, proof_token_type: ProofTokenType, proofs: &mut Vec, is_verbose: bool, key: &[u8], - ) -> CostResult<(Option, Option), Error> + ) -> CostResult, Error> where S: StorageContext<'a> + 'a, B: AsRef<[u8]>, @@ -507,22 +519,23 @@ impl GroveDb { )) .wrap_with_cost(Default::default()); } + println!("generate_and_store_merk_proof path {:?} query {:?} limit_offset {:?} proof_token_type {}", path.to_vec().into_iter().map(hex::encode).collect::>().join("/"), query, limit, proof_token_type); let mut cost = OperationCost::default(); // if the subtree is empty, return the EmptyTree proof op - if subtree.root_hash().unwrap() == EMPTY_TREE_HASH { + if !subtree.has_root_key() { cost_return_on_error_no_add!( &cost, write_to_vec(proofs, &[ProofTokenType::EmptyTree.into()]) ); - return Ok(limit_offset).wrap_with_cost(cost); + return Ok(limit).wrap_with_cost(cost); } let mut proof_result = cost_return_on_error_no_add!( &cost, subtree - 
.prove_without_encoding(query.clone(), limit_offset.0, limit_offset.1) + .prove_without_encoding(query.clone(), limit) .unwrap() .map_err(|_e| Error::InternalError("failed to generate proof")) ); @@ -542,7 +555,7 @@ impl GroveDb { // write the merk proof cost_return_on_error_no_add!(&cost, write_slice_to_vec(proofs, &proof_bytes)); - Ok((proof_result.limit, proof_result.offset)).wrap_with_cost(cost) + Ok(proof_result.limit).wrap_with_cost(cost) } /// Serializes a path and add it to the proof vector @@ -596,7 +609,7 @@ impl GroveDb { ¤t_path.as_slice().into(), &subtree, &next_key_query, - (None, None), + None, ProofTokenType::Merk, proof_result, is_verbose, @@ -731,7 +744,7 @@ mod tests { &path.as_slice().into(), &merk, &query, - (None, None), + None, ProofTokenType::Merk, &mut proof, true, @@ -747,7 +760,7 @@ mod tests { assert_eq!(proof_token_type, ProofTokenType::Merk); assert_eq!(key, Some(b"innertree".to_vec())); - let (root_hash, result_set) = execute_proof(&proof, &query, None, None, true) + let (root_hash, result_set) = execute_proof(&proof, &query, None, true) .unwrap() .unwrap(); assert_eq!(root_hash, expected_root_hash); @@ -765,7 +778,7 @@ mod tests { &EMPTY_PATH, &merk, &query, - (None, None), + None, ProofTokenType::Merk, &mut proof, true, @@ -781,7 +794,7 @@ mod tests { assert_eq!(proof_token_type, ProofTokenType::Merk); assert_eq!(key, Some(vec![])); - let (root_hash, result_set) = execute_proof(&proof, &query, None, None, true) + let (root_hash, result_set) = execute_proof(&proof, &query, None, true) .unwrap() .unwrap(); assert_eq!(root_hash, expected_root_hash); @@ -811,7 +824,7 @@ mod tests { &path.as_slice().into(), &merk, &query, - (None, None), + None, ProofTokenType::Merk, &mut proofs, true, @@ -831,7 +844,7 @@ mod tests { &path.as_slice().into(), &merk, &query, - (None, None), + None, ProofTokenType::Merk, &mut proofs, true, @@ -851,7 +864,7 @@ mod tests { &path.as_slice().into(), &merk, &query, - (None, None), + None, ProofTokenType::Merk, 
&mut proofs, true, @@ -869,7 +882,7 @@ mod tests { assert_eq!(proof_token_type, ProofTokenType::Merk); - let (root_hash, result_set) = execute_proof(&proof, &query, None, None, true) + let (root_hash, result_set) = execute_proof(&proof, &query, None, true) .unwrap() .unwrap(); assert_eq!(root_hash, inner_tree_root_hash); @@ -884,7 +897,7 @@ mod tests { assert_eq!(proof_token_type, ProofTokenType::Merk); - let (root_hash, result_set) = execute_proof(&proof, &query, None, None, true) + let (root_hash, result_set) = execute_proof(&proof, &query, None, true) .unwrap() .unwrap(); assert_eq!(root_hash, inner_tree_4_root_hash); @@ -898,7 +911,7 @@ mod tests { assert_eq!(proof_token_type, ProofTokenType::Merk); - let (root_hash, result_set) = execute_proof(&proof, &query, None, None, true) + let (root_hash, result_set) = execute_proof(&proof, &query, None, true) .unwrap() .unwrap(); assert_eq!(root_hash, deeper_1_root_hash); diff --git a/grovedb/src/operations/proof/util.rs b/grovedb/src/operations/proof/util.rs index 82e8c585..c3749a16 100644 --- a/grovedb/src/operations/proof/util.rs +++ b/grovedb/src/operations/proof/util.rs @@ -41,7 +41,7 @@ use integer_encoding::{VarInt, VarIntReader}; #[cfg(any(feature = "full", feature = "verify"))] use crate::Error; -use crate::{operations::proof::verify::ProvedKeyValues, reference_path::ReferencePathType}; +use crate::operations::proof::verify::ProvedKeyValues; #[cfg(any(feature = "full", feature = "verify"))] pub const EMPTY_TREE_HASH: [u8; 32] = [0; 32]; @@ -337,29 +337,12 @@ pub fn write_slice_of_slice_to_slice(dest: &mut W, value: &[&[u8]]) -> } #[cfg(any(feature = "full", feature = "verify"))] -pub fn reduce_limit_and_offset_by( +pub fn reduce_limit_by( limit: &mut Option, - offset: &mut Option, n: u16, -) -> bool { - let mut skip_limit = false; - let mut n = n; - - if let Some(offset_value) = *offset { - if offset_value > 0 { - if offset_value >= n { - *offset = Some(offset_value - n); - n = 0; - } else { - *offset = 
Some(0); - n -= offset_value; - } - skip_limit = true; - } - } - +) { if let Some(limit_value) = *limit { - if !skip_limit && limit_value > 0 { + if limit_value > 0 { if limit_value >= n { *limit = Some(limit_value - n); } else { @@ -367,19 +350,12 @@ pub fn reduce_limit_and_offset_by( } } } - - skip_limit } -pub fn increase_limit_and_offset_by( +pub fn increase_limit_by( limit: &mut Option, - offset: &mut Option, limit_inc: u16, - offset_inc: u16, ) { - if let Some(offset_value) = *offset { - *offset = Some(offset_value + offset_inc); - } if let Some(limit_value) = *limit { *limit = Some(limit_value + limit_inc); } diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 7a347c15..d26ac4ef 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -42,7 +42,7 @@ use grovedb_merk::{ use crate::{ operations::proof::util::{ - reduce_limit_and_offset_by, ProvedPathKeyValue, ProvedPathKeyValues, + reduce_limit_by, ProvedPathKeyValue, ProvedPathKeyValues, }, query_result_type::PathKeyOptionalElementTrio, versioning::read_and_consume_proof_version, @@ -240,7 +240,6 @@ impl GroveDb { /// Proof verifier struct ProofVerifier { limit: Option, - offset: Option, result_set: ProvedPathKeyValues, } @@ -250,7 +249,6 @@ impl ProofVerifier { pub fn new(query: &PathQuery) -> Self { ProofVerifier { limit: query.query.limit, - offset: query.query.offset, result_set: vec![], } } @@ -406,26 +404,21 @@ impl ProofVerifier { if subquery_value.is_none() && subquery_path.is_none() { // add this element to the result set - let skip_limit = reduce_limit_and_offset_by( + reduce_limit_by( &mut self.limit, - &mut self.offset, 1, ); - if !skip_limit { - // only insert to the result set if the offset value is not - // greater than 0 - self.result_set.push( - ProvedPathKeyValue::from_proved_key_value( - path, - ProvedKeyValue { - key, - value: current_value_bytes, - proof: value_hash, - }, - ), - ); - } + 
self.result_set.push( + ProvedPathKeyValue::from_proved_key_value( + path, + ProvedKeyValue { + key, + value: current_value_bytes, + proof: value_hash, + }, + ), + ); continue; } @@ -535,22 +528,17 @@ impl ProofVerifier { break; } - let skip_limit = - reduce_limit_and_offset_by(&mut self.limit, &mut self.offset, 1); - - if !skip_limit { - // only insert to the result set if the offset value is not greater - // than 0 - self.result_set - .push(ProvedPathKeyValue::from_proved_key_value( - path, - ProvedKeyValue { - key, - value: value_bytes, - proof: value_hash, - }, - )); - } + reduce_limit_by(&mut self.limit, 1); + + self.result_set + .push(ProvedPathKeyValue::from_proved_key_value( + path, + ProvedKeyValue { + key, + value: value_bytes, + proof: value_hash, + }, + )); } } } @@ -901,15 +889,13 @@ impl ProofVerifier { ) -> Result<(CryptoHash, Option), Error> { let is_sized_proof = proof_token_type == ProofTokenType::SizedMerk; let mut limit = None; - let mut offset = None; if is_sized_proof { limit = self.limit; - offset = self.offset; } let (hash, result) = - grovedb_merk::execute_proof(proof, query, limit, offset, left_to_right) + grovedb_merk::execute_proof(proof, query, limit, left_to_right) .unwrap() .map_err(|e| { eprintln!("{e}"); @@ -922,7 +908,6 @@ impl ProofVerifier { if is_sized_proof { self.limit = result.limit; - self.offset = result.offset; self.result_set.extend(proved_path_key_values); Ok((hash, None)) } else { diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index db75144d..67451b8a 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -294,7 +294,7 @@ mod tests { let path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query_one); - let proof = temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set_one) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should 
execute proof"); assert_eq!(result_set_one.len(), 1); @@ -304,7 +304,7 @@ mod tests { let path_query_two = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query_two); - let proof = temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set_two) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set_two.len(), 1); @@ -312,7 +312,7 @@ mod tests { let merged_path_query = PathQuery::merge(vec![&path_query_one, &path_query_two]) .expect("should merge path queries"); - let proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); let (_, result_set_tree) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); assert_eq!(result_set_tree.len(), 2); @@ -330,7 +330,7 @@ mod tests { let path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query_one); - let proof = temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set_one) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should execute proof"); assert_eq!(result_set_one.len(), 1); @@ -340,7 +340,7 @@ mod tests { let path_query_two = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()], query_two); - let proof = temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set_two) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set_two.len(), 1); @@ -350,7 +350,7 @@ mod tests { assert_eq!(merged_path_query.path, vec![TEST_LEAF.to_vec()]); assert_eq!(merged_path_query.query.query.items.len(), 2); - let 
proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); let (_, result_set_merged) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); @@ -374,7 +374,7 @@ mod tests { query_one.clone(), ); - let proof = temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set_one) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should execute proof"); assert_eq!(result_set_one.len(), 3); @@ -391,7 +391,7 @@ mod tests { query_two.clone(), ); - let proof = temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set_two) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set_two.len(), 2); @@ -408,7 +408,7 @@ mod tests { query_three.clone(), ); - let proof = temp_db.prove_query(&path_query_three).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_three, None).unwrap().unwrap(); let (_, result_set_two) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_three) .expect("should execute proof"); assert_eq!(result_set_two.len(), 2); @@ -544,7 +544,7 @@ mod tests { .expect("expected to get results"); assert_eq!(result_set_merged.len(), 7); - let proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); let (_, proved_result_set_merged) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); @@ -589,7 +589,7 @@ mod tests { query_one, ); - let proof = temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set_one) = 
GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should execute proof"); assert_eq!(result_set_one.len(), 6); @@ -606,7 +606,7 @@ mod tests { query_two, ); - let proof = temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set_two) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set_two.len(), 2); @@ -615,7 +615,7 @@ mod tests { .expect("expect to merge path queries"); assert_eq!(merged_path_query.path, vec![b"deep_leaf".to_vec()]); - let proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); let (_, result_set_merged) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); @@ -655,7 +655,7 @@ mod tests { let path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query_one); - let proof = temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should execute proof"); assert_eq!(result_set.len(), 1); @@ -665,7 +665,7 @@ mod tests { let path_query_two = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query_two); - let proof = temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set.len(), 1); @@ -677,7 +677,7 @@ mod tests { query_three, ); - let proof = temp_db.prove_query(&path_query_three).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_three, None).unwrap().unwrap(); let (_, result_set) = 
GroveDb::verify_query_raw(proof.as_slice(), &path_query_three) .expect("should execute proof"); assert_eq!(result_set.len(), 2); @@ -686,7 +686,7 @@ mod tests { PathQuery::merge(vec![&path_query_one, &path_query_two, &path_query_three]) .expect("should merge three queries"); - let proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); assert_eq!(result_set.len(), 4); @@ -705,7 +705,7 @@ mod tests { let path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query_one); - let proof = temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should execute proof"); assert_eq!(result_set.len(), 1); @@ -715,7 +715,7 @@ mod tests { let path_query_two = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query_two); - let proof = temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set.len(), 1); @@ -723,7 +723,7 @@ mod tests { let merged_path_query = PathQuery::merge(vec![&path_query_one, &path_query_two]) .expect("should merge three queries"); - let proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); assert_eq!(result_set.len(), 2); @@ -738,7 +738,7 @@ mod tests { query_one, ); - let proof = 
temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should execute proof"); assert_eq!(result_set.len(), 2); @@ -755,7 +755,7 @@ mod tests { query_one, ); - let proof = temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set.len(), 3); @@ -838,7 +838,7 @@ mod tests { .expect("expected to get results"); assert_eq!(result_set_merged.len(), 4); - let proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); assert_eq!(result_set.len(), 4); diff --git a/grovedb/src/query_result_type.rs b/grovedb/src/query_result_type.rs index 289ffb26..5eb50206 100644 --- a/grovedb/src/query_result_type.rs +++ b/grovedb/src/query_result_type.rs @@ -209,6 +209,23 @@ impl QueryResultElements { map } + /// To path to key, elements btree map + pub fn to_path_to_key_elements_btree_map(self) -> BTreeMap> { + let mut map: BTreeMap> = BTreeMap::new(); + + for result_item in self.elements.into_iter() { + if let QueryResultElement::PathKeyElementTrioResultItem((path, key, element)) = + result_item + { + map.entry(path) + .or_insert_with(BTreeMap::new) + .insert(key, element); + } + } + + map + } + /// To last path to key, elements btree map pub fn to_last_path_to_key_elements_btree_map(self) -> BTreeMap> { let mut map: BTreeMap, BTreeMap> = BTreeMap::new(); diff --git a/grovedb/src/reference_path.rs b/grovedb/src/reference_path.rs index 38c3f147..d1fd4baa 100644 --- a/grovedb/src/reference_path.rs +++ 
b/grovedb/src/reference_path.rs @@ -478,7 +478,7 @@ mod tests { ); let proof = db - .prove_query(&path_query) + .prove_query(&path_query, None) .unwrap() .expect("should generate proof"); let (hash, result) = diff --git a/grovedb/src/tests/mod.rs b/grovedb/src/tests/mod.rs index 95e0d2b1..eef5a059 100644 --- a/grovedb/src/tests/mod.rs +++ b/grovedb/src/tests/mod.rs @@ -50,6 +50,7 @@ use crate::{ query_result_type::QueryResultType::QueryKeyElementPairResultType, reference_path::ReferencePathType, tests::common::compare_result_tuples, }; +use crate::query_result_type::QueryResultType; pub const TEST_LEAF: &[u8] = b"test_leaf"; @@ -157,6 +158,10 @@ pub fn make_deep_tree() -> TempGroveDb { // deeper_4 // k10,v10 // k11,v11 + // deeper_5 + // k12,v12 + // k13,v13 + // k14,v14 // Insert elements into grovedb instance let temp_db = make_test_grovedb(); @@ -339,6 +344,16 @@ pub fn make_deep_tree() -> TempGroveDb { ) .unwrap() .expect("successful subtree insert"); + temp_db + .insert( + [DEEP_LEAF, b"deep_node_2"].as_ref(), + b"deeper_5", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); // Insert level 3 nodes temp_db .insert( @@ -451,6 +466,265 @@ pub fn make_deep_tree() -> TempGroveDb { ) .unwrap() .expect("successful subtree insert"); + temp_db + .insert( + [DEEP_LEAF, b"deep_node_2", b"deeper_5"].as_ref(), + b"key12", + Element::new_item(b"value12".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [DEEP_LEAF, b"deep_node_2", b"deeper_5"].as_ref(), + b"key13", + Element::new_item(b"value13".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [DEEP_LEAF, b"deep_node_2", b"deeper_5"].as_ref(), + b"key14", + Element::new_item(b"value14".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db +} + +pub fn make_deep_tree_with_sum_trees() -> TempGroveDb { + // Tree Structure + // root 
+ // deep_leaf + // deep_node_1 + // "" -> "empty" + // a -> "storage" + // c + // 1 (sum tree) + // [0;32], 1 + // [1;32], 1 + // d + // 0,v1 + // 1 (sum tree) + // [0;32], 4 + // [1;32], 1 + // e + // 0,v4 + // 1 (sum tree) + // [0;32], 1 + // [1;32], 4 + // f + // 0,v1 + // 1 (sum tree) + // [0;32], 1 + // [1;32], 4 + // g + // 0,v4 + // 1 (sum tree) + // [3;32], 4 + // [5;32], 4 + // h -> "h" + // .. -> .. + // z -> "z" + + let temp_db = make_test_grovedb(); + + // Add deep_leaf to root + temp_db + .insert(EMPTY_PATH, DEEP_LEAF, Element::empty_tree(), None, None) + .unwrap() + .expect("successful root tree leaf insert"); + + // Add deep_node_1 to deep_leaf + temp_db + .insert( + [DEEP_LEAF].as_ref(), + b"deep_node_1", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + // Add a -> "storage" to deep_node_1 + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1"].as_ref(), + b"", + Element::new_item("empty".as_bytes().to_vec()), + None, + None, + ) + .unwrap() + .expect("successful item insert"); + + + // Add a -> "storage" to deep_node_1 + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1"].as_ref(), + b"a", + Element::new_item("storage".as_bytes().to_vec()), + None, + None, + ) + .unwrap() + .expect("successful item insert"); + + // Add c, d, e, f, g to deep_node_1 + for key in [b"c", b"d", b"e", b"f", b"g"].iter() { + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1"].as_ref(), + key.as_slice(), + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + } + + // Add sum tree to c + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", b"c"].as_ref(), + b"1", + Element::new_sum_tree(None), + None, + None, + ) + .unwrap() + .expect("successful sum tree insert"); + + // Add items to sum tree in c + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", b"c", b"1"].as_ref(), + &[0; 32], + Element::SumItem(1, None), + None, + None, + ) + .unwrap() + .expect("successful sum item insert"); + temp_db + 
.insert( + [DEEP_LEAF, b"deep_node_1", b"c", b"1"].as_ref(), + &[1; 32], + Element::SumItem(1, None), + None, + None, + ) + .unwrap() + .expect("successful sum item insert"); + + // Add items to 4, 5, 6, 7 + for (key, value) in [ + (b"d", b"v1"), + (b"e", b"v4"), + (b"f", b"v1"), + (b"g", b"v4"), + ] + .iter() + { + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", key.as_slice()].as_ref(), + b"0", + Element::new_item(value.to_vec()), + None, + None, + ) + .unwrap() + .expect("successful item insert"); + + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", key.as_slice()].as_ref(), + b"1", + Element::new_sum_tree(None), + None, + None, + ) + .unwrap() + .expect("successful sum tree insert"); + } + + // Add items to sum trees in d, e, f + for key in [b"d", b"e", b"f"].iter() { + let (value1, value2) = if *key == b"d" { + (4, 1) + } else { + (1, 4) + }; + + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", key.as_slice(), b"1"].as_ref(), + &[0; 32], + Element::SumItem(value1, None), + None, + None, + ) + .unwrap() + .expect("successful sum item insert"); + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", key.as_slice(), b"1"].as_ref(), + &[1; 32], + Element::SumItem(value2, None), + None, + None, + ) + .unwrap() + .expect("successful sum item insert"); + } + + // Add items to sum tree in 7 + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", b"g", b"1"].as_ref(), + &[3; 32], + Element::SumItem(4, None), + None, + None, + ) + .unwrap() + .expect("successful sum item insert"); + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", b"g", b"1"].as_ref(), + &[5; 32], + Element::SumItem(4, None), + None, + None, + ) + .unwrap() + .expect("successful sum item insert"); + + // Add entries for all letters from "h" to "z" + for letter in b'h'..=b'z' { + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1"].as_ref(), + &[letter], + Element::new_item(vec![letter]), + None, + None, + ) + .unwrap() + .expect(&format!("successful item insert for {}", letter as char)); + } + temp_db } @@ -590,7 
+864,7 @@ fn test_element_with_flags() { SizedQuery::new(query, None, None), ); let proof = db - .prove_query(&path_query) + .prove_query(&path_query, None) .unwrap() .expect("should successfully create proof"); let (root_hash, result_set) = @@ -1036,7 +1310,7 @@ fn test_proof_for_invalid_path_root_key() { let query = Query::new(); let path_query = PathQuery::new_unsized(vec![b"invalid_path_key".to_vec()], query); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1052,7 +1326,7 @@ fn test_proof_for_invalid_path() { let path_query = PathQuery::new_unsized(vec![b"deep_leaf".to_vec(), b"invalid_key".to_vec()], query); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1069,7 +1343,7 @@ fn test_proof_for_invalid_path() { query, ); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1087,7 +1361,7 @@ fn test_proof_for_invalid_path() { query, ); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1105,7 +1379,7 @@ fn test_proof_for_invalid_path() { query, ); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1123,7 
+1397,7 @@ fn test_proof_for_non_existent_data() { // path to empty subtree let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1270,7 +1544,7 @@ fn test_path_query_proofs_without_subquery_with_reference() { query, ); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); assert_eq!( hex::encode(&proof), "010285010198ebd6dc7e1c82951c41fcfa6487711cac6a399ebb01bb979cb\ @@ -1404,7 +1678,7 @@ fn test_path_query_proofs_without_subquery() { let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); assert_eq!( hex::encode(proof.as_slice()), "01025503046b6579310009000676616c7565310002018655e18e4555b0b65\ @@ -1431,7 +1705,7 @@ fn test_path_query_proofs_without_subquery() { SizedQuery::new(query, Some(1), None), ); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1439,22 +1713,6 @@ fn test_path_query_proofs_without_subquery() { let r1 = Element::new_item(b"value2".to_vec()).serialize().unwrap(); compare_result_tuples(result_set, vec![(b"key2".to_vec(), r1)]); - // Range query + offset + limit - let mut query = Query::new(); - query.insert_range_after(b"key1".to_vec()..); - let path_query = PathQuery::new( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - SizedQuery::new(query, Some(1), Some(1)), - ); - - let proof = 
temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - let r1 = Element::new_item(b"value3".to_vec()).serialize().unwrap(); - compare_result_tuples(result_set, vec![(b"key3".to_vec(), r1)]); - // Range query + direction + limit let mut query = Query::new_with_direction(false); query.insert_all(); @@ -1463,7 +1721,7 @@ fn test_path_query_proofs_without_subquery() { SizedQuery::new(query, Some(2), None), ); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1489,7 +1747,7 @@ fn test_path_query_proofs_with_default_subquery() { let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1523,7 +1781,7 @@ fn test_path_query_proofs_with_default_subquery() { let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1546,7 +1804,7 @@ fn test_path_query_proofs_with_default_subquery() { let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), 
&path_query).expect( "should execute proof", @@ -1576,12 +1834,12 @@ fn test_path_query_proofs_with_default_subquery() { let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 11); + assert_eq!(result_set.len(), 14); let keys = [ b"key1".to_vec(), @@ -1595,6 +1853,9 @@ fn test_path_query_proofs_with_default_subquery() { b"key9".to_vec(), b"key10".to_vec(), b"key11".to_vec(), + b"key12".to_vec(), + b"key13".to_vec(), + b"key14".to_vec(), ]; let values = [ b"value1".to_vec(), @@ -1608,6 +1869,9 @@ fn test_path_query_proofs_with_default_subquery() { b"value9".to_vec(), b"value10".to_vec(), b"value11".to_vec(), + b"value12".to_vec(), + b"value13".to_vec(), + b"value14".to_vec(), ]; let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); @@ -1629,7 +1893,7 @@ fn test_path_query_proofs_with_subquery_path() { let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1653,7 +1917,7 @@ fn test_path_query_proofs_with_subquery_path() { query.set_subquery(subq); let path_query = PathQuery::new_unsized(vec![], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should 
execute proof"); assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); @@ -1677,7 +1941,7 @@ fn test_path_query_proofs_with_subquery_path() { let path_query = PathQuery::new_unsized(vec![b"deep_leaf".to_vec(), b"deep_node_1".to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); @@ -1719,7 +1983,7 @@ fn test_path_query_proofs_with_subquery_path() { query.set_subquery(subq); let path_query = PathQuery::new_unsized(vec![], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); @@ -1741,7 +2005,7 @@ fn test_path_query_proofs_with_key_and_subquery() { let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1777,7 +2041,7 @@ fn test_path_query_proofs_with_conditional_subquery() { query.set_subquery(subquery); let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1789,6 +2053,7 @@ fn test_path_query_proofs_with_conditional_subquery() { b"deeper_3".to_vec(), b"key10".to_vec(), 
b"key11".to_vec(), + b"deeper_5".to_vec(), ]; assert_eq!(result_set.len(), keys.len()); @@ -1820,7 +2085,7 @@ fn test_path_query_proofs_with_conditional_subquery() { query.set_subquery(subquery); let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1865,7 +2130,7 @@ fn test_path_query_proofs_with_sized_query() { final_conditional_subquery.insert_all(); let mut final_default_subquery = Query::new(); - final_default_subquery.insert_range_inclusive(b"key3".to_vec()..=b"key6".to_vec()); + final_default_subquery.insert_range_inclusive(b"key4".to_vec()..=b"key6".to_vec()); subquery.add_conditional_subquery( QueryItem::Key(b"deeper_4".to_vec()), @@ -1878,9 +2143,9 @@ fn test_path_query_proofs_with_sized_query() { let path_query = PathQuery::new( vec![DEEP_LEAF.to_vec()], - SizedQuery::new(query, Some(3), Some(1)), + SizedQuery::new(query, Some(3), None), ); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1894,6 +2159,122 @@ fn test_path_query_proofs_with_sized_query() { compare_result_tuples(result_set, expected_result_set); } +#[test] +fn test_path_query_proof_with_range_subquery_and_limit() { + let db = make_deep_tree(); + + // Create a path query with a range query, subquery, and limit + let mut main_query = Query::new(); + main_query.insert_range_after(b"deeper_3".to_vec()..); + + let mut subquery = Query::new(); + subquery.insert_all(); + + main_query.set_subquery(subquery); + + let path_query = PathQuery::new( + vec![DEEP_LEAF.to_vec(), b"deep_node_2".to_vec()], + 
SizedQuery::new(main_query.clone(), Some(3), None), + ); + + // Generate proof + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + + // Verify proof + let verification_result = GroveDb::verify_query_raw(&proof, &path_query); + + match verification_result { + Ok((hash, result_set)) => { + // Check if the hash matches the root hash + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + // Check if we got the correct number of results + assert_eq!(result_set.len(), 3, "Expected 3 results due to limit"); + } + Err(e) => { + panic!("Proof verification failed: {:?}", e); + } + } + + // Now test without a limit to compare + let path_query_no_limit = PathQuery::new( + vec![DEEP_LEAF.to_vec(), b"deep_node_2".to_vec()], + SizedQuery::new(main_query.clone(), None, None), + ); + + let proof_no_limit = db.prove_query(&path_query_no_limit, None).unwrap().unwrap(); + let verification_result_no_limit = GroveDb::verify_query_raw(&proof_no_limit, &path_query_no_limit); + + match verification_result_no_limit { + Ok((hash, result_set)) => { + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5, "Expected 5 results without limit"); + } + Err(e) => { + panic!("Proof verification failed (no limit): {:?}", e); + } + } +} + +#[test] +fn test_path_query_proof_with_range_subquery_and_limit_with_sum_trees() { + let db = make_deep_tree_with_sum_trees(); + + // Create a path query with a range query, subquery, and limit + let mut main_query = Query::new(); + main_query.insert_key(b"a".to_vec()); + main_query.insert_range_after(b"b".to_vec()..); + + let mut subquery = Query::new(); + subquery.insert_all(); + + main_query.set_subquery(subquery); + + main_query.add_conditional_subquery(QueryItem::Key(b"a".to_vec()), None, None); + + let path_query = PathQuery::new( + vec![DEEP_LEAF.to_vec(), b"deep_node_1".to_vec()], + SizedQuery::new(main_query.clone(), Some(3), None), + ); + + let non_proved_result_elements = db.query(&path_query, false, 
false, false, QueryResultType::QueryPathKeyElementTrioResultType, None).unwrap().expect("expected query to execute").0; + + assert_eq!(non_proved_result_elements.len(), 3, "Expected 3 results due to limit"); + + let key_elements = non_proved_result_elements.to_key_elements(); + + assert_eq!(key_elements, vec![(vec![97], Element::new_item("storage".as_bytes().to_vec())), (vec![49], Element::SumTree(Some(vec![0;32]), 2, None)), (vec![48], Element::new_item("v1".as_bytes().to_vec()))]); + + // Generate proof + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + + // Verify proof + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).expect("proof verification failed"); + + // Check if the hash matches the root hash + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + // Check if we got the correct number of results + assert_eq!(result_set.len(), 3, "Expected 3 results due to limit"); + + // Now test without a limit to compare + let path_query_no_limit = PathQuery::new( + vec![DEEP_LEAF.to_vec(), b"deep_node_2".to_vec()], + SizedQuery::new(main_query.clone(), None, None), + ); + + let proof_no_limit = db.prove_query(&path_query_no_limit, None).unwrap().unwrap(); + let verification_result_no_limit = GroveDb::verify_query_raw(&proof_no_limit, &path_query_no_limit); + + match verification_result_no_limit { + Ok((hash, result_set)) => { + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5, "Expected 5 results without limit"); + } + Err(e) => { + panic!("Proof verification failed (no limit): {:?}", e); + } + } +} + #[test] fn test_path_query_proofs_with_direction() { let temp_db = make_deep_tree(); @@ -1921,17 +2302,17 @@ fn test_path_query_proofs_with_direction() { let path_query = PathQuery::new( vec![DEEP_LEAF.to_vec()], - SizedQuery::new(query, Some(3), Some(1)), + SizedQuery::new(query, Some(4), None), ); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = 
temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); + assert_eq!(result_set.len(), 4); - let keys = [b"key10".to_vec(), b"key6".to_vec(), b"key5".to_vec()]; - let values = [b"value10".to_vec(), b"value6".to_vec(), b"value5".to_vec()]; + let keys = [b"key11".to_vec(), b"key10".to_vec(), b"key6".to_vec(), b"key5".to_vec()]; + let values = [b"value11".to_vec(), b"value10".to_vec(), b"value6".to_vec(), b"value5".to_vec()]; let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); compare_result_tuples(result_set, expected_result_set); @@ -1951,12 +2332,12 @@ fn test_path_query_proofs_with_direction() { let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 11); + assert_eq!(result_set.len(), 14); let keys = [ b"key4".to_vec(), @@ -1965,6 +2346,9 @@ fn test_path_query_proofs_with_direction() { b"key1".to_vec(), b"key2".to_vec(), b"key3".to_vec(), + b"key12".to_vec(), + b"key13".to_vec(), + b"key14".to_vec(), b"key10".to_vec(), b"key11".to_vec(), b"key7".to_vec(), @@ -1978,6 +2362,9 @@ fn test_path_query_proofs_with_direction() { b"value1".to_vec(), b"value2".to_vec(), b"value3".to_vec(), + b"value12".to_vec(), + b"value13".to_vec(), + b"value14".to_vec(), b"value10".to_vec(), b"value11".to_vec(), b"value7".to_vec(), diff --git a/grovedb/src/tests/query_tests.rs b/grovedb/src/tests/query_tests.rs index 
579b2e42..ab946e44 100644 --- a/grovedb/src/tests/query_tests.rs +++ b/grovedb/src/tests/query_tests.rs @@ -42,6 +42,7 @@ use crate::{ }, Element, GroveDb, PathQuery, SizedQuery, }; +use crate::operations::proof::ProveOptions; fn populate_tree_for_non_unique_range_subquery(db: &TempGroveDb) { // Insert a couple of subtrees first @@ -346,6 +347,76 @@ fn populate_tree_for_unique_range_subquery_with_non_unique_null_values(db: &mut } } +fn populate_tree_for_uneven_keys(db: &TempGroveDb) { + db.insert( + [TEST_LEAF].as_ref(), + "b".as_ref(), + Element::new_item(1u8.to_be_bytes().to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF].as_ref(), + "ab".as_ref(), + Element::new_item(2u8.to_be_bytes().to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF].as_ref(), + "x".as_ref(), + Element::new_item(3u8.to_be_bytes().to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF].as_ref(), + &[3;32], + Element::new_item(4u8.to_be_bytes().to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF].as_ref(), + "k".as_ref(), + Element::new_item(5u8.to_be_bytes().to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); +} + +#[test] +fn test_get_correct_order() { + let db = make_test_grovedb(); + populate_tree_for_uneven_keys(&db); + + let path = vec![TEST_LEAF.to_vec()]; + let query = Query::new_range_full(); + + let path_query = PathQuery::new_unsized(path, query.clone()); + + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); + + assert_eq!(elements, vec![vec![4], vec![2], vec![1], vec![5], vec![3]]); +} + #[test] fn test_get_range_query_with_non_unique_subquery() { let db = make_test_grovedb(); @@ -379,7 +450,7 @@ fn 
test_get_range_query_with_non_unique_subquery() { last_value.append(&mut 149_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 200); @@ -414,7 +485,7 @@ fn test_get_range_query_with_unique_subquery() { let last_value = 1991_u32.to_be_bytes().to_vec(); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 4); @@ -449,7 +520,7 @@ fn test_get_range_query_with_unique_subquery_on_references() { let last_value = 1991_u32.to_be_bytes().to_vec(); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 4); @@ -493,7 +564,7 @@ fn test_get_range_query_with_unique_subquery_with_non_unique_null_values() { let last_value = 1999_u32.to_be_bytes().to_vec(); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 115); @@ -536,7 +607,7 @@ fn 
test_get_range_query_with_unique_subquery_ignore_non_unique_null_values() { let last_value = 1999_u32.to_be_bytes().to_vec(); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 15); @@ -576,7 +647,7 @@ fn test_get_range_inclusive_query_with_non_unique_subquery() { last_value.append(&mut 149_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 400); @@ -619,7 +690,7 @@ fn test_get_range_inclusive_query_with_non_unique_subquery_on_references() { last_value.append(&mut 149_u32.to_be_bytes().to_vec()); assert!(elements.contains(&last_value)); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 400); @@ -654,7 +725,7 @@ fn test_get_range_inclusive_query_with_unique_subquery() { let last_value = 1995_u32.to_be_bytes().to_vec(); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 8); @@ -694,7 +765,7 @@ fn 
test_get_range_from_query_with_non_unique_subquery() { last_value.append(&mut 149_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 250); @@ -729,7 +800,7 @@ fn test_get_range_from_query_with_unique_subquery() { let last_value = 1999_u32.to_be_bytes().to_vec(); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 5); @@ -769,7 +840,7 @@ fn test_get_range_to_query_with_non_unique_subquery() { last_value.append(&mut 149_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 500); @@ -804,7 +875,7 @@ fn test_get_range_to_query_with_unique_subquery() { let last_value = 1994_u32.to_be_bytes().to_vec(); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 10); @@ -844,7 +915,7 @@ fn test_get_range_to_inclusive_query_with_non_unique_subquery() { 
last_value.append(&mut 149_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 550); @@ -884,7 +955,7 @@ fn test_get_range_to_inclusive_query_with_non_unique_subquery_and_key_out_of_bou last_value.append(&mut 100_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 750); @@ -919,7 +990,7 @@ fn test_get_range_to_inclusive_query_with_unique_subquery() { let last_value = 1995_u32.to_be_bytes().to_vec(); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 11); @@ -959,7 +1030,7 @@ fn test_get_range_after_query_with_non_unique_subquery() { last_value.append(&mut 149_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 200); @@ -999,7 +1070,7 @@ fn test_get_range_after_to_query_with_non_unique_subquery() { 
last_value.append(&mut 149_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 50); @@ -1041,7 +1112,7 @@ fn test_get_range_after_to_inclusive_query_with_non_unique_subquery() { last_value.append(&mut 149_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 100); @@ -1083,7 +1154,7 @@ fn test_get_range_after_to_inclusive_query_with_non_unique_subquery_and_key_out_ last_value.append(&mut 100_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 200); @@ -1129,7 +1200,7 @@ fn test_get_range_inclusive_query_with_double_non_unique_subquery() { let last_value = 109_u32.to_be_bytes().to_vec(); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 60); @@ -1170,7 +1241,7 @@ fn test_get_range_query_with_limit_and_offset() { 
last_value.append(&mut 149_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 250); @@ -1201,7 +1272,7 @@ fn test_get_range_query_with_limit_and_offset() { last_value.append(&mut 100_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 250); @@ -1233,7 +1304,7 @@ fn test_get_range_query_with_limit_and_offset() { last_value.append(&mut 104_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 55); @@ -1268,12 +1339,6 @@ fn test_get_range_query_with_limit_and_offset() { last_value.append(&mut 123_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 60); - compare_result_sets(&elements, &result_set); - query.set_subquery_key(subquery_key.clone()); query.set_subquery(subquery.clone()); @@ -1304,12 +1369,6 @@ fn 
test_get_range_query_with_limit_and_offset() { last_value.append(&mut 119_u32.to_be_bytes().to_vec()); assert_eq!(elements[elements.len() - 1], last_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 60); - compare_result_sets(&elements, &result_set); - query.set_subquery_key(subquery_key.clone()); query.set_subquery(subquery.clone()); @@ -1328,11 +1387,6 @@ fn test_get_range_query_with_limit_and_offset() { assert_eq!(elements.len(), 0); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); - query.set_subquery_key(subquery_key.clone()); query.set_subquery(subquery); @@ -1349,7 +1403,7 @@ fn test_get_range_query_with_limit_and_offset() { assert_eq!(elements.len(), 250); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 250); @@ -1377,12 +1431,6 @@ fn test_get_range_query_with_limit_and_offset() { let last_value = 1996_u32.to_be_bytes().to_vec(); assert_eq!(elements[elements.len() - 1], last_value); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - compare_result_sets(&elements, &result_set); } #[test] @@ -1524,7 +1572,7 @@ fn test_correct_child_root_hash_propagation_for_parent_in_same_batch() { ); let proof = db - .prove_query(&path_query) + .prove_query(&path_query, 
None) .unwrap() .expect("expected successful proving"); let (hash, _result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); @@ -1616,7 +1664,7 @@ fn test_mixed_level_proofs() { assert_eq!(elements.len(), 5); assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1], vec![1]]); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 5); @@ -1632,7 +1680,7 @@ fn test_mixed_level_proofs() { assert_eq!(elements.len(), 5); assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1], vec![1]]); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 5); @@ -1649,7 +1697,7 @@ fn test_mixed_level_proofs() { assert_eq!(elements.len(), 1); assert_eq!(elements, vec![vec![2]]); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 1); @@ -1667,7 +1715,7 @@ fn test_mixed_level_proofs() { assert_eq!(elements.len(), 3); assert_eq!(elements, vec![vec![2], vec![3], vec![4]]); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 3); @@ -1685,7 +1733,7 @@ fn test_mixed_level_proofs() { assert_eq!(elements.len(), 4); 
assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1]]); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 4); @@ -1699,12 +1747,6 @@ fn test_mixed_level_proofs() { assert_eq!(elements.len(), 1); assert_eq!(elements, vec![vec![1]]); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 1); - compare_result_sets(&elements, &result_set); } #[test] @@ -1799,7 +1841,7 @@ fn test_mixed_level_proofs_with_tree() { assert_eq!(elements.len(), 5); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 5); @@ -1823,7 +1865,7 @@ fn test_mixed_level_proofs_with_tree() { assert_eq!(elements.len(), 1); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 1); @@ -1970,7 +2012,7 @@ fn test_mixed_level_proofs_with_subquery_paths() { // // assert_eq!(elements.len(), 2); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 2); @@ -1987,7 +2029,7 @@ 
fn test_mixed_level_proofs_with_subquery_paths() { let path_query = PathQuery::new_unsized(path, query.clone()); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 4); @@ -2004,7 +2046,7 @@ fn test_mixed_level_proofs_with_subquery_paths() { let path_query = PathQuery::new_unsized(path, query.clone()); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 5); @@ -2024,7 +2066,7 @@ fn test_mixed_level_proofs_with_subquery_paths() { let path_query = PathQuery::new_unsized(path, query.clone()); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 8); @@ -2040,7 +2082,7 @@ fn test_proof_with_limit_zero() { SizedQuery::new(query, Some(0), Some(0)), ); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 0); @@ -2053,7 +2095,7 @@ fn test_result_set_path_after_verification() { query.insert_all(); let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let 
(hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 3); @@ -2084,7 +2126,7 @@ fn test_result_set_path_after_verification() { query.set_subquery(subq); let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 5); @@ -2120,7 +2162,7 @@ fn test_result_set_path_after_verification() { query.set_subquery(subq); let path_query = PathQuery::new_unsized(vec![], query); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 3); @@ -2163,7 +2205,7 @@ fn test_result_set_path_after_verification() { let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 4); @@ -2195,7 +2237,7 @@ fn test_verification_with_path_key_optional_element_trio() { query.insert_all(); let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); 
assert_eq!(result_set.len(), 3); @@ -2244,7 +2286,7 @@ fn test_absence_proof() { SizedQuery::new(query, Some(4), None), ); - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_with_absence_proof(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 4); @@ -2291,7 +2333,7 @@ fn test_subset_proof_verification() { let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); // first we prove non-verbose - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 5); @@ -2337,7 +2379,10 @@ fn test_subset_proof_verification() { ); // prove verbose - let verbose_proof = db.prove_verbose(&path_query).unwrap().unwrap(); + let verbose_proof = db.prove_query(&path_query, Some(ProveOptions { + is_verbose: true, + multilevel_results: false, + })).unwrap().unwrap(); assert!(verbose_proof.len() > proof.len()); // subset path query @@ -2379,13 +2424,16 @@ fn test_chained_path_query_verification() { let path_query = PathQuery::new_unsized(vec![b"deep_leaf".to_vec()], query); // first prove non verbose - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 11); + assert_eq!(result_set.len(), 14); // prove verbose - let verbose_proof = db.prove_verbose(&path_query).unwrap().unwrap(); + let verbose_proof = db.prove_query(&path_query, Some(ProveOptions { + is_verbose: true, + multilevel_results: false, + })).unwrap().unwrap(); 
assert!(verbose_proof.len() > proof.len()); // init deeper_1 path query @@ -2603,7 +2651,7 @@ fn test_query_b_depends_on_query_a() { let mut path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); // first we show that this returns the correct output - let proof = db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = db.prove_query(&path_query_one, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query(&proof, &path_query_one).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 1); @@ -2615,7 +2663,7 @@ fn test_query_b_depends_on_query_a() { let path_query_two = PathQuery::new_unsized(vec![ANOTHER_TEST_LEAF.to_vec()], query); // show that we get the correct output - let proof = db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = db.prove_query(&path_query_two, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query(&proof, &path_query_two).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 2); @@ -2625,7 +2673,10 @@ fn test_query_b_depends_on_query_a() { // now we merge the path queries let mut merged_path_queries = PathQuery::merge(vec![&path_query_one, &path_query_two]).unwrap(); merged_path_queries.query.limit = Some(3); - let proof = db.prove_verbose(&merged_path_queries).unwrap().unwrap(); + let proof = db.prove_query(&merged_path_queries, Some(ProveOptions { + is_verbose: true, + multilevel_results: false, + })).unwrap().unwrap(); // verifier only has access to the statement age > 2 // need to first get the name associated with 2 from the proof @@ -2675,7 +2726,7 @@ fn test_prove_absent_path_with_intermediate_emtpy_tree() { let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"invalid".to_vec()], query); let proof = grovedb - .prove_query(&path_query) + .prove_query(&path_query, None) .unwrap() .expect("should generate proofs"); diff --git a/grovedb/src/tests/sum_tree_tests.rs 
b/grovedb/src/tests/sum_tree_tests.rs index 6c4a7589..652a7cad 100644 --- a/grovedb/src/tests/sum_tree_tests.rs +++ b/grovedb/src/tests/sum_tree_tests.rs @@ -103,7 +103,7 @@ fn test_sum_tree_behaves_like_regular_tree() { let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"key".to_vec()], query); let proof = db - .prove_query(&path_query) + .prove_query(&path_query, None) .unwrap() .expect("should generate proof"); let (root_hash, result_set) = @@ -169,7 +169,7 @@ fn test_sum_item_behaves_like_regular_item() { let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"sumkey".to_vec()], query); let proof = db - .prove_query(&path_query) + .prove_query(&path_query, None) .unwrap() .expect("should generate proof"); let (root_hash, result_set) = diff --git a/merk/src/merk/mod.rs b/merk/src/merk/mod.rs index 28ce3f43..e9bab4f7 100644 --- a/merk/src/merk/mod.rs +++ b/merk/src/merk/mod.rs @@ -286,6 +286,14 @@ where }) } + /// Returns if the merk has a root tree set + pub fn has_root_key(&self) -> bool { + let tree = self.tree.take(); + let res = tree.is_some(); + self.tree.set(tree); + res + } + /// Returns the total sum value in the Merk tree pub fn sum(&self) -> Result, Error> { self.use_tree(|tree| match tree { diff --git a/merk/src/merk/prove.rs b/merk/src/merk/prove.rs index 7f295534..9e18b02e 100644 --- a/merk/src/merk/prove.rs +++ b/merk/src/merk/prove.rs @@ -29,14 +29,13 @@ where &self, query: Query, limit: Option, - offset: Option, ) -> CostResult { let left_to_right = query.left_to_right; - self.prove_unchecked(query, limit, offset, left_to_right) - .map_ok(|(proof, limit, offset)| { + self.prove_unchecked(query, limit, left_to_right) + .map_ok(|(proof, limit)| { let mut bytes = Vec::with_capacity(128); encode_into(proof.iter(), &mut bytes); - ProofConstructionResult::new(bytes, limit, offset) + ProofConstructionResult::new(bytes, limit) }) } @@ -55,11 +54,10 @@ where &self, query: Query, limit: Option, - offset: Option, ) -> CostResult { 
let left_to_right = query.left_to_right; - self.prove_unchecked(query, limit, offset, left_to_right) - .map_ok(|(proof, limit, offset)| ProofWithoutEncodingResult::new(proof, limit, offset)) + self.prove_unchecked(query, limit, left_to_right) + .map_ok(|(proof, limit)| ProofWithoutEncodingResult::new(proof, limit)) } /// Creates a Merkle proof for the list of queried keys. For each key in @@ -78,7 +76,6 @@ where &self, query: I, limit: Option, - offset: Option, left_to_right: bool, ) -> CostResult where @@ -95,14 +92,14 @@ where .wrap_with_cost(Default::default()) .flat_map_ok(|tree| { let mut ref_walker = RefWalker::new(tree, self.source()); - ref_walker.create_proof(query_vec.as_slice(), limit, offset, left_to_right) + ref_walker.create_proof(query_vec.as_slice(), limit, left_to_right) }) - .map_ok(|(proof, _, limit, offset, ..)| (proof, limit, offset)) + .map_ok(|(proof, _, limit, ..)| (proof, limit)) }) } } -type Proof = (LinkedList, Option, Option); +type Proof = (LinkedList, Option); /// Proof construction result pub struct ProofConstructionResult { @@ -110,17 +107,14 @@ pub struct ProofConstructionResult { pub proof: Vec, /// Limit pub limit: Option, - /// Offset - pub offset: Option, } impl ProofConstructionResult { /// New ProofConstructionResult - pub fn new(proof: Vec, limit: Option, offset: Option) -> Self { + pub fn new(proof: Vec, limit: Option) -> Self { Self { proof, limit, - offset, } } } @@ -131,17 +125,14 @@ pub struct ProofWithoutEncodingResult { pub proof: LinkedList, /// Limit pub limit: Option, - /// Offset - pub offset: Option, } impl ProofWithoutEncodingResult { /// New ProofWithoutEncodingResult - pub fn new(proof: LinkedList, limit: Option, offset: Option) -> Self { + pub fn new(proof: LinkedList, limit: Option) -> Self { Self { proof, limit, - offset, } } } diff --git a/merk/src/proofs/query/mod.rs b/merk/src/proofs/query/mod.rs index 29296efc..a26f0e56 100644 --- a/merk/src/proofs/query/mod.rs +++ b/merk/src/proofs/query/mod.rs @@ -57,7 
+57,7 @@ pub use query_item::intersect::QueryItemIntersectionResult; #[cfg(any(feature = "full", feature = "verify"))] pub use query_item::QueryItem; #[cfg(any(feature = "full", feature = "verify"))] -use verify::ProofAbsenceLimitOffset; +use verify::ProofAbsenceLimit; #[cfg(any(feature = "full", feature = "verify"))] pub use verify::{execute_proof, verify_query, ProofVerificationResult, ProvedKeyValue}; #[cfg(feature = "full")] @@ -116,6 +116,15 @@ impl Query { Self::new_with_direction(true) } + /// Creates a new query which contains all items. + pub fn new_range_full() -> Self { + Self { + items: vec![QueryItem::RangeFull(RangeFull)], + left_to_right: true, + ..Self::default() + } + } + /// Creates a new query which contains only one key. pub fn new_single_key(key: Vec) -> Self { Self { @@ -520,10 +529,9 @@ where &mut self, query: &[QueryItem], limit: Option, - offset: Option, left_to_right: bool, - ) -> CostResult { - self.create_proof(query, limit, offset, left_to_right) + ) -> CostResult { + self.create_proof(query, limit, left_to_right) } /// Generates a proof for the list of queried keys. 
Returns a tuple @@ -537,9 +545,8 @@ where &mut self, query: &[QueryItem], limit: Option, - offset: Option, left_to_right: bool, - ) -> CostResult { + ) -> CostResult { let mut cost = OperationCost::default(); // TODO: don't copy into vec, support comparing QI to byte slice @@ -557,8 +564,6 @@ where let current_node_in_query: bool; let mut node_on_non_inclusive_bounds = false; - // becomes true if the offset exists and is non zero - let mut skip_current_node = false; let (mut left_items, mut right_items) = match search { Ok(index) => { @@ -602,77 +607,63 @@ where } }; - if offset.is_none() || offset == Some(0) { - // when the limit hits zero, the rest of the query batch should be cleared - // so empty the left, right query batch, and set the current node to not found - if let Some(current_limit) = limit { - if current_limit == 0 { - left_items = &[]; - search = Err(Default::default()); - right_items = &[]; - } + // when the limit hits zero, the rest of the query batch should be cleared + // so empty the left, right query batch, and set the current node to not found + if let Some(current_limit) = limit { + if current_limit == 0 { + left_items = &[]; + search = Err(Default::default()); + right_items = &[]; } } let proof_direction = left_to_right; // signifies what direction the DFS should go - let (mut proof, left_absence, mut new_limit, mut new_offset) = if left_to_right { + let (mut proof, left_absence, mut new_limit) = if left_to_right { cost_return_on_error!( &mut cost, - self.create_child_proof(proof_direction, left_items, limit, offset, left_to_right) + self.create_child_proof(proof_direction, left_items, limit, left_to_right) ) } else { cost_return_on_error!( &mut cost, - self.create_child_proof(proof_direction, right_items, limit, offset, left_to_right) + self.create_child_proof(proof_direction, right_items, limit, left_to_right) ) }; - if let Some(current_offset) = new_offset { - if current_offset > 0 && current_node_in_query && !node_on_non_inclusive_bounds 
{ - // reserve offset slot for current node before generating proof for right - // subtree - new_offset = Some(current_offset - 1); - skip_current_node = true; - } - } - - if !skip_current_node && (new_offset.is_none() || new_offset == Some(0)) { - if let Some(current_limit) = new_limit { - // if after generating proof for the left subtree, the limit becomes 0 - // clear the current node and clear the right batch - if current_limit == 0 { + if let Some(current_limit) = new_limit { + // if after generating proof for the left subtree, the limit becomes 0 + // clear the current node and clear the right batch + if current_limit == 0 { + if left_to_right { + right_items = &[]; + } else { + left_items = &[]; + } + search = Err(Default::default()); + } else if current_node_in_query && !node_on_non_inclusive_bounds { + // if limit is not zero, reserve a limit slot for the current node + // before generating proof for the right subtree + new_limit = Some(current_limit - 1); + // if after limit slot reservation, limit becomes 0, right query + // should be cleared + if current_limit - 1 == 0 { if left_to_right { right_items = &[]; } else { left_items = &[]; } - search = Err(Default::default()); - } else if current_node_in_query && !node_on_non_inclusive_bounds { - // if limit is not zero, reserve a limit slot for the current node - // before generating proof for the right subtree - new_limit = Some(current_limit - 1); - // if after limit slot reservation, limit becomes 0, right query - // should be cleared - if current_limit - 1 == 0 { - if left_to_right { - right_items = &[]; - } else { - left_items = &[]; - } - } } } } let proof_direction = !proof_direction; // search the opposite path on second pass - let (mut right_proof, right_absence, new_limit, new_offset) = if left_to_right { + let (mut right_proof, right_absence, new_limit) = if left_to_right { cost_return_on_error!( &mut cost, self.create_child_proof( proof_direction, right_items, new_limit, - new_offset, 
left_to_right, ) ) @@ -683,7 +674,6 @@ where proof_direction, left_items, new_limit, - new_offset, left_to_right, ) ) @@ -693,7 +683,7 @@ where proof.push_back(match search { Ok(_) => { - if node_on_non_inclusive_bounds || skip_current_node { + if node_on_non_inclusive_bounds { if left_to_right { Op::Push(self.to_kvdigest_node()) } else { @@ -741,7 +731,6 @@ where proof, (left_absence.0, right_absence.1), new_limit, - new_offset, )) .wrap_with_cost(cost) } @@ -754,16 +743,15 @@ where left: bool, query: &[QueryItem], limit: Option, - offset: Option, left_to_right: bool, - ) -> CostResult { + ) -> CostResult { if !query.is_empty() { self.walk(left, None::<&fn(&[u8]) -> Option>) .flat_map_ok(|child_opt| { if let Some(mut child) = child_opt { - child.create_proof(query, limit, offset, left_to_right) + child.create_proof(query, limit, left_to_right) } else { - Ok((LinkedList::new(), (true, true), limit, offset)) + Ok((LinkedList::new(), (true, true), limit)) .wrap_with_cost(Default::default()) } }) @@ -774,9 +762,9 @@ where } else { Op::PushInverted(link.to_hash_node()) }); - Ok((proof, (false, false), limit, offset)).wrap_with_cost(Default::default()) + Ok((proof, (false, false), limit)).wrap_with_cost(Default::default()) } else { - Ok((LinkedList::new(), (false, false), limit, offset)) + Ok((LinkedList::new(), (false, false), limit)) .wrap_with_cost(Default::default()) } } @@ -875,7 +863,6 @@ mod test { .collect::>() .as_slice(), None, - None, true, ) .unwrap() @@ -893,7 +880,7 @@ mod test { query.insert_key(key.clone()); } - let result = verify_query(bytes.as_slice(), &query, None, None, true, expected_hash) + let result = verify_query(bytes.as_slice(), &query, None, true, expected_hash) .unwrap() .expect("verify failed"); @@ -1105,7 +1092,7 @@ mod test { let mut walker = RefWalker::new(&mut tree, PanicSource {}); let (proof, absence, ..) 
= walker - .create_full_proof(vec![].as_slice(), None, None, true) + .create_full_proof(vec![].as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1142,7 +1129,6 @@ mod test { bytes.as_slice(), &Query::new(), None, - None, true, tree.hash().unwrap(), ) @@ -1158,7 +1144,7 @@ mod test { let queryitems = vec![QueryItem::Key(vec![5])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1203,7 +1189,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -1219,7 +1204,7 @@ mod test { let queryitems = vec![QueryItem::Key(vec![3])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1264,7 +1249,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -1280,7 +1264,7 @@ mod test { let queryitems = vec![QueryItem::Key(vec![3]), QueryItem::Key(vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1329,7 +1313,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -1349,7 +1332,7 @@ mod test { QueryItem::Key(vec![7]), ]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1402,7 +1385,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -1421,7 +1403,7 @@ mod test { let queryitems = vec![QueryItem::Key(vec![8])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1465,7 +1447,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -1481,7 +1462,7 @@ mod test { let queryitems = vec![QueryItem::Key(vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1528,7 +1509,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -1621,7 +1601,7 @@ mod test { QueryItem::Key(vec![4]), ]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1722,7 +1702,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -1791,7 +1770,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1875,7 +1854,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -1889,7 +1867,7 @@ mod test { ], ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); + // skip 1 element let mut tree = make_tree_seq(10); @@ -1899,7 +1877,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -1913,7 +1891,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(1), true, tree.hash().unwrap(), ) @@ -1924,7 +1901,7 @@ mod test { vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip 2 elements let mut tree = make_tree_seq(10); @@ -1934,7 +1911,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -1948,7 +1925,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(2), true, tree.hash().unwrap(), ) @@ -1956,7 +1932,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(0)); + // skip all elements let mut tree = make_tree_seq(10); @@ -1966,7 +1942,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -1980,7 +1956,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(200), true, tree.hash().unwrap(), ) @@ -1988,7 +1963,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(198)); // right to left test let mut tree = make_tree_seq(10); @@ -1998,7 +1972,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(queryitems.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -2012,7 +1986,6 @@ mod test { bytes.as_slice(), &query, None, - None, false, tree.hash().unwrap(), ) @@ -2036,7 +2009,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2121,7 +2094,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -2136,7 +2108,7 @@ mod test { ], ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); + // skip 1 element let mut tree = make_tree_seq(10); @@ -2146,7 +2118,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -2160,7 +2132,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(1), true, tree.hash().unwrap(), ) @@ -2171,7 +2142,7 @@ mod test { vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip 2 elements let mut tree = make_tree_seq(10); @@ -2181,7 +2152,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -2195,7 +2166,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(2), true, tree.hash().unwrap(), ) @@ -2206,7 +2176,7 @@ mod test { vec![(vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60])], ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip all elements let mut tree = make_tree_seq(10); @@ -2216,7 +2186,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -2230,7 +2200,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(200), true, tree.hash().unwrap(), ) @@ -2238,7 +2207,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(197)); // right_to_left proof let mut tree = make_tree_seq(10); @@ -2248,7 +2216,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(queryitems.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -2262,7 +2230,6 @@ mod test { bytes.as_slice(), &query, None, - None, false, tree.hash().unwrap(), ) @@ -2285,7 +2252,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, Some(2), false) + .create_full_proof(queryitems.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -2299,7 +2266,6 @@ mod test { bytes.as_slice(), &query, None, - Some(2), false, tree.hash().unwrap(), ) @@ -2311,7 +2277,7 @@ mod test { vec![(vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60])], ); assert_eq!(res.limit, None); - assert_eq!(res.offset, Some(0)); + } #[test] @@ -2321,7 +2287,7 @@ mod test { let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2382,7 +2348,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -2393,7 +2358,7 @@ mod test { vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])], ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); + // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -2401,13 +2366,13 @@ mod test { let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::Key(vec![5])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2424,7 +2389,6 @@ mod test { bytes.as_slice(), &query, Some(1), - None, true, tree.hash().unwrap(), ) @@ -2432,7 +2396,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -2440,7 +2404,7 @@ mod test { let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(queryitems.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); @@ -2450,7 +2414,7 @@ mod test { QueryItem::Key(vec![7]), ]; let (equivalent_proof, equivalent_absence, ..) = walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2467,7 +2431,6 @@ mod test { bytes.as_slice(), &query, Some(2), - None, true, tree.hash().unwrap(), ) @@ -2475,7 +2438,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 100 items let mut tree = make_6_node_tree(); @@ -2483,13 +2446,13 @@ mod test { let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(queryitems.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2506,7 +2469,6 @@ mod test { bytes.as_slice(), &query, Some(100), - None, true, tree.hash().unwrap(), ) @@ -2517,7 +2479,7 @@ mod test { vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])], ); assert_eq!(res.limit, Some(97)); - assert_eq!(res.offset, None); + // skip 1 element let mut tree = make_6_node_tree(); @@ -2525,7 +2487,7 @@ mod test { let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -2539,7 +2501,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(1), true, tree.hash().unwrap(), ) @@ -2547,7 +2508,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip 2 elements let mut tree = make_6_node_tree(); @@ -2555,7 +2516,7 @@ mod test { let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -2569,7 +2530,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(2), true, tree.hash().unwrap(), ) @@ -2577,7 +2537,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![8], vec![8])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip all elements let mut tree = make_6_node_tree(); @@ -2585,7 +2545,7 @@ mod test { let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -2599,7 +2559,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(200), true, tree.hash().unwrap(), ) @@ -2607,7 +2566,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(197)); // right_to_left test let mut tree = make_6_node_tree(); @@ -2615,7 +2573,7 @@ mod test { let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(queryitems.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -2631,7 +2589,6 @@ mod test { bytes.as_slice(), &query, None, - None, false, tree.hash().unwrap(), ) @@ -2647,7 +2604,7 @@ mod test { let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), Some(1), false) + .create_full_proof(queryitems.as_slice(), Some(2), false) .unwrap() .expect("create_proof errored"); @@ -2663,7 +2620,6 @@ mod test { bytes.as_slice(), &query, Some(2), - Some(1), false, tree.hash().unwrap(), ) @@ -2671,7 +2627,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![7], vec![7]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + } #[test] @@ -2681,7 +2637,7 @@ mod test { let queryitems = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2765,7 +2721,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -2781,7 +2736,7 @@ mod test { ], ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); + // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -2789,13 +2744,13 @@ mod test { let queryitems = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![2])]; let (equivalent_proof, equivalent_absence, ..) = walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2812,7 +2767,6 @@ mod test { bytes.as_slice(), &query, Some(1), - None, true, tree.hash().unwrap(), ) @@ -2820,7 +2774,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -2828,13 +2782,13 @@ mod test { let queryitems = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(queryitems.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![3])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2851,7 +2805,6 @@ mod test { bytes.as_slice(), &query, Some(2), - None, true, tree.hash().unwrap(), ) @@ -2859,7 +2812,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 100 items let mut tree = make_6_node_tree(); @@ -2867,13 +2820,13 @@ mod test { let queryitems = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(queryitems.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeTo(..vec![6])]; let (equivalent_proof, equivalent_absence, ..) = walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2890,7 +2843,6 @@ mod test { bytes.as_slice(), &query, Some(100), - None, true, tree.hash().unwrap(), ) @@ -2906,7 +2858,7 @@ mod test { ], ); assert_eq!(res.limit, Some(96)); - assert_eq!(res.offset, None); + // skip 1 element let mut tree = make_6_node_tree(); @@ -2914,7 +2866,7 @@ mod test { let queryitems = vec![QueryItem::RangeTo(..vec![6])]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -2928,7 +2880,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(1), true, tree.hash().unwrap(), ) @@ -2936,7 +2887,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip 2 elements let mut tree = make_6_node_tree(); @@ -2944,7 +2895,7 @@ mod test { let queryitems = vec![QueryItem::RangeTo(..vec![6])]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -2958,7 +2909,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(2), true, tree.hash().unwrap(), ) @@ -2966,7 +2916,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip all elements let mut tree = make_6_node_tree(); @@ -2974,7 +2924,7 @@ mod test { let queryitems = vec![QueryItem::RangeTo(..vec![6])]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -2988,7 +2938,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(200), true, tree.hash().unwrap(), ) @@ -2996,7 +2945,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(196)); // right_to_left proof let mut tree = make_6_node_tree(); @@ -3004,7 +2952,7 @@ mod test { let queryitems = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(queryitems.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -3020,7 +2968,6 @@ mod test { bytes.as_slice(), &query, None, - None, false, tree.hash().unwrap(), ) @@ -3041,7 +2988,7 @@ mod test { let queryitems = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, false) + .create_full_proof(queryitems.as_slice(), Some(2), false) .unwrap() .expect("create_proof errored"); @@ -3057,7 +3004,6 @@ mod test { bytes.as_slice(), &query, Some(2), - None, false, tree.hash().unwrap(), ) @@ -3065,7 +3011,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + } #[test] @@ -3075,7 +3021,7 @@ mod test { let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3159,7 +3105,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -3175,7 +3120,7 @@ mod test { ], ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); + // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -3183,13 +3128,13 @@ mod test { let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![2])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3206,7 +3151,6 @@ mod test { bytes.as_slice(), &query, Some(1), - None, true, tree.hash().unwrap(), ) @@ -3214,7 +3158,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -3222,13 +3166,13 @@ mod test { let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(queryitems.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![3])]; let (equivalent_proof, equivalent_absence, ..) = walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3245,7 +3189,6 @@ mod test { bytes.as_slice(), &query, Some(2), - None, true, tree.hash().unwrap(), ) @@ -3253,7 +3196,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 100 items let mut tree = make_6_node_tree(); @@ -3261,13 +3204,13 @@ mod test { let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(queryitems.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3284,7 +3227,6 @@ mod test { bytes.as_slice(), &query, Some(100), - None, true, tree.hash().unwrap(), ) @@ -3300,7 +3242,7 @@ mod test { ], ); assert_eq!(res.limit, Some(96)); - assert_eq!(res.offset, None); + // skip 1 element let mut tree = make_6_node_tree(); @@ -3308,7 +3250,7 @@ mod test { let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -3322,7 +3264,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(1), true, tree.hash().unwrap(), ) @@ -3330,7 +3271,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip 2 elements let mut tree = make_6_node_tree(); @@ -3338,7 +3279,7 @@ mod test { let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -3352,7 +3293,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(2), true, tree.hash().unwrap(), ) @@ -3360,7 +3300,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip all elements let mut tree = make_6_node_tree(); @@ -3368,7 +3308,7 @@ mod test { let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -3382,7 +3322,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(200), true, tree.hash().unwrap(), ) @@ -3390,7 +3329,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(196)); // right_to_left proof let mut tree = make_6_node_tree(); @@ -3398,7 +3336,7 @@ mod test { let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(queryitems.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -3414,7 +3352,6 @@ mod test { bytes.as_slice(), &query, None, - None, false, tree.hash().unwrap(), ) @@ -3435,7 +3372,7 @@ mod test { let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), false) + .create_full_proof(queryitems.as_slice(), Some(1), false) .unwrap() .expect("create_proof errored"); @@ -3451,7 +3388,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(1), false, tree.hash().unwrap(), ) @@ -3459,7 +3395,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + } #[test] @@ -3469,7 +3405,7 @@ mod test { let queryitems = vec![RangeAfter(vec![3]..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3553,7 +3489,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -3569,7 +3504,7 @@ mod test { ], ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); + // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -3577,13 +3512,13 @@ mod test { let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; let (equivalent_proof, equivalent_absence, ..) = walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3600,7 +3535,6 @@ mod test { bytes.as_slice(), &query, Some(1), - None, true, tree.hash().unwrap(), ) @@ -3608,7 +3542,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -3616,13 +3550,13 @@ mod test { let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(queryitems.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3639,7 +3573,6 @@ mod test { bytes.as_slice(), &query, Some(2), - None, true, tree.hash().unwrap(), ) @@ -3647,7 +3580,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 100 items let mut tree = make_6_node_tree(); @@ -3655,13 +3588,13 @@ mod test { let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(queryitems.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; let (equivalent_proof, equivalent_absence, ..) = walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3678,7 +3611,6 @@ mod test { bytes.as_slice(), &query, Some(100), - None, true, tree.hash().unwrap(), ) @@ -3694,7 +3626,7 @@ mod test { ], ); assert_eq!(res.limit, Some(96)); - assert_eq!(res.offset, None); + // skip 1 element let mut tree = make_6_node_tree(); @@ -3702,7 +3634,7 @@ mod test { let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -3716,7 +3648,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(1), true, tree.hash().unwrap(), ) @@ -3724,7 +3655,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip 2 elements let mut tree = make_6_node_tree(); @@ -3732,7 +3663,7 @@ mod test { let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -3746,7 +3677,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(2), true, tree.hash().unwrap(), ) @@ -3754,7 +3684,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip all elements let mut tree = make_6_node_tree(); @@ -3762,7 +3692,7 @@ mod test { let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -3776,7 +3706,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(200), true, tree.hash().unwrap(), ) @@ -3784,7 +3713,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(196)); // right_to_left proof let mut tree = make_6_node_tree(); @@ -3792,7 +3720,7 @@ mod test { let queryitems = vec![RangeAfter(vec![3]..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(queryitems.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -3808,7 +3736,6 @@ mod test { bytes.as_slice(), &query, None, - None, false, tree.hash().unwrap(), ) @@ -3829,7 +3756,7 @@ mod test { let queryitems = vec![RangeAfter(vec![3]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(3), None, false) + .create_full_proof(queryitems.as_slice(), Some(3), false) .unwrap() .expect("create_proof errored"); @@ -3845,7 +3772,6 @@ mod test { bytes.as_slice(), &query, Some(3), - None, false, tree.hash().unwrap(), ) @@ -3856,7 +3782,7 @@ mod test { vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])], ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + } #[test] @@ -3866,7 +3792,7 @@ mod test { let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3945,7 +3871,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -3953,7 +3878,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); + // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -3961,13 +3886,13 @@ mod test { let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3984,7 +3909,6 @@ mod test { bytes.as_slice(), &query, Some(1), - None, true, tree.hash().unwrap(), ) @@ -3992,7 +3916,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -4000,13 +3924,13 @@ mod test { let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(queryitems.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; let (equivalent_proof, equivalent_absence, ..) = walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4023,7 +3947,6 @@ mod test { bytes.as_slice(), &query, Some(2), - None, true, tree.hash().unwrap(), ) @@ -4031,7 +3954,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 100 items let mut tree = make_6_node_tree(); @@ -4039,13 +3962,13 @@ mod test { let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(queryitems.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4062,7 +3985,6 @@ mod test { bytes.as_slice(), &query, Some(100), - None, true, tree.hash().unwrap(), ) @@ -4070,7 +3992,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(98)); - assert_eq!(res.offset, None); + // skip 1 element let mut tree = make_6_node_tree(); @@ -4078,7 +4000,7 @@ mod test { let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -4092,7 +4014,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(1), true, tree.hash().unwrap(), ) @@ -4100,7 +4021,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip 2 elements let mut tree = make_6_node_tree(); @@ -4108,7 +4029,7 @@ mod test { let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -4122,7 +4043,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(2), true, tree.hash().unwrap(), ) @@ -4130,7 +4050,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(0)); + // skip all elements let mut tree = make_6_node_tree(); @@ -4138,7 +4058,7 @@ mod test { let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -4152,7 +4072,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(200), true, tree.hash().unwrap(), ) @@ -4160,7 +4079,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(198)); // right_to_left let mut tree = make_6_node_tree(); @@ -4168,7 +4086,7 @@ mod test { let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(queryitems.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -4184,7 +4102,6 @@ mod test { bytes.as_slice(), &query, None, - None, false, tree.hash().unwrap(), ) @@ -4197,7 +4114,7 @@ mod test { let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(300), Some(1), false) + .create_full_proof(queryitems.as_slice(), Some(300), false) .unwrap() .expect("create_proof errored"); @@ -4213,7 +4130,6 @@ mod test { bytes.as_slice(), &query, Some(300), - Some(1), false, tree.hash().unwrap(), ) @@ -4221,7 +4137,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(299)); - assert_eq!(res.offset, Some(0)); + } #[test] @@ -4231,7 +4147,7 @@ mod test { let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4309,7 +4225,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -4320,7 +4235,7 @@ mod test { vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])], ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); + // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -4328,13 +4243,13 @@ mod test { let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; let (equivalent_proof, equivalent_absence, ..) = walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4351,7 +4266,6 @@ mod test { bytes.as_slice(), &query, Some(1), - None, true, tree.hash().unwrap(), ) @@ -4359,7 +4273,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -4367,13 +4281,13 @@ mod test { let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(queryitems.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4390,7 +4304,6 @@ mod test { bytes.as_slice(), &query, Some(2), - None, true, tree.hash().unwrap(), ) @@ -4398,7 +4311,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 100 items let mut tree = make_6_node_tree(); @@ -4406,13 +4319,13 @@ mod test { let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(queryitems.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (equivalent_proof, equivalent_absence, ..) = walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4429,7 +4342,6 @@ mod test { bytes.as_slice(), &query, Some(100), - None, true, tree.hash().unwrap(), ) @@ -4440,7 +4352,7 @@ mod test { vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])], ); assert_eq!(res.limit, Some(97)); - assert_eq!(res.offset, None); + // skip 1 element let mut tree = make_6_node_tree(); @@ -4448,7 +4360,7 @@ mod test { let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -4462,7 +4374,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(1), true, tree.hash().unwrap(), ) @@ -4470,7 +4381,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip 2 elements let mut tree = make_6_node_tree(); @@ -4478,7 +4389,7 @@ mod test { let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -4492,7 +4403,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(2), true, tree.hash().unwrap(), ) @@ -4500,7 +4410,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip all elements let mut tree = make_6_node_tree(); @@ -4508,7 +4418,7 @@ mod test { let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -4522,7 +4432,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(200), true, tree.hash().unwrap(), ) @@ -4530,7 +4439,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(197)); // right_to_left proof // let mut tree = make_6_node_tree(); @@ -4574,7 +4482,7 @@ mod test { let queryitems = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4664,7 +4572,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -4682,7 +4589,7 @@ mod test { ], ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); + // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -4690,13 +4597,13 @@ mod test { let queryitems = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![2])]; let (equivalent_proof, equivalent_absence, ..) = walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4713,7 +4620,6 @@ mod test { bytes.as_slice(), &query, Some(1), - None, true, tree.hash().unwrap(), ) @@ -4721,7 +4627,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -4729,13 +4635,13 @@ mod test { let queryitems = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(queryitems.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![3])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4752,7 +4658,6 @@ mod test { bytes.as_slice(), &query, Some(2), - None, true, tree.hash().unwrap(), ) @@ -4760,7 +4665,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + // Limit result set to 100 items let mut tree = make_6_node_tree(); @@ -4768,13 +4673,13 @@ mod test { let queryitems = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(queryitems.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); let equivalent_queryitems = vec![QueryItem::RangeFull(..)]; let (equivalent_proof, equivalent_absence, ..) = walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4791,7 +4696,6 @@ mod test { bytes.as_slice(), &query, Some(100), - None, true, tree.hash().unwrap(), ) @@ -4809,7 +4713,7 @@ mod test { ], ); assert_eq!(res.limit, Some(94)); - assert_eq!(res.offset, None); + // skip 1 element let mut tree = make_6_node_tree(); @@ -4817,7 +4721,7 @@ mod test { let queryitems = vec![QueryItem::RangeFull(..)]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(3), Some(1), true) + .create_full_proof(queryitems.as_slice(), Some(3), true) .unwrap() .expect("create_proof errored"); @@ -4831,7 +4735,6 @@ mod test { bytes.as_slice(), &query, Some(3), - Some(1), true, tree.hash().unwrap(), ) @@ -4842,7 +4745,7 @@ mod test { vec![(vec![3], vec![3]), (vec![4], vec![4]), (vec![5], vec![5])], ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip 2 elements let mut tree = make_6_node_tree(); @@ -4850,7 +4753,7 @@ mod test { let queryitems = vec![QueryItem::RangeFull(..)]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), Some(2), true) + .create_full_proof(queryitems.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); @@ -4864,7 +4767,6 @@ mod test { bytes.as_slice(), &query, Some(2), - Some(2), true, tree.hash().unwrap(), ) @@ -4872,7 +4774,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + // skip all elements let mut tree = make_6_node_tree(); @@ -4880,7 +4782,7 @@ mod test { let queryitems = vec![QueryItem::RangeFull(..)]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -4894,7 +4796,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(200), true, tree.hash().unwrap(), ) @@ -4902,7 +4803,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(194)); // right_to_left proof let mut tree = make_6_node_tree(); @@ -4910,7 +4810,7 @@ mod test { let queryitems = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(queryitems.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -4926,7 +4826,6 @@ mod test { bytes.as_slice(), &query, None, - None, false, tree.hash().unwrap(), ) @@ -4949,7 +4848,7 @@ mod test { let queryitems = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), Some(2), false) + .create_full_proof(queryitems.as_slice(), Some(2), false) .unwrap() .expect("create_proof errored"); @@ -4965,7 +4864,6 @@ mod test { bytes.as_slice(), &query, Some(2), - Some(2), false, tree.hash().unwrap(), ) @@ -4973,7 +4871,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + } #[test] @@ -4982,14 +4880,13 @@ mod test { let mut walker = RefWalker::new(&mut tree, PanicSource {}); let queryitems = vec![QueryItem::RangeFrom(vec![2]..)]; - let (proof, _, limit, offset) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + let (proof, _, limit) = walker + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); // TODO: Add this test for other range types assert_eq!(limit, Some(0)); - assert_eq!(offset, None); let mut iter = proof.iter(); assert_eq!( @@ -5047,7 +4944,6 @@ mod test { bytes.as_slice(), &query, Some(1), - None, true, tree.hash().unwrap(), ) @@ -5055,7 +4951,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); + } #[test] @@ -5065,7 +4961,7 @@ mod test { let queryitems = vec![QueryItem::RangeFrom(vec![2]..)]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) + .create_full_proof(queryitems.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -5131,7 +5027,6 @@ mod test { bytes.as_slice(), &query, Some(1), - Some(2), true, tree.hash().unwrap(), ) @@ -5139,7 +5034,7 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + } #[test] @@ -5149,7 +5044,7 @@ mod test { let queryitems = vec![QueryItem::RangeFrom(vec![3]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(queryitems.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -5235,7 +5130,6 @@ mod test { bytes.as_slice(), &query, None, - None, false, tree.hash().unwrap(), ) @@ -5262,7 +5156,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 6, 5], )]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -5346,7 +5240,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -5371,7 +5264,7 @@ mod test { QueryItem::Range(vec![0, 0, 0, 0, 0, 0, 0, 5, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7]), ]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(queryitems.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -5454,7 +5347,6 @@ mod test { bytes.as_slice(), &query, None, - None, true, tree.hash().unwrap(), ) @@ -5477,7 +5369,7 @@ mod test { query.insert_all(); let (proof, ..) 
= walker - .create_full_proof(query.items.as_slice(), None, None, true) + .create_full_proof(query.items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -5488,7 +5380,7 @@ mod test { let mut query = Query::new(); query.insert_key(vec![0, 0, 0, 0, 0, 0, 0, 6]); - let res = verify_query(bytes.as_slice(), &query, None, None, true, expected_hash) + let res = verify_query(bytes.as_slice(), &query, None, true, expected_hash) .unwrap() .unwrap(); @@ -5503,7 +5395,7 @@ mod test { query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 2]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); query.insert_range(vec![0, 0, 0, 0, 0, 0, 0, 7]..vec![0, 0, 0, 0, 0, 0, 0, 10]); let (proof, ..) = walker - .create_full_proof(query.items.as_slice(), None, None, true) + .create_full_proof(query.items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -5513,7 +5405,7 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 3]..=vec![0, 0, 0, 0, 0, 0, 0, 4]); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 7]..=vec![0, 0, 0, 0, 0, 0, 0, 8]); - let res = verify_query(bytes.as_slice(), &query, None, None, true, expected_hash) + let res = verify_query(bytes.as_slice(), &query, None, true, expected_hash) .unwrap() .unwrap(); @@ -5533,7 +5425,7 @@ mod test { query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 2]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); query.insert_range(vec![0, 0, 0, 0, 0, 0, 0, 6]..vec![0, 0, 0, 0, 0, 0, 0, 10]); let (proof, ..) 
= walker - .create_full_proof(query.items.as_slice(), None, None, true) + .create_full_proof(query.items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -5542,7 +5434,7 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 4]..=vec![0, 0, 0, 0, 0, 0, 0, 8]); - let res = verify_query(bytes.as_slice(), &query, None, None, true, expected_hash) + let res = verify_query(bytes.as_slice(), &query, None, true, expected_hash) .unwrap() .unwrap(); @@ -5563,7 +5455,7 @@ mod test { query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 1]..=vec![0, 0, 0, 0, 0, 0, 0, 3]); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 2]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); let (proof, ..) = walker - .create_full_proof(query.items.as_slice(), None, None, true) + .create_full_proof(query.items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -5572,7 +5464,7 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 1]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); - let res = verify_query(bytes.as_slice(), &query, None, None, true, expected_hash) + let res = verify_query(bytes.as_slice(), &query, None, true, expected_hash) .unwrap() .unwrap(); @@ -5592,7 +5484,7 @@ mod test { let mut query = Query::new(); query.insert_range_from(vec![0, 0, 0, 0, 0, 0, 0, 1]..); let (proof, ..) 
= walker - .create_full_proof(query.items.as_slice(), Some(5), None, true) + .create_full_proof(query.items.as_slice(), Some(5), true) .unwrap() .expect("create_proof errored"); @@ -5601,7 +5493,7 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 1]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); - let res = verify_query(bytes.as_slice(), &query, Some(5), None, true, expected_hash) + let res = verify_query(bytes.as_slice(), &query, Some(5), true, expected_hash) .unwrap() .unwrap(); @@ -5621,7 +5513,7 @@ mod test { let mut query = Query::new(); query.insert_range_from(vec![0, 0, 0, 0, 0, 0, 0, 1]..); let (proof, ..) = walker - .create_full_proof(query.items.as_slice(), None, Some(1), true) + .create_full_proof(query.items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -5630,7 +5522,7 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 1]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); - let res = verify_query(bytes.as_slice(), &query, None, Some(1), true, expected_hash) + let res = verify_query(bytes.as_slice(), &query, None, true, expected_hash) .unwrap() .unwrap(); @@ -5665,7 +5557,7 @@ mod test { let mut query = Query::new(); query.insert_range_from(vec![0, 0, 0, 0, 0, 0, 0, 1]..); let (proof, ..) 
= walker - .create_full_proof(query.items.as_slice(), Some(3), None, true) + .create_full_proof(query.items.as_slice(), Some(3), true) .unwrap() .expect("create_proof errored"); @@ -5676,7 +5568,7 @@ mod test { let mut query = Query::new(); query.insert_key(vec![0, 0, 0, 0, 0, 0, 0, 4]); assert!( - verify_query(bytes.as_slice(), &query, Some(3), None, true, expected_hash) + verify_query(bytes.as_slice(), &query, Some(3), true, expected_hash) .unwrap() .is_err() ); @@ -5686,7 +5578,7 @@ mod test { let mut query = Query::new(); query.insert_range_from(vec![0, 0, 0, 0, 0, 0, 0, 1]..); assert!( - verify_query(bytes.as_slice(), &query, Some(4), None, true, expected_hash) + verify_query(bytes.as_slice(), &query, Some(4), true, expected_hash) .unwrap() .is_err() ); @@ -5695,7 +5587,7 @@ mod test { let mut query = Query::new(); query.insert_range_from(vec![0, 0, 0, 0, 0, 0, 0, 1]..); assert!( - verify_query(bytes.as_slice(), &query, Some(2), None, true, expected_hash) + verify_query(bytes.as_slice(), &query, Some(2), true, expected_hash) .unwrap() .is_err() ); @@ -5755,7 +5647,7 @@ mod test { let mut walker = RefWalker::new(&mut tree, PanicSource {}); let (proof, ..) = walker - .create_full_proof(vec![QueryItem::Key(vec![5])].as_slice(), None, None, true) + .create_full_proof(vec![QueryItem::Key(vec![5])].as_slice(), None, true) .unwrap() .expect("failed to create proof"); let mut bytes = vec![]; @@ -5780,7 +5672,7 @@ mod test { let mut walker = RefWalker::new(&mut tree, PanicSource {}); let (proof, ..) 
= walker - .create_full_proof(vec![QueryItem::Key(vec![5])].as_slice(), None, None, true) + .create_full_proof(vec![QueryItem::Key(vec![5])].as_slice(), None, true) .unwrap() .expect("failed to create proof"); let mut bytes = vec![]; @@ -5806,7 +5698,6 @@ mod test { .collect::>() .as_slice(), None, - None, true, ) .unwrap() @@ -5819,7 +5710,7 @@ mod test { query.insert_key(key.clone()); } - let _result = verify_query(bytes.as_slice(), &query, None, None, true, [42; 32]) + let _result = verify_query(bytes.as_slice(), &query, None, true, [42; 32]) .unwrap() .expect("verify failed"); } diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs index 39ff471a..bb3283f0 100644 --- a/merk/src/proofs/query/verify.rs +++ b/merk/src/proofs/query/verify.rs @@ -12,7 +12,7 @@ use crate::{ }; #[cfg(any(feature = "full", feature = "verify"))] -pub type ProofAbsenceLimitOffset = (LinkedList, (bool, bool), Option, Option); +pub type ProofAbsenceLimit = (LinkedList, (bool, bool), Option); #[cfg(feature = "full")] /// Verify proof against expected hash @@ -52,7 +52,6 @@ pub fn execute_proof( bytes: &[u8], query: &Query, limit: Option, - offset: Option, left_to_right: bool, ) -> CostResult<(MerkHash, ProofVerificationResult), Error> { let mut cost = OperationCost::default(); @@ -62,7 +61,6 @@ pub fn execute_proof( let mut query = query.directional_iter(left_to_right).peekable(); let mut in_range = false; let mut current_limit = limit; - let mut current_offset = offset; let ops = Decoder::new(bytes); @@ -191,22 +189,6 @@ pub fn execute_proof( // this push matches the queried item if query_item.contains(key) { - // if there are still offset slots, and node is of type kvdigest - // reduce the offset counter - // also, verify that a kv node was not pushed before offset is exhausted - if let Some(offset) = current_offset { - if offset > 0 && value.is_none() { - current_offset = Some(offset - 1); - break; - } else if offset > 0 && value.is_some() { - // inserting a kv 
node before exhausting offset - return Err(Error::InvalidProofError( - "Proof returns data before offset is exhausted".to_string(), - )); - } - } - - // offset is equal to zero or none if let Some(val) = value { if let Some(limit) = current_limit { if limit == 0 { @@ -220,6 +202,11 @@ pub fn execute_proof( } } } + println!("pushing {:?}", ProvedKeyValue { + key: key.clone(), + value: val.clone(), + proof: value_hash, + }); // add data to output output.push(ProvedKeyValue { key: key.clone(), @@ -242,12 +229,16 @@ pub fn execute_proof( }; if let Node::KV(key, value) = node { + println!("going into kv"); execute_node(key, Some(value), value_hash(value).unwrap())?; } else if let Node::KVValueHash(key, value, value_hash) = node { + println!("going into kv hash"); execute_node(key, Some(value), *value_hash)?; } else if let Node::KVDigest(key, value_hash) = node { + println!("going into kv digest"); execute_node(key, None, *value_hash)?; } else if let Node::KVRefValueHash(key, value, value_hash) = node { + println!("going into kv ref value hash"); execute_node(key, Some(value), *value_hash)?; } else if in_range { // we encountered a queried range but the proof was abridged (saw a @@ -293,7 +284,6 @@ pub fn execute_proof( ProofVerificationResult { result_set: output, limit: current_limit, - offset: current_offset, }, )) .wrap_with_cost(cost) @@ -319,8 +309,6 @@ pub struct ProofVerificationResult { pub result_set: Vec, /// Limit pub limit: Option, - /// Offset - pub offset: Option, } #[cfg(any(feature = "full", feature = "verify"))] @@ -329,11 +317,10 @@ pub fn verify_query( bytes: &[u8], query: &Query, limit: Option, - offset: Option, left_to_right: bool, expected_hash: MerkHash, ) -> CostResult { - execute_proof(bytes, query, limit, offset, left_to_right) + execute_proof(bytes, query, limit, left_to_right) .map_ok(|(root_hash, verification_result)| { if root_hash == expected_hash { Ok(verification_result) diff --git a/tutorials/src/bin/proofs.rs 
b/tutorials/src/bin/proofs.rs index 173b700d..d56abbda 100644 --- a/tutorials/src/bin/proofs.rs +++ b/tutorials/src/bin/proofs.rs @@ -33,7 +33,7 @@ fn main() { .expect("expected successful get_path_query"); // Generate proof. - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); // Get hash from query proof and print to terminal along with GroveDB root hash. let (hash, _result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); diff --git a/tutorials/src/bin/replication.rs b/tutorials/src/bin/replication.rs index bfdc1782..6b5f0626 100644 --- a/tutorials/src/bin/replication.rs +++ b/tutorials/src/bin/replication.rs @@ -229,7 +229,7 @@ fn query_db(db: &GroveDb, path: &[&[u8]], key: Vec) { println!(">> {:?}", e); } - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); // Get hash from query proof and print to terminal along with GroveDB root hash. 
let (verify_hash, _) = GroveDb::verify_query(&proof, &path_query).unwrap(); println!("verify_hash: {:?}", hex::encode(verify_hash)); From b5f37c247a24991cf8ce38ee3b821ac80acac0c5 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Wed, 3 Jul 2024 08:48:24 +0700 Subject: [PATCH 03/34] new proof system --- grovedb/Cargo.toml | 1 + grovedb/src/operations.rs | 45 -- grovedb/src/operations/get/query.rs | 2 +- grovedb/src/operations/mod.rs | 20 + grovedb/src/operations/proof.rs | 39 -- grovedb/src/operations/proof/generate.rs | 26 +- grovedb/src/operations/proof/mod.rs | 11 + grovedb/src/operations/proof_v2/generate.rs | 583 ++++++++++++++++++++ grovedb/src/operations/proof_v2/mod.rs | 7 + grovedb/src/query/mod.rs | 88 +++ grovedb/src/query_result_type.rs | 85 +++ grovedb/src/tests/mod.rs | 3 +- grovedb/src/tests/query_tests.rs | 2 +- 13 files changed, 818 insertions(+), 94 deletions(-) delete mode 100644 grovedb/src/operations.rs create mode 100644 grovedb/src/operations/mod.rs delete mode 100644 grovedb/src/operations/proof.rs create mode 100644 grovedb/src/operations/proof/mod.rs create mode 100644 grovedb/src/operations/proof_v2/generate.rs create mode 100644 grovedb/src/operations/proof_v2/mod.rs diff --git a/grovedb/Cargo.toml b/grovedb/Cargo.toml index 992cb19e..10a1e0ce 100644 --- a/grovedb/Cargo.toml +++ b/grovedb/Cargo.toml @@ -19,6 +19,7 @@ grovedb-storage = { version = "1.0.0-rc.2", path = "../storage", optional = true grovedb-visualize = { version = "1.0.0-rc.2", path = "../visualize", optional = true } hex = { version = "0.4.3"} itertools = { version = "0.12.1", optional = true } +derive_more = { version = "0.99.18" } integer-encoding = { version = "4.0.0", optional = true } grovedb-costs = { version = "1.0.0-rc.2", path = "../costs", optional = true } nohash-hasher = { version = "0.2.0", optional = true } diff --git a/grovedb/src/operations.rs b/grovedb/src/operations.rs deleted file mode 100644 index 9864b0bc..00000000 --- a/grovedb/src/operations.rs 
+++ /dev/null @@ -1,45 +0,0 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - -//! 
Operations for the manipulation of GroveDB state - -#[cfg(feature = "full")] -pub(crate) mod auxiliary; -#[cfg(feature = "full")] -pub mod delete; -#[cfg(feature = "full")] -pub(crate) mod get; -#[cfg(feature = "full")] -pub mod insert; -#[cfg(feature = "full")] -pub(crate) mod is_empty_tree; -#[cfg(any(feature = "full", feature = "verify"))] -pub mod proof; - -#[cfg(feature = "full")] -pub use get::{QueryItemOrSumReturnType, MAX_REFERENCE_HOPS}; diff --git a/grovedb/src/operations/get/query.rs b/grovedb/src/operations/get/query.rs index 810a2f15..1589749c 100644 --- a/grovedb/src/operations/get/query.rs +++ b/grovedb/src/operations/get/query.rs @@ -45,7 +45,7 @@ use crate::{ reference_path::ReferencePathType, Element, Error, GroveDb, PathQuery, TransactionArg, }; -use crate::operations::proof::ProveOptions; +use crate::operations::proof_v2::ProveOptions; #[cfg(feature = "full")] #[derive(Debug, Eq, PartialEq, Clone)] diff --git a/grovedb/src/operations/mod.rs b/grovedb/src/operations/mod.rs new file mode 100644 index 00000000..01f26ae1 --- /dev/null +++ b/grovedb/src/operations/mod.rs @@ -0,0 +1,20 @@ +//! 
Operations for the manipulation of GroveDB state + +#[cfg(feature = "full")] +pub(crate) mod auxiliary; +#[cfg(feature = "full")] +pub mod delete; +#[cfg(feature = "full")] +pub(crate) mod get; +#[cfg(feature = "full")] +pub mod insert; +#[cfg(feature = "full")] +pub(crate) mod is_empty_tree; +#[cfg(any(feature = "full", feature = "verify"))] +pub mod proof; + +#[cfg(any(feature = "full", feature = "verify"))] +pub mod proof_v2; + +#[cfg(feature = "full")] +pub use get::{QueryItemOrSumReturnType, MAX_REFERENCE_HOPS}; diff --git a/grovedb/src/operations/proof.rs b/grovedb/src/operations/proof.rs deleted file mode 100644 index 88624f77..00000000 --- a/grovedb/src/operations/proof.rs +++ /dev/null @@ -1,39 +0,0 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - -//! 
Proof operations - -#[cfg(feature = "full")] -mod generate; -#[cfg(any(feature = "full", feature = "verify"))] -pub mod util; -#[cfg(any(feature = "full", feature = "verify"))] -pub mod verify; - -#[cfg(feature = "full")] -pub use generate::ProveOptions; diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index 01b96b07..f4974bd6 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -32,6 +32,7 @@ // that supports multiple implementations for verbose and non-verbose // generation +use std::collections::BTreeMap; use grovedb_costs::{ cost_return_on_error, cost_return_on_error_default, cost_return_on_error_no_add, CostResult, CostsExt, OperationCost, @@ -41,6 +42,7 @@ use grovedb_merk::{ tree::value_hash, KVIterator, Merk, ProofWithoutEncodingResult, }; +use grovedb_merk::proofs::query::{Key, Path}; use grovedb_path::SubtreePath; use grovedb_storage::StorageContext; @@ -54,7 +56,7 @@ use crate::{ versioning::{prepend_version_to_bytes, PROOF_VERSION}, Element, Error, GroveDb, PathQuery, Query, }; -use crate::query_result_type::QueryResultType; +use crate::query_result_type::{BTreeMapLevelResult, QueryResultType}; #[derive(Debug, Clone, Copy)] pub struct ProveOptions { @@ -162,9 +164,11 @@ impl GroveDb { let result = cost_return_on_error!( &mut cost, self.query(path_query, false, true, false, QueryResultType::QueryPathKeyElementTrioResultType, None)).0; - Some(result.to_path_to_key_elements_btree_map()) + Some(result.to_btree_map_level_results()) }; + println!("precomputed results are {:?}", precomputed_result_map); + cost_return_on_error!( &mut cost, self.prove_subqueries( @@ -173,7 +177,8 @@ impl GroveDb { path_query, &mut limit, true, - is_verbose + is_verbose, + &precomputed_result_map, ) ); cost_return_on_error!( @@ -194,6 +199,7 @@ impl GroveDb { current_limit: &mut Option, is_first_call: bool, is_verbose: bool, + precomputed_results: &Option ) -> CostResult<(), Error> 
{ let mut cost = OperationCost::default(); let mut to_add_to_result_set: u16 = 0; @@ -210,6 +216,8 @@ impl GroveDb { return Ok(()).wrap_with_cost(cost); } + let precomputed_items_count = precomputed_results.as_ref().map(|level_results| level_results.len_of_values_at_path(path.as_slice())); + let reached_limit = current_limit.map_or(false, |limit| limit == 0); if reached_limit { if is_first_call { @@ -232,7 +240,6 @@ impl GroveDb { let mut is_leaf_tree = true; - let mut offset_inc = 0; let mut limit_inc = 0; let mut kv_iterator = KVIterator::new(subtree.storage.raw_iter(), &query.query.query) @@ -244,7 +251,6 @@ impl GroveDb { let mut encountered_absence = false; let element = cost_return_on_error_no_add!(&cost, raw_decode(&value_bytes)); - println!("Element is {:?}", element); match element { Element::Tree(root_key, _) | Element::SumTree(root_key, ..) => { let (mut subquery_path, subquery_value) = @@ -264,6 +270,11 @@ impl GroveDb { // if the element is a non-empty tree then current tree is not a leaf tree if is_leaf_tree { + let proof_token_type = if precomputed_items_count.is_some() { + ProofTokenType::SizedMerk + } else { + ProofTokenType::Merk + }; is_leaf_tree = false; cost_return_on_error!( &mut cost, @@ -271,8 +282,8 @@ impl GroveDb { &path.as_slice().into(), &subtree, &query.query.query, - None, - ProofTokenType::Merk, + precomputed_items_count, + proof_token_type, proofs, is_verbose, path.iter().last().unwrap_or(&Default::default()) @@ -417,6 +428,7 @@ impl GroveDb { current_limit, false, is_verbose, + precomputed_results, ) ); diff --git a/grovedb/src/operations/proof/mod.rs b/grovedb/src/operations/proof/mod.rs new file mode 100644 index 00000000..f505b3eb --- /dev/null +++ b/grovedb/src/operations/proof/mod.rs @@ -0,0 +1,11 @@ +//! 
Proof operations + +// #[cfg(feature = "full")] +// mod generate; +#[cfg(any(feature = "full", feature = "verify"))] +pub mod util; +#[cfg(any(feature = "full", feature = "verify"))] +pub mod verify; + +// #[cfg(feature = "full")] +// pub use generate::ProveOptions; diff --git a/grovedb/src/operations/proof_v2/generate.rs b/grovedb/src/operations/proof_v2/generate.rs new file mode 100644 index 00000000..93032723 --- /dev/null +++ b/grovedb/src/operations/proof_v2/generate.rs @@ -0,0 +1,583 @@ +//! Generate proof operations + +use std::collections::BTreeMap; +use bincode::{Decode, Encode}; +use derive_more::From; +use grovedb_costs::{ + cost_return_on_error, cost_return_on_error_default, cost_return_on_error_no_add, CostResult, + CostsExt, OperationCost, +}; +use grovedb_merk::{ + proofs::{encode_into, Node, Op}, + tree::value_hash, + KVIterator, Merk, ProofWithoutEncodingResult, +}; +use grovedb_merk::proofs::query::{Key, QueryItem}; +use grovedb_path::SubtreePath; +use grovedb_storage::StorageContext; + +use crate::{ + element::helpers::raw_decode, + operations::proof::util::{ + increase_limit_by, reduce_limit_by, write_slice_of_slice_to_slice, + write_slice_to_vec, write_to_vec, ProofTokenType, + }, + reference_path::path_from_reference_path_type, + Element, Error, GroveDb, PathQuery, Query, +}; +use crate::query_result_type::{BTreeMapLevelResult, BTreeMapLevelResultOrItem, QueryResultType}; + +#[derive(Debug, Clone, Copy)] +pub struct ProveOptions { + pub is_verbose: bool, + pub multilevel_results: bool, +} + +impl Default for ProveOptions { + fn default() -> Self { + ProveOptions { + is_verbose: false, + multilevel_results: false, + } + } +} + +#[derive(Encode, Decode)] +pub struct LayerProof { + pub merk_proof : Vec, + pub lower_layers : BTreeMap +} + +#[derive(Encode, Decode, From)] +pub enum GroveDBProof { + V0(GroveDBProofV0) +} + +#[derive(Encode, Decode)] +pub struct GroveDBProofV0 { + pub root_layer : LayerProof, +} + +impl GroveDb { + /// Prove one or 
more path queries. + /// If we have more than one path query, we merge into a single path query before + /// proving. + pub fn prove_query_many(&self, query: Vec<&PathQuery>, prove_options: Option) -> CostResult, Error> { + if query.len() > 1 { + let query = cost_return_on_error_default!(PathQuery::merge(query)); + self.prove_query(&query, prove_options) + } else { + self.prove_query(query[0], prove_options) + } + } + + /// Generate a minimalistic proof for a given path query + /// doesn't allow for subset verification + /// Proofs generated with this can only be verified by the path query used + /// to generate them. + pub fn prove_query(&self, query: &PathQuery, prove_options: Option) -> CostResult, Error> { + self.prove_internal_serialized(query, prove_options) + } + + /// Generates a proof and serializes it + fn prove_internal_serialized(&self, path_query: &PathQuery, prove_options: Option) -> CostResult, Error> { + let mut cost = OperationCost::default(); + let proof = cost_return_on_error!(&mut cost, self.prove_internal(path_query, prove_options)); + let config = bincode::config::standard() + .with_big_endian() + .with_no_limit(); + let encoded_proof = cost_return_on_error_no_add!(&cost, bincode::encode_to_vec(proof, config).map_err(|e| Error::CorruptedData(format!("unable to encode proof {}", e)))); + Ok(encoded_proof).wrap_with_cost(cost) + } + + /// Generates a proof + fn prove_internal(&self, path_query: &PathQuery, prove_options: Option) -> CostResult { + let ProveOptions { + is_verbose, multilevel_results + } = prove_options.unwrap_or_default(); + let mut cost = OperationCost::default(); + + if path_query.query.offset.is_some() && path_query.query.offset != Some(0) { + return Err(Error::InvalidQuery("proved path queries can not have offsets")).wrap_with_cost(cost); + } + + // we want to query raw because we want the references to not be resolved at this point + + let precomputed_result_map = cost_return_on_error!( + &mut cost, + 
self.query_raw(path_query, false, true, false, QueryResultType::QueryPathKeyElementTrioResultType, None)).0.to_btree_map_level_results(); + + + println!("precomputed results are {:?}", precomputed_result_map); + + let root_layer = cost_return_on_error!( + &mut cost, + self.prove_subqueries( + vec![], + path_query, + precomputed_result_map, + ) + ); + + Ok(GroveDBProofV0 { + root_layer + }.into()).wrap_with_cost(cost) + } + + /// Perform a pre-order traversal of the tree based on the provided + /// subqueries + fn prove_subqueries( + &self, + path: Vec<&[u8]>, + path_query: &PathQuery, + layer_precomputed_results: BTreeMapLevelResult, + ) -> CostResult { + + let mut cost = OperationCost::default(); + + let (query_at_path, left_to_right) = cost_return_on_error_no_add!( + &cost, + path_query.query_items_at_path(path.as_slice()).ok_or(Error::CorruptedPath("path should be part of path_query"))); + + + + let subtree = cost_return_on_error!( + &mut cost, + self.open_non_transactional_merk_at_path(path.as_slice().into(), None) + ); + + let limit = layer_precomputed_results.key_values.len(); + + let merk_proof = cost_return_on_error!(&mut cost, self.generate_merk_proof( + &path.as_slice().into(), + &subtree, + &query_at_path, + left_to_right, + Some(limit as u16), + )); + + let lower_layers = cost_return_on_error_no_add!( + &cost, + layer_precomputed_results.key_values.into_iter().filter_map(|(key, value)| { + match value { + BTreeMapLevelResultOrItem::BTreeMapLevelResult(layer) => { + let mut lower_path = path.clone(); + lower_path.push(key.as_slice()); + match self.prove_subqueries(lower_path, path_query, layer).unwrap_add_cost(&mut cost) { + Ok(layer_proof) => { + Some(Ok((key, layer_proof))) + } + Err(e) => { Some(Err(e))} + } + } + BTreeMapLevelResultOrItem::ResultItem(_) => { + None + } + } + + }).collect::, Error>>()); + + Ok(LayerProof { + merk_proof, + lower_layers + }).wrap_with_cost(cost) + } + + /// Generates query proof given a subtree and appends the result to 
a proof + /// list + fn generate_merk_proof<'a, S, B>( + &self, + path: &SubtreePath, + subtree: &'a Merk, + query_items: &Vec, + left_to_right: bool, + limit: Option, + ) -> CostResult, Error> + where + S: StorageContext<'a> + 'a, + B: AsRef<[u8]>, + { + let mut cost = OperationCost::default(); + + let mut proof_result = cost_return_on_error_no_add!( + &cost, + subtree + .prove_unchecked(query_items.clone(), limit, left_to_right) + .unwrap() + .map_err(|_e| Error::InternalError("failed to generate proof")) + ); + + cost_return_on_error!(&mut cost, self.post_process_merk_proof(path, &mut proof_result)); + + let mut proof_bytes = Vec::with_capacity(128); + encode_into(proof_result.iter(), &mut proof_bytes); + + Ok(proof_bytes).wrap_with_cost(cost) + } + + /// Serializes a path and add it to the proof vector + fn generate_and_store_path_proof( + path: Vec<&[u8]>, + proofs: &mut Vec, + ) -> CostResult<(), Error> { + let cost = OperationCost::default(); + + cost_return_on_error_no_add!( + &cost, + write_to_vec(proofs, &[ProofTokenType::PathInfo.into()]) + ); + + cost_return_on_error_no_add!(&cost, write_slice_of_slice_to_slice(proofs, &path)); + + Ok(()).wrap_with_cost(cost) + } + + fn generate_and_store_absent_path_proof( + &self, + path_slices: &[&[u8]], + proof_result: &mut Vec, + is_verbose: bool, + ) -> CostResult<(), Error> { + let mut cost = OperationCost::default(); + + cost_return_on_error_no_add!( + &cost, + write_to_vec(proof_result, &[ProofTokenType::AbsentPath.into()]) + ); + let mut current_path: Vec<&[u8]> = vec![]; + + let mut split_path = path_slices.split_first(); + while let Some((key, path_slice)) = split_path { + let subtree = self + .open_non_transactional_merk_at_path(current_path.as_slice().into(), None) + .unwrap_add_cost(&mut cost); + + let Ok(subtree) = subtree else { + break; + }; + + let has_item = Element::get(&subtree, key, true).unwrap_add_cost(&mut cost); + + let mut next_key_query = Query::new(); + 
next_key_query.insert_key(key.to_vec()); + cost_return_on_error!( + &mut cost, + self.generate_and_store_merk_proof( + ¤t_path.as_slice().into(), + &subtree, + &next_key_query, + None, + ProofTokenType::Merk, + proof_result, + is_verbose, + current_path.iter().last().unwrap_or(&(&[][..])) + ) + ); + + current_path.push(key); + + if has_item.is_err() || path_slice.is_empty() { + // reached last key + break; + } + + split_path = path_slice.split_first(); + } + + Ok(()).wrap_with_cost(cost) + } + + /// Converts Items to Node::KV from Node::KVValueHash + /// Converts References to Node::KVRefValueHash and sets the value to the + /// referenced element + fn post_process_merk_proof>( + &self, + path: &SubtreePath, + proof_result: &mut ProofWithoutEncodingResult, + ) -> CostResult<(), Error> { + let mut cost = OperationCost::default(); + + for op in proof_result.proof.iter_mut() { + match op { + Op::Push(node) | Op::PushInverted(node) => match node { + Node::KV(key, value) | Node::KVValueHash(key, value, ..) 
=> { + let elem = Element::deserialize(value); + match elem { + Ok(Element::Reference(reference_path, ..)) => { + let absolute_path = cost_return_on_error!( + &mut cost, + path_from_reference_path_type( + reference_path, + &path.to_vec(), + Some(key.as_slice()) + ) + .wrap_with_cost(OperationCost::default()) + ); + + let referenced_elem = cost_return_on_error!( + &mut cost, + self.follow_reference( + absolute_path.as_slice().into(), + true, + None + ) + ); + + let serialized_referenced_elem = referenced_elem.serialize(); + if serialized_referenced_elem.is_err() { + return Err(Error::CorruptedData(String::from( + "unable to serialize element", + ))) + .wrap_with_cost(cost); + } + + *node = Node::KVRefValueHash( + key.to_owned(), + serialized_referenced_elem.expect("confirmed ok above"), + value_hash(value).unwrap_add_cost(&mut cost), + ) + } + Ok(Element::Item(..)) => { + *node = Node::KV(key.to_owned(), value.to_owned()) + } + _ => continue, + } + } + _ => continue, + }, + _ => continue, + } + } + Ok(()).wrap_with_cost(cost) + } +} +// +// #[cfg(test)] +// mod tests { +// use grovedb_merk::{execute_proof, proofs::Query}; +// use grovedb_storage::StorageBatch; +// +// use crate::{ +// operations::proof::util::{ProofReader, ProofTokenType}, +// tests::{common::EMPTY_PATH, make_deep_tree, TEST_LEAF}, +// GroveDb, +// }; +// +// #[test] +// fn test_path_info_encoding_and_decoding() { +// let path = vec![b"a".as_slice(), b"b".as_slice(), b"c".as_slice()]; +// let mut proof_vector = vec![]; +// GroveDb::generate_and_store_path_proof(path.clone(), &mut proof_vector) +// .unwrap() +// .unwrap(); +// +// let mut proof_reader = ProofReader::new(proof_vector.as_slice()); +// let decoded_path = proof_reader.read_path_info().unwrap(); +// +// assert_eq!(path, decoded_path); +// } +// +// #[test] +// fn test_reading_of_verbose_proofs() { +// let db = make_deep_tree(); +// +// let path = vec![TEST_LEAF, b"innertree"]; +// let mut query = Query::new(); +// query.insert_all(); +// 
+// let batch = StorageBatch::new(); +// +// let merk = db +// .open_non_transactional_merk_at_path( +// [TEST_LEAF, b"innertree"].as_ref().into(), +// Some(&batch), +// ) +// .unwrap() +// .unwrap(); +// let expected_root_hash = merk.root_hash().unwrap(); +// +// let mut proof = vec![]; +// db.generate_and_store_merk_proof( +// &path.as_slice().into(), +// &merk, +// &query, +// None, +// ProofTokenType::Merk, +// &mut proof, +// true, +// b"innertree", +// ) +// .unwrap() +// .unwrap(); +// assert_ne!(proof.len(), 0); +// +// let mut proof_reader = ProofReader::new(&proof); +// let (proof_token_type, proof, key) = proof_reader.read_verbose_proof().unwrap(); +// +// assert_eq!(proof_token_type, ProofTokenType::Merk); +// assert_eq!(key, Some(b"innertree".to_vec())); +// +// let (root_hash, result_set) = execute_proof(&proof, &query, None, true) +// .unwrap() +// .unwrap(); +// assert_eq!(root_hash, expected_root_hash); +// assert_eq!(result_set.result_set.len(), 3); +// +// // what is the key is empty?? 
+// let merk = db +// .open_non_transactional_merk_at_path(EMPTY_PATH, Some(&batch)) +// .unwrap() +// .unwrap(); +// let expected_root_hash = merk.root_hash().unwrap(); +// +// let mut proof = vec![]; +// db.generate_and_store_merk_proof( +// &EMPTY_PATH, +// &merk, +// &query, +// None, +// ProofTokenType::Merk, +// &mut proof, +// true, +// &[], +// ) +// .unwrap() +// .unwrap(); +// assert_ne!(proof.len(), 0); +// +// let mut proof_reader = ProofReader::new(&proof); +// let (proof_token_type, proof, key) = proof_reader.read_verbose_proof().unwrap(); +// +// assert_eq!(proof_token_type, ProofTokenType::Merk); +// assert_eq!(key, Some(vec![])); +// +// let (root_hash, result_set) = execute_proof(&proof, &query, None, true) +// .unwrap() +// .unwrap(); +// assert_eq!(root_hash, expected_root_hash); +// assert_eq!(result_set.result_set.len(), 3); +// } +// +// #[test] +// fn test_reading_verbose_proof_at_key() { +// // going to generate an array of multiple proofs with different keys +// let db = make_deep_tree(); +// let mut proofs = vec![]; +// +// let mut query = Query::new(); +// query.insert_all(); +// +// // insert all under inner tree +// let path = vec![TEST_LEAF, b"innertree"]; +// +// let batch = StorageBatch::new(); +// +// let merk = db +// .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) +// .unwrap() +// .unwrap(); +// let inner_tree_root_hash = merk.root_hash().unwrap(); +// db.generate_and_store_merk_proof( +// &path.as_slice().into(), +// &merk, +// &query, +// None, +// ProofTokenType::Merk, +// &mut proofs, +// true, +// path.iter().last().unwrap_or(&(&[][..])), +// ) +// .unwrap() +// .unwrap(); +// +// // insert all under innertree4 +// let path = vec![TEST_LEAF, b"innertree4"]; +// let merk = db +// .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) +// .unwrap() +// .unwrap(); +// let inner_tree_4_root_hash = merk.root_hash().unwrap(); +// db.generate_and_store_merk_proof( +// 
&path.as_slice().into(), +// &merk, +// &query, +// None, +// ProofTokenType::Merk, +// &mut proofs, +// true, +// path.iter().last().unwrap_or(&(&[][..])), +// ) +// .unwrap() +// .unwrap(); +// +// // insert all for deeper_1 +// let path: Vec<&[u8]> = vec![b"deep_leaf", b"deep_node_1", b"deeper_1"]; +// let merk = db +// .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) +// .unwrap() +// .unwrap(); +// let deeper_1_root_hash = merk.root_hash().unwrap(); +// db.generate_and_store_merk_proof( +// &path.as_slice().into(), +// &merk, +// &query, +// None, +// ProofTokenType::Merk, +// &mut proofs, +// true, +// path.iter().last().unwrap_or(&(&[][..])), +// ) +// .unwrap() +// .unwrap(); +// +// // read the proof at innertree +// let contextual_proof = proofs.clone(); +// let mut proof_reader = ProofReader::new(&contextual_proof); +// let (proof_token_type, proof) = proof_reader +// .read_verbose_proof_at_key(b"innertree") +// .unwrap(); +// +// assert_eq!(proof_token_type, ProofTokenType::Merk); +// +// let (root_hash, result_set) = execute_proof(&proof, &query, None, true) +// .unwrap() +// .unwrap(); +// assert_eq!(root_hash, inner_tree_root_hash); +// assert_eq!(result_set.result_set.len(), 3); +// +// // read the proof at innertree4 +// let contextual_proof = proofs.clone(); +// let mut proof_reader = ProofReader::new(&contextual_proof); +// let (proof_token_type, proof) = proof_reader +// .read_verbose_proof_at_key(b"innertree4") +// .unwrap(); +// +// assert_eq!(proof_token_type, ProofTokenType::Merk); +// +// let (root_hash, result_set) = execute_proof(&proof, &query, None, true) +// .unwrap() +// .unwrap(); +// assert_eq!(root_hash, inner_tree_4_root_hash); +// assert_eq!(result_set.result_set.len(), 2); +// +// // read the proof at deeper_1 +// let contextual_proof = proofs.clone(); +// let mut proof_reader = ProofReader::new(&contextual_proof); +// let (proof_token_type, proof) = +// 
proof_reader.read_verbose_proof_at_key(b"deeper_1").unwrap(); +// +// assert_eq!(proof_token_type, ProofTokenType::Merk); +// +// let (root_hash, result_set) = execute_proof(&proof, &query, None, true) +// .unwrap() +// .unwrap(); +// assert_eq!(root_hash, deeper_1_root_hash); +// assert_eq!(result_set.result_set.len(), 3); +// +// // read the proof at an invalid key +// let contextual_proof = proofs.clone(); +// let mut proof_reader = ProofReader::new(&contextual_proof); +// let reading_result = proof_reader.read_verbose_proof_at_key(b"unknown_key"); +// assert!(reading_result.is_err()) +// } +// } diff --git a/grovedb/src/operations/proof_v2/mod.rs b/grovedb/src/operations/proof_v2/mod.rs new file mode 100644 index 00000000..2e4ae617 --- /dev/null +++ b/grovedb/src/operations/proof_v2/mod.rs @@ -0,0 +1,7 @@ +//! Proof operations + +#[cfg(feature = "full")] +mod generate; + +#[cfg(feature = "full")] +pub use generate::ProveOptions; diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index 67451b8a..75778137 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -28,6 +28,7 @@ //! 
Queries +use std::borrow::Cow; use std::cmp::Ordering; #[cfg(any(feature = "full", feature = "verify"))] @@ -269,6 +270,93 @@ impl PathQuery { } } } + pub fn query_items_at_path<'a>(&'a self, path: &[&[u8]]) -> Option<(Cow<'a, Vec>, bool)> { + fn recursive_query_items<'b>(query: &'b Query, path: &[&[u8]]) -> Option<(Cow<'b, Vec>, bool)> { + if path.is_empty() { + return Some((Cow::Borrowed(&query.items), query.left_to_right)); + } + + let key = path[0]; + + if let Some(conditional_branches) = &query.conditional_subquery_branches { + for (query_item, subquery_branch) in conditional_branches { + if query_item.contains(key) { + if let Some(subquery_path) = &subquery_branch.subquery_path { + if path.len() <= subquery_path.len() { + if path.iter().zip(subquery_path).all(|(a, b)| *a == b.as_slice()) { + return if path.len() == subquery_path.len() { + if let Some(subquery) = &subquery_branch.subquery { + Some((Cow::Borrowed(&subquery.items), subquery.left_to_right)) + } else { + None + } + } else { + Some((Cow::Owned(vec![QueryItem::Key(subquery_path[path.len()].clone())]), true)) + } + } + } + } + + return if let Some(subquery) = &subquery_branch.subquery { + recursive_query_items(subquery, &path[1..]) + } else { + Some((Cow::Owned(vec![QueryItem::Key(key.to_vec())]), true)) + } + } + } + } + + if let Some(subquery_path) = &query.default_subquery_branch.subquery_path { + if path.len() <= subquery_path.len() { + if path.iter().zip(subquery_path).all(|(a, b)| *a == b.as_slice()) { + return if path.len() == subquery_path.len() { + if let Some(subquery) = &query.default_subquery_branch.subquery { + Some((Cow::Borrowed(&subquery.items), subquery.left_to_right)) + } else { + None + } + } else { + Some((Cow::Owned(vec![QueryItem::Key(subquery_path[path.len()].clone())]), true)) + } + } + } else if path.iter().take(subquery_path.len()).zip(subquery_path).all(|(a, b)| *a == b.as_slice()) { + if let Some(subquery) = &query.default_subquery_branch.subquery { + return 
recursive_query_items(subquery, &path[subquery_path.len()..]); + } + } + } else if let Some(subquery) = &query.default_subquery_branch.subquery { + return recursive_query_items(subquery, &path[1..]); + } + + None + } + + let self_path_len = self.path.len(); + let given_path_len = path.len(); + + match given_path_len.cmp(&self_path_len) { + Ordering::Less => { + if path.iter().zip(&self.path).all(|(a, b)| *a == b.as_slice()) { + Some((Cow::Owned(vec![QueryItem::Key(self.path[given_path_len].clone())]), true)) + } else { + None + } + } + Ordering::Equal => { + if path.iter().zip(&self.path).all(|(a, b)| *a == b.as_slice()) { + Some((Cow::Borrowed(&self.query.query.items), self.query.query.left_to_right)) + } else { + None + } + } + Ordering::Greater => { + if !self.path.iter().zip(path).all(|(a, b)| a.as_slice() == *b) { + return None; + } + recursive_query_items(&self.query.query, &path[self_path_len..]) + } + } + } } #[cfg(feature = "full")] diff --git a/grovedb/src/query_result_type.rs b/grovedb/src/query_result_type.rs index 5eb50206..e7ce2d6a 100644 --- a/grovedb/src/query_result_type.rs +++ b/grovedb/src/query_result_type.rs @@ -55,6 +55,43 @@ pub struct QueryResultElements { pub elements: Vec, } +#[derive(Debug, Clone)] +pub enum BTreeMapLevelResultOrItem { + BTreeMapLevelResult(BTreeMapLevelResult), + ResultItem(Element), +} + +/// BTreeMap level result +#[derive(Debug, Clone)] +pub struct BTreeMapLevelResult { + pub key_values : BTreeMap +} + +impl BTreeMapLevelResult { + pub fn len_of_values_at_path(&self, path: &[&[u8]]) -> u16 { + let mut current = self; + + // Traverse the path + for segment in path { + match current.key_values.get(*segment) { + Some(BTreeMapLevelResultOrItem::BTreeMapLevelResult(next_level)) => { + current = next_level; + }, + Some(BTreeMapLevelResultOrItem::ResultItem(_)) => { + // We've reached a ResultItem before the end of the path + return 0; + }, + None => { + // Path not found + return 0; + } + } + } + + current.key_values.len() 
as u16 + } +} + impl QueryResultElements { /// New pub fn new() -> Self { @@ -263,6 +300,54 @@ impl QueryResultElements { map } + /// To last path to elements btree map + /// This is useful if the key is not import + pub fn to_btree_map_level_results(self) -> BTreeMapLevelResult { + fn insert_recursive( + current_level: &mut BTreeMapLevelResult, + mut path: std::vec::IntoIter>, + key: Vec, + element: Element, + ) { + if let Some(segment) = path.next() { + let next_level = current_level.key_values + .entry(segment) + .or_insert_with(|| BTreeMapLevelResultOrItem::BTreeMapLevelResult(BTreeMapLevelResult { + key_values: BTreeMap::new() + })); + + match next_level { + BTreeMapLevelResultOrItem::BTreeMapLevelResult(inner) => { + insert_recursive(inner, path, key, element); + }, + BTreeMapLevelResultOrItem::ResultItem(_) => { + // This shouldn't happen in a well-formed structure, but we'll handle it anyway + *next_level = BTreeMapLevelResultOrItem::BTreeMapLevelResult(BTreeMapLevelResult { + key_values: BTreeMap::new() + }); + if let BTreeMapLevelResultOrItem::BTreeMapLevelResult(inner) = next_level { + insert_recursive(inner, path, key, element); + } + }, + } + } else { + current_level.key_values.insert(key, BTreeMapLevelResultOrItem::ResultItem(element)); + } + } + + let mut root = BTreeMapLevelResult { + key_values: BTreeMap::new() + }; + + for result_item in self.elements { + if let QueryResultElement::PathKeyElementTrioResultItem((path, key, element)) = result_item { + insert_recursive(&mut root, path.into_iter(), key, element); + } + } + + root + } + /// To last path to keys btree map /// This is useful if for example the element is a sum item and isn't /// important Used in Platform Drive for getting voters for multiple diff --git a/grovedb/src/tests/mod.rs b/grovedb/src/tests/mod.rs index eef5a059..fec4b867 100644 --- a/grovedb/src/tests/mod.rs +++ b/grovedb/src/tests/mod.rs @@ -50,6 +50,7 @@ use crate::{ 
query_result_type::QueryResultType::QueryKeyElementPairResultType, reference_path::ReferencePathType, tests::common::compare_result_tuples, }; +use crate::operations::proof_v2::ProveOptions; use crate::query_result_type::QueryResultType; pub const TEST_LEAF: &[u8] = b"test_leaf"; @@ -2245,7 +2246,7 @@ fn test_path_query_proof_with_range_subquery_and_limit_with_sum_trees() { assert_eq!(key_elements, vec![(vec![97], Element::new_item("storage".as_bytes().to_vec())), (vec![49], Element::SumTree(Some(vec![0;32]), 2, None)), (vec![48], Element::new_item("v1".as_bytes().to_vec()))]); // Generate proof - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let proof = db.prove_query(&path_query, Some(ProveOptions { is_verbose: false, multilevel_results: true })).unwrap().unwrap(); // Verify proof let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).expect("proof verification failed"); diff --git a/grovedb/src/tests/query_tests.rs b/grovedb/src/tests/query_tests.rs index ab946e44..fd99e581 100644 --- a/grovedb/src/tests/query_tests.rs +++ b/grovedb/src/tests/query_tests.rs @@ -42,7 +42,7 @@ use crate::{ }, Element, GroveDb, PathQuery, SizedQuery, }; -use crate::operations::proof::ProveOptions; +use crate::operations::proof_v2::ProveOptions; fn populate_tree_for_non_unique_range_subquery(db: &TempGroveDb) { // Insert a couple of subtrees first From 414acd7229a9626aa18db83711be9962dcfee388 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Thu, 4 Jul 2024 03:43:16 +0700 Subject: [PATCH 04/34] much more work on new proof system --- grovedb/src/operations/get/query.rs | 9 +- grovedb/src/operations/mod.rs | 6 +- grovedb/src/operations/proof/generate.rs | 1139 ++--- grovedb/src/operations/proof/mod.rs | 12 +- grovedb/src/operations/proof/util.rs | 371 +- grovedb/src/operations/proof/verify.rs | 1162 ++--- grovedb/src/operations/proof_old/generate.rs | 938 ++++ grovedb/src/operations/proof_old/mod.rs | 11 + 
grovedb/src/operations/proof_old/util.rs | 501 ++ grovedb/src/operations/proof_old/verify.rs | 917 ++++ grovedb/src/operations/proof_v2/generate.rs | 583 --- grovedb/src/operations/proof_v2/mod.rs | 7 - grovedb/src/query/mod.rs | 111 +- grovedb/src/query_result_type.rs | 40 +- grovedb/src/tests/mod.rs | 4675 +++++++++--------- grovedb/src/tests/query_tests.rs | 654 +-- merk/src/merk/chunks.rs | 28 - merk/src/merk/prove.rs | 42 +- merk/src/proofs/chunk.rs | 28 - merk/src/proofs/mod.rs | 28 - merk/src/proofs/query/map.rs | 28 - merk/src/proofs/query/mod.rs | 421 +- merk/src/proofs/query/verify.rs | 13 +- merk/src/proofs/tree.rs | 28 - merk/src/tree/commit.rs | 28 - merk/src/tree/encoding.rs | 28 - merk/src/tree/fuzz_tests.rs | 28 - merk/src/tree/hash.rs | 28 - merk/src/tree/iter.rs | 28 - merk/src/tree/kv.rs | 28 - merk/src/tree/link.rs | 28 - merk/src/tree/mod.rs | 28 - merk/src/tree/ops.rs | 28 - merk/src/tree/tree_feature_type.rs | 28 - merk/src/tree/walk/fetch.rs | 28 - merk/src/tree/walk/mod.rs | 28 - merk/src/tree/walk/ref_walker.rs | 28 - 37 files changed, 5984 insertions(+), 6132 deletions(-) create mode 100644 grovedb/src/operations/proof_old/generate.rs create mode 100644 grovedb/src/operations/proof_old/mod.rs create mode 100644 grovedb/src/operations/proof_old/util.rs create mode 100644 grovedb/src/operations/proof_old/verify.rs delete mode 100644 grovedb/src/operations/proof_v2/generate.rs delete mode 100644 grovedb/src/operations/proof_v2/mod.rs diff --git a/grovedb/src/operations/get/query.rs b/grovedb/src/operations/get/query.rs index 1589749c..b4e57c3b 100644 --- a/grovedb/src/operations/get/query.rs +++ b/grovedb/src/operations/get/query.rs @@ -38,14 +38,16 @@ use integer_encoding::VarInt; #[cfg(feature = "full")] use crate::element::SumValue; -use crate::{element::QueryOptions, query_result_type::PathKeyOptionalElementTrio}; +use crate::{ + element::QueryOptions, operations::proof::ProveOptions, + query_result_type::PathKeyOptionalElementTrio, +}; 
#[cfg(feature = "full")] use crate::{ query_result_type::{QueryResultElement, QueryResultElements, QueryResultType}, reference_path::ReferencePathType, Element, Error, GroveDb, PathQuery, TransactionArg, }; -use crate::operations::proof_v2::ProveOptions; #[cfg(feature = "full")] #[derive(Debug, Eq, PartialEq, Clone)] @@ -1272,8 +1274,7 @@ mod tests { let path = vec![TEST_LEAF.to_vec()]; let path_query = PathQuery::new(path, SizedQuery::new(query, Some(1000), None)); - db - .query_raw_keys_optional(&path_query, true, true, true, None) + db.query_raw_keys_optional(&path_query, true, true, true, None) .unwrap() .expect_err( "query with subquery should error if error_if_intermediate_path_tree_not_present \ diff --git a/grovedb/src/operations/mod.rs b/grovedb/src/operations/mod.rs index 01f26ae1..49518b96 100644 --- a/grovedb/src/operations/mod.rs +++ b/grovedb/src/operations/mod.rs @@ -10,11 +10,11 @@ pub(crate) mod get; pub mod insert; #[cfg(feature = "full")] pub(crate) mod is_empty_tree; -#[cfg(any(feature = "full", feature = "verify"))] -pub mod proof; +// #[cfg(any(feature = "full", feature = "verify"))] +// pub mod proof; #[cfg(any(feature = "full", feature = "verify"))] -pub mod proof_v2; +pub mod proof; #[cfg(feature = "full")] pub use get::{QueryItemOrSumReturnType, MAX_REFERENCE_HOPS}; diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index f4974bd6..31a4840e 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -1,62 +1,30 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to 
whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Generate proof operations -// TODO: entire file is due for a refactor, need some kind of path generator -// that supports multiple implementations for verbose and non-verbose -// generation - use std::collections::BTreeMap; + +use bincode::{Decode, Encode}; +use derive_more::From; use grovedb_costs::{ cost_return_on_error, cost_return_on_error_default, cost_return_on_error_no_add, CostResult, CostsExt, OperationCost, }; use grovedb_merk::{ - proofs::{encode_into, Node, Op}, + proofs::{ + encode_into, + query::{Key, QueryItem}, + Node, Op, + }, tree::value_hash, - KVIterator, Merk, ProofWithoutEncodingResult, + Merk, ProofWithoutEncodingResult, }; -use grovedb_merk::proofs::query::{Key, Path}; use grovedb_path::SubtreePath; use grovedb_storage::StorageContext; use crate::{ - element::helpers::raw_decode, - operations::proof::util::{ - increase_limit_by, reduce_limit_by, write_slice_of_slice_to_slice, - write_slice_to_vec, write_to_vec, ProofTokenType, - }, + query_result_type::{BTreeMapLevelResult, BTreeMapLevelResultOrItem, QueryResultType}, reference_path::path_from_reference_path_type, - versioning::{prepend_version_to_bytes, PROOF_VERSION}, - Element, Error, GroveDb, PathQuery, Query, + Element, Error, GroveDb, PathQuery, }; -use 
crate::query_result_type::{BTreeMapLevelResult, QueryResultType}; #[derive(Debug, Clone, Copy)] pub struct ProveOptions { @@ -73,23 +41,31 @@ impl Default for ProveOptions { } } +#[derive(Encode, Decode)] +pub struct LayerProof { + pub merk_proof: Vec, + pub lower_layers: BTreeMap, +} + +#[derive(Encode, Decode, From)] +pub enum GroveDBProof { + V0(GroveDBProofV0), +} + +#[derive(Encode, Decode)] +pub struct GroveDBProofV0 { + pub root_layer: LayerProof, +} + impl GroveDb { /// Prove one or more path queries. - /// If we more than one path query, we merge into a single path query before - /// proving. - pub fn prove_query_many(&self, query: Vec<&PathQuery>, prove_options: Option) -> CostResult, Error> { - if query.len() > 1 { - let query = cost_return_on_error_default!(PathQuery::merge(query)); - self.prove_query(&query, prove_options) - } else { - self.prove_query(query[0], prove_options) - } - } - - /// Prove one or more path queries verbose. - /// If we more than one path query, we merge into a single path query before - /// proving verbose. - pub fn prove_verbose_many(&self, query: Vec<&PathQuery>, prove_options: Option) -> CostResult, Error> { + /// If we have more than one path query, we merge into a single path query + /// before proving. + pub fn prove_query_many( + &self, + query: Vec<&PathQuery>, + prove_options: Option, + ) -> CostResult, Error> { if query.len() > 1 { let query = cost_return_on_error_default!(PathQuery::merge(query)); self.prove_query(&query, prove_options) @@ -102,550 +78,182 @@ impl GroveDb { /// doesn't allow for subset verification /// Proofs generated with this can only be verified by the path query used /// to generate them. 
- pub fn prove_query(&self, query: &PathQuery, prove_options: Option) -> CostResult, Error> { - self.prove_internal(query, prove_options) + pub fn prove_query( + &self, + query: &PathQuery, + prove_options: Option, + ) -> CostResult, Error> { + self.prove_internal_serialized(query, prove_options) } - /// Generates a verbose or non-verbose proof based on a bool - fn prove_internal(&self, path_query: &PathQuery, prove_options: Option) -> CostResult, Error> { - let ProveOptions { - is_verbose, multilevel_results - } = prove_options.unwrap_or_default(); + /// Generates a proof and serializes it + fn prove_internal_serialized( + &self, + path_query: &PathQuery, + prove_options: Option, + ) -> CostResult, Error> { let mut cost = OperationCost::default(); + let proof = + cost_return_on_error!(&mut cost, self.prove_internal(path_query, prove_options)); + let config = bincode::config::standard() + .with_big_endian() + .with_no_limit(); + let encoded_proof = cost_return_on_error_no_add!( + &cost, + bincode::encode_to_vec(proof, config) + .map_err(|e| Error::CorruptedData(format!("unable to encode proof {}", e))) + ); + Ok(encoded_proof).wrap_with_cost(cost) + } - if path_query.query.offset.is_some() && path_query.query.offset != Some(0) { - return Err(Error::InvalidQuery("proved path queries can not have offsets")).wrap_with_cost(cost); - } - - let mut proof_result = - cost_return_on_error_default!(prepend_version_to_bytes(vec![], PROOF_VERSION)); - - let path_slices = path_query.path.iter().map(|x| x.as_slice()).collect::>(); - - let subtree_exists = self - .check_subtree_exists_path_not_found(path_slices.as_slice().into(), None) - .unwrap_add_cost(&mut cost); - - // if the subtree at the given path doesn't exist, prove that this path - // doesn't point to a valid subtree - match subtree_exists { - Ok(_) => { - // subtree exists - // do nothing - } - Err(_) => { - cost_return_on_error!( - &mut cost, - self.generate_and_store_absent_path_proof( - &path_slices, - &mut 
proof_result, - is_verbose - ) - ); - // return the absence proof no need to continue proof generation - return Ok(proof_result).wrap_with_cost(cost); - } - } + /// Generates a proof + fn prove_internal( + &self, + path_query: &PathQuery, + prove_options: Option, + ) -> CostResult { + let mut cost = OperationCost::default(); - // if the subtree exists and the proof type is verbose we need to insert - // the path information to the proof - if is_verbose { - cost_return_on_error!( - &mut cost, - Self::generate_and_store_path_proof(path_slices.clone(), &mut proof_result) - ); + if path_query.query.offset.is_some() && path_query.query.offset != Some(0) { + return Err(Error::InvalidQuery( + "proved path queries can not have offsets", + )) + .wrap_with_cost(cost); } - let mut limit: Option = path_query.query.limit; - - let precomputed_result_map = if !multilevel_results || limit.is_none() { - None - } else { - let result = cost_return_on_error!( - &mut cost, - self.query(path_query, false, true, false, QueryResultType::QueryPathKeyElementTrioResultType, None)).0; - Some(result.to_btree_map_level_results()) - }; - - println!("precomputed results are {:?}", precomputed_result_map); + // we want to query raw because we want the references to not be resolved at + // this point - cost_return_on_error!( + let precomputed_result_map = cost_return_on_error!( &mut cost, - self.prove_subqueries( - &mut proof_result, - path_slices.clone(), + self.query_raw( path_query, - &mut limit, + false, true, - is_verbose, - &precomputed_result_map, + false, + QueryResultType::QueryPathKeyElementTrioResultType, + None ) - ); - cost_return_on_error!( + ) + .0 + .to_btree_map_level_results(); + + println!("precomputed results are {:?}", precomputed_result_map); + + let root_layer = cost_return_on_error!( &mut cost, - self.prove_path(&mut proof_result, path_slices, is_verbose) + self.prove_subqueries(vec![], path_query, precomputed_result_map,) ); - Ok(proof_result).wrap_with_cost(cost) + 
Ok(GroveDBProofV0 { root_layer }.into()).wrap_with_cost(cost) } /// Perform a pre-order traversal of the tree based on the provided /// subqueries fn prove_subqueries( &self, - proofs: &mut Vec, path: Vec<&[u8]>, - query: &PathQuery, - current_limit: &mut Option, - is_first_call: bool, - is_verbose: bool, - precomputed_results: &Option - ) -> CostResult<(), Error> { + path_query: &PathQuery, + layer_precomputed_results: BTreeMapLevelResult, + ) -> CostResult { let mut cost = OperationCost::default(); - let mut to_add_to_result_set: u16 = 0; + + let (query_at_path, left_to_right) = cost_return_on_error_no_add!( + &cost, + path_query + .query_items_at_path(path.as_slice()) + .ok_or(Error::CorruptedPath("path should be part of path_query")) + ); let subtree = cost_return_on_error!( &mut cost, self.open_non_transactional_merk_at_path(path.as_slice().into(), None) ); - if !subtree.has_root_key() { - cost_return_on_error_no_add!( - &cost, - write_to_vec(proofs, &[ProofTokenType::EmptyTree.into()]) - ); - return Ok(()).wrap_with_cost(cost); - } - - let precomputed_items_count = precomputed_results.as_ref().map(|level_results| level_results.len_of_values_at_path(path.as_slice())); - - let reached_limit = current_limit.map_or(false, |limit| limit == 0); - if reached_limit { - if is_first_call { - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &path.as_slice().into(), - &subtree, - &query.query.query, - *current_limit, - ProofTokenType::SizedMerk, - proofs, - is_verbose, - path.iter().last().unwrap_or(&(&[][..])) - ) - ); - } - return Ok(()).wrap_with_cost(cost); - } - - let mut is_leaf_tree = true; - let mut limit_inc = 0; + let limit = layer_precomputed_results.key_values.len(); - let mut kv_iterator = KVIterator::new(subtree.storage.raw_iter(), &query.query.query) - .unwrap_add_cost(&mut cost); - - //let mut elements_to_prove = vec![]; - - while let Some((key, value_bytes)) = kv_iterator.next_kv().unwrap_add_cost(&mut cost) { - let mut 
encountered_absence = false; - - let element = cost_return_on_error_no_add!(&cost, raw_decode(&value_bytes)); - match element { - Element::Tree(root_key, _) | Element::SumTree(root_key, ..) => { - let (mut subquery_path, subquery_value) = - Element::subquery_paths_and_value_for_sized_query(&query.query, &key); - - if subquery_value.is_none() && subquery_path.is_none() { - // this element should be added to the result set - // hence we have to update the limit and offset value - reduce_limit_by(current_limit, 1); - limit_inc += 1; - continue; - } - - if root_key.is_none() { - continue; - } - - // if the element is a non-empty tree then current tree is not a leaf tree - if is_leaf_tree { - let proof_token_type = if precomputed_items_count.is_some() { - ProofTokenType::SizedMerk - } else { - ProofTokenType::Merk - }; - is_leaf_tree = false; - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &path.as_slice().into(), - &subtree, - &query.query.query, - precomputed_items_count, - proof_token_type, - proofs, - is_verbose, - path.iter().last().unwrap_or(&Default::default()) - ) - ); - } - - let mut new_path = path.clone(); - new_path.push(key.as_ref()); - - let mut query = subquery_value; - - if query.is_some() { - if let Some(subquery_path) = &subquery_path { - for subkey in subquery_path.iter() { - let inner_subtree = cost_return_on_error!( - &mut cost, - self.open_non_transactional_merk_at_path( - new_path.as_slice().into(), - None, - ) - ); - - let mut key_as_query = Query::new(); - key_as_query.insert_key(subkey.clone()); - - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &new_path.as_slice().into(), - &inner_subtree, - &key_as_query, - None, - ProofTokenType::Merk, - proofs, - is_verbose, - new_path.iter().last().unwrap_or(&Default::default()) - ) - ); - - new_path.push(subkey); - - if self - .check_subtree_exists_path_not_found( - new_path.as_slice().into(), - None, - ) - .unwrap_add_cost(&mut cost) - .is_err() 
- { - encountered_absence = true; - break; - } - } - - if encountered_absence { - continue; - } - } - } else if let Some(subquery_path) = &mut subquery_path { - if subquery_path.is_empty() { - // nothing to do on this path, since subquery path is empty - // and there is no consecutive subquery value - continue; - } - - let last_key = subquery_path.remove(subquery_path.len() - 1); - - for subkey in subquery_path.iter() { - let inner_subtree = cost_return_on_error!( - &mut cost, - self.open_non_transactional_merk_at_path( - new_path.as_slice().into(), - None - ) - ); - - let mut key_as_query = Query::new(); - key_as_query.insert_key(subkey.clone()); - - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &new_path.as_slice().into(), - &inner_subtree, - &key_as_query, - None, - ProofTokenType::Merk, - proofs, - is_verbose, - new_path.iter().last().unwrap_or(&Default::default()) - ) - ); - - new_path.push(subkey); + let merk_proof = cost_return_on_error!( + &mut cost, + self.generate_merk_proof( + &path.as_slice().into(), + &subtree, + &query_at_path, + left_to_right, + Some(limit as u16), + ) + ); - // check if the new path points to a valid subtree - // if it does not, we should stop proof generation on this path - if self - .check_subtree_exists_path_not_found( - new_path.as_slice().into(), - None, - ) + let lower_layers = cost_return_on_error_no_add!( + &cost, + layer_precomputed_results + .key_values + .into_iter() + .filter_map(|(key, value)| { + match value { + BTreeMapLevelResultOrItem::BTreeMapLevelResult(layer) => { + let mut lower_path = path.clone(); + lower_path.push(key.as_slice()); + match self + .prove_subqueries(lower_path, path_query, layer) .unwrap_add_cost(&mut cost) - .is_err() { - encountered_absence = true; - break; + Ok(layer_proof) => Some(Ok((key, layer_proof))), + Err(e) => Some(Err(e)), } } - - if encountered_absence { - continue; - } - - let mut key_as_query = Query::new(); - key_as_query.insert_key(last_key); - 
query = Some(key_as_query); - } else { - return Err(Error::CorruptedCodeExecution("subquery_path must exist")) - .wrap_with_cost(cost); - } - - let new_path_owned = new_path.iter().map(|a| a.to_vec()).collect(); - - let new_path_query = PathQuery::new_unsized(new_path_owned, query.unwrap()); - - if self - .check_subtree_exists_path_not_found(new_path.as_slice().into(), None) - .unwrap_add_cost(&mut cost) - .is_err() - { - continue; + BTreeMapLevelResultOrItem::ResultItem(_) => None, } + }) + .collect::, Error>>() + ); - cost_return_on_error!( - &mut cost, - self.prove_subqueries( - proofs, - new_path, - &new_path_query, - current_limit, - false, - is_verbose, - precomputed_results, - ) - ); - - if *current_limit == Some(0) { - break; - } - } - _ => { - to_add_to_result_set += 1; - } - } - } - - if is_leaf_tree { - // if no useful subtree, then we care about the result set of this subtree. - // apply the sized query - increase_limit_by(current_limit, limit_inc); - let limit_offset = cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &path.as_slice().into(), - &subtree, - &query.query.query, - *current_limit, - ProofTokenType::SizedMerk, - proofs, - is_verbose, - path.iter().last().unwrap_or(&Default::default()) - ) - ); - - // update limit - *current_limit = limit_offset; - } else { - reduce_limit_by(current_limit, to_add_to_result_set); - } - - Ok(()).wrap_with_cost(cost) - } - - /// Given a path, construct and append a set of proofs that shows there is - /// a valid path from the root of the db to that point. 
- fn prove_path( - &self, - proof_result: &mut Vec, - path_slices: Vec<&[u8]>, - is_verbose: bool, - ) -> CostResult<(), Error> { - let mut cost = OperationCost::default(); - - // generate proof to show that the path leads up to the root - let mut split_path = path_slices.split_last(); - while let Some((key, path_slice)) = split_path { - let subtree = cost_return_on_error!( - &mut cost, - self.open_non_transactional_merk_at_path(path_slice.into(), None) - ); - let mut query = Query::new(); - query.insert_key(key.to_vec()); - - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &path_slice.into(), - &subtree, - &query, - None, - ProofTokenType::Merk, - proof_result, - is_verbose, - path_slice.iter().last().unwrap_or(&Default::default()) - ) - ); - split_path = path_slice.split_last(); - } - Ok(()).wrap_with_cost(cost) + Ok(LayerProof { + merk_proof, + lower_layers, + }) + .wrap_with_cost(cost) } /// Generates query proof given a subtree and appends the result to a proof /// list - fn generate_and_store_merk_proof<'a, S, B>( + fn generate_merk_proof<'a, S, B>( &self, path: &SubtreePath, subtree: &'a Merk, - query: &Query, + query_items: &Vec, + left_to_right: bool, limit: Option, - proof_token_type: ProofTokenType, - proofs: &mut Vec, - is_verbose: bool, - key: &[u8], - ) -> CostResult, Error> + ) -> CostResult, Error> where S: StorageContext<'a> + 'a, B: AsRef<[u8]>, { - if proof_token_type != ProofTokenType::Merk && proof_token_type != ProofTokenType::SizedMerk - { - return Err(Error::InvalidInput( - "expect proof type for merk proof generation to be sized or merk proof type", - )) - .wrap_with_cost(Default::default()); - } - println!("generate_and_store_merk_proof path {:?} query {:?} limit_offset {:?} proof_token_type {}", path.to_vec().into_iter().map(hex::encode).collect::>().join("/"), query, limit, proof_token_type); - let mut cost = OperationCost::default(); - // if the subtree is empty, return the EmptyTree proof op - if 
!subtree.has_root_key() { - cost_return_on_error_no_add!( - &cost, - write_to_vec(proofs, &[ProofTokenType::EmptyTree.into()]) - ); - return Ok(limit).wrap_with_cost(cost); - } - let mut proof_result = cost_return_on_error_no_add!( &cost, subtree - .prove_without_encoding(query.clone(), limit) + .prove_unchecked_query_items(query_items, limit, left_to_right) + .map_ok(|(proof, limit)| ProofWithoutEncodingResult::new(proof, limit)) .unwrap() .map_err(|_e| Error::InternalError("failed to generate proof")) ); - cost_return_on_error!(&mut cost, self.post_process_proof(path, &mut proof_result)); + cost_return_on_error!( + &mut cost, + self.post_process_merk_proof(path, &mut proof_result) + ); let mut proof_bytes = Vec::with_capacity(128); encode_into(proof_result.proof.iter(), &mut proof_bytes); - cost_return_on_error_no_add!(&cost, write_to_vec(proofs, &[proof_token_type.into()])); - - // if is verbose, write the key - if is_verbose { - cost_return_on_error_no_add!(&cost, write_slice_to_vec(proofs, key)); - } - - // write the merk proof - cost_return_on_error_no_add!(&cost, write_slice_to_vec(proofs, &proof_bytes)); - - Ok(proof_result.limit).wrap_with_cost(cost) - } - - /// Serializes a path and add it to the proof vector - fn generate_and_store_path_proof( - path: Vec<&[u8]>, - proofs: &mut Vec, - ) -> CostResult<(), Error> { - let cost = OperationCost::default(); - - cost_return_on_error_no_add!( - &cost, - write_to_vec(proofs, &[ProofTokenType::PathInfo.into()]) - ); - - cost_return_on_error_no_add!(&cost, write_slice_of_slice_to_slice(proofs, &path)); - - Ok(()).wrap_with_cost(cost) - } - - fn generate_and_store_absent_path_proof( - &self, - path_slices: &[&[u8]], - proof_result: &mut Vec, - is_verbose: bool, - ) -> CostResult<(), Error> { - let mut cost = OperationCost::default(); - - cost_return_on_error_no_add!( - &cost, - write_to_vec(proof_result, &[ProofTokenType::AbsentPath.into()]) - ); - let mut current_path: Vec<&[u8]> = vec![]; - - let mut split_path = 
path_slices.split_first(); - while let Some((key, path_slice)) = split_path { - let subtree = self - .open_non_transactional_merk_at_path(current_path.as_slice().into(), None) - .unwrap_add_cost(&mut cost); - - let Ok(subtree) = subtree else { - break; - }; - - let has_item = Element::get(&subtree, key, true).unwrap_add_cost(&mut cost); - - let mut next_key_query = Query::new(); - next_key_query.insert_key(key.to_vec()); - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - ¤t_path.as_slice().into(), - &subtree, - &next_key_query, - None, - ProofTokenType::Merk, - proof_result, - is_verbose, - current_path.iter().last().unwrap_or(&(&[][..])) - ) - ); - - current_path.push(key); - - if has_item.is_err() || path_slice.is_empty() { - // reached last key - break; - } - - split_path = path_slice.split_first(); - } - - Ok(()).wrap_with_cost(cost) + Ok(proof_bytes).wrap_with_cost(cost) } /// Converts Items to Node::KV from Node::KVValueHash /// Converts References to Node::KVRefValueHash and sets the value to the /// referenced element - fn post_process_proof>( + fn post_process_merk_proof>( &self, path: &SubtreePath, proof_result: &mut ProofWithoutEncodingResult, @@ -706,233 +314,234 @@ impl GroveDb { Ok(()).wrap_with_cost(cost) } } - -#[cfg(test)] -mod tests { - use grovedb_merk::{execute_proof, proofs::Query}; - use grovedb_storage::StorageBatch; - - use crate::{ - operations::proof::util::{ProofReader, ProofTokenType}, - tests::{common::EMPTY_PATH, make_deep_tree, TEST_LEAF}, - GroveDb, - }; - - #[test] - fn test_path_info_encoding_and_decoding() { - let path = vec![b"a".as_slice(), b"b".as_slice(), b"c".as_slice()]; - let mut proof_vector = vec![]; - GroveDb::generate_and_store_path_proof(path.clone(), &mut proof_vector) - .unwrap() - .unwrap(); - - let mut proof_reader = ProofReader::new(proof_vector.as_slice()); - let decoded_path = proof_reader.read_path_info().unwrap(); - - assert_eq!(path, decoded_path); - } - - #[test] - fn 
test_reading_of_verbose_proofs() { - let db = make_deep_tree(); - - let path = vec![TEST_LEAF, b"innertree"]; - let mut query = Query::new(); - query.insert_all(); - - let batch = StorageBatch::new(); - - let merk = db - .open_non_transactional_merk_at_path( - [TEST_LEAF, b"innertree"].as_ref().into(), - Some(&batch), - ) - .unwrap() - .unwrap(); - let expected_root_hash = merk.root_hash().unwrap(); - - let mut proof = vec![]; - db.generate_and_store_merk_proof( - &path.as_slice().into(), - &merk, - &query, - None, - ProofTokenType::Merk, - &mut proof, - true, - b"innertree", - ) - .unwrap() - .unwrap(); - assert_ne!(proof.len(), 0); - - let mut proof_reader = ProofReader::new(&proof); - let (proof_token_type, proof, key) = proof_reader.read_verbose_proof().unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - assert_eq!(key, Some(b"innertree".to_vec())); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, expected_root_hash); - assert_eq!(result_set.result_set.len(), 3); - - // what is the key is empty?? 
- let merk = db - .open_non_transactional_merk_at_path(EMPTY_PATH, Some(&batch)) - .unwrap() - .unwrap(); - let expected_root_hash = merk.root_hash().unwrap(); - - let mut proof = vec![]; - db.generate_and_store_merk_proof( - &EMPTY_PATH, - &merk, - &query, - None, - ProofTokenType::Merk, - &mut proof, - true, - &[], - ) - .unwrap() - .unwrap(); - assert_ne!(proof.len(), 0); - - let mut proof_reader = ProofReader::new(&proof); - let (proof_token_type, proof, key) = proof_reader.read_verbose_proof().unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - assert_eq!(key, Some(vec![])); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, expected_root_hash); - assert_eq!(result_set.result_set.len(), 3); - } - - #[test] - fn test_reading_verbose_proof_at_key() { - // going to generate an array of multiple proofs with different keys - let db = make_deep_tree(); - let mut proofs = vec![]; - - let mut query = Query::new(); - query.insert_all(); - - // insert all under inner tree - let path = vec![TEST_LEAF, b"innertree"]; - - let batch = StorageBatch::new(); - - let merk = db - .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) - .unwrap() - .unwrap(); - let inner_tree_root_hash = merk.root_hash().unwrap(); - db.generate_and_store_merk_proof( - &path.as_slice().into(), - &merk, - &query, - None, - ProofTokenType::Merk, - &mut proofs, - true, - path.iter().last().unwrap_or(&(&[][..])), - ) - .unwrap() - .unwrap(); - - // insert all under innertree4 - let path = vec![TEST_LEAF, b"innertree4"]; - let merk = db - .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) - .unwrap() - .unwrap(); - let inner_tree_4_root_hash = merk.root_hash().unwrap(); - db.generate_and_store_merk_proof( - &path.as_slice().into(), - &merk, - &query, - None, - ProofTokenType::Merk, - &mut proofs, - true, - path.iter().last().unwrap_or(&(&[][..])), - ) - .unwrap() - 
.unwrap(); - - // insert all for deeper_1 - let path: Vec<&[u8]> = vec![b"deep_leaf", b"deep_node_1", b"deeper_1"]; - let merk = db - .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) - .unwrap() - .unwrap(); - let deeper_1_root_hash = merk.root_hash().unwrap(); - db.generate_and_store_merk_proof( - &path.as_slice().into(), - &merk, - &query, - None, - ProofTokenType::Merk, - &mut proofs, - true, - path.iter().last().unwrap_or(&(&[][..])), - ) - .unwrap() - .unwrap(); - - // read the proof at innertree - let contextual_proof = proofs.clone(); - let mut proof_reader = ProofReader::new(&contextual_proof); - let (proof_token_type, proof) = proof_reader - .read_verbose_proof_at_key(b"innertree") - .unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, inner_tree_root_hash); - assert_eq!(result_set.result_set.len(), 3); - - // read the proof at innertree4 - let contextual_proof = proofs.clone(); - let mut proof_reader = ProofReader::new(&contextual_proof); - let (proof_token_type, proof) = proof_reader - .read_verbose_proof_at_key(b"innertree4") - .unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, inner_tree_4_root_hash); - assert_eq!(result_set.result_set.len(), 2); - - // read the proof at deeper_1 - let contextual_proof = proofs.clone(); - let mut proof_reader = ProofReader::new(&contextual_proof); - let (proof_token_type, proof) = - proof_reader.read_verbose_proof_at_key(b"deeper_1").unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, deeper_1_root_hash); - assert_eq!(result_set.result_set.len(), 3); - - // read the proof at an 
invalid key - let contextual_proof = proofs.clone(); - let mut proof_reader = ProofReader::new(&contextual_proof); - let reading_result = proof_reader.read_verbose_proof_at_key(b"unknown_key"); - assert!(reading_result.is_err()) - } -} +// #[cfg(test)] +// mod tests { +// use grovedb_merk::{execute_proof, proofs::Query}; +// use grovedb_storage::StorageBatch; +// +// use crate::{ +// operations::proof::util::{ProofReader, ProofTokenType}, +// tests::{common::EMPTY_PATH, make_deep_tree, TEST_LEAF}, +// GroveDb, +// }; +// +// #[test] +// fn test_path_info_encoding_and_decoding() { +// let path = vec![b"a".as_slice(), b"b".as_slice(), b"c".as_slice()]; +// let mut proof_vector = vec![]; +// GroveDb::generate_and_store_path_proof(path.clone(), &mut +// proof_vector) .unwrap() +// .unwrap(); +// +// let mut proof_reader = ProofReader::new(proof_vector.as_slice()); +// let decoded_path = proof_reader.read_path_info().unwrap(); +// +// assert_eq!(path, decoded_path); +// } +// +// #[test] +// fn test_reading_of_verbose_proofs() { +// let db = make_deep_tree(); +// +// let path = vec![TEST_LEAF, b"innertree"]; +// let mut query = Query::new(); +// query.insert_all(); +// +// let batch = StorageBatch::new(); +// +// let merk = db +// .open_non_transactional_merk_at_path( +// [TEST_LEAF, b"innertree"].as_ref().into(), +// Some(&batch), +// ) +// .unwrap() +// .unwrap(); +// let expected_root_hash = merk.root_hash().unwrap(); +// +// let mut proof = vec![]; +// db.generate_and_store_merk_proof( +// &path.as_slice().into(), +// &merk, +// &query, +// None, +// ProofTokenType::Merk, +// &mut proof, +// true, +// b"innertree", +// ) +// .unwrap() +// .unwrap(); +// assert_ne!(proof.len(), 0); +// +// let mut proof_reader = ProofReader::new(&proof); +// let (proof_token_type, proof, key) = +// proof_reader.read_verbose_proof().unwrap(); +// +// assert_eq!(proof_token_type, ProofTokenType::Merk); +// assert_eq!(key, Some(b"innertree".to_vec())); +// +// let (root_hash, 
result_set) = execute_proof(&proof, &query, None, +// true) .unwrap() +// .unwrap(); +// assert_eq!(root_hash, expected_root_hash); +// assert_eq!(result_set.result_set.len(), 3); +// +// // what is the key is empty?? +// let merk = db +// .open_non_transactional_merk_at_path(EMPTY_PATH, Some(&batch)) +// .unwrap() +// .unwrap(); +// let expected_root_hash = merk.root_hash().unwrap(); +// +// let mut proof = vec![]; +// db.generate_and_store_merk_proof( +// &EMPTY_PATH, +// &merk, +// &query, +// None, +// ProofTokenType::Merk, +// &mut proof, +// true, +// &[], +// ) +// .unwrap() +// .unwrap(); +// assert_ne!(proof.len(), 0); +// +// let mut proof_reader = ProofReader::new(&proof); +// let (proof_token_type, proof, key) = +// proof_reader.read_verbose_proof().unwrap(); +// +// assert_eq!(proof_token_type, ProofTokenType::Merk); +// assert_eq!(key, Some(vec![])); +// +// let (root_hash, result_set) = execute_proof(&proof, &query, None, +// true) .unwrap() +// .unwrap(); +// assert_eq!(root_hash, expected_root_hash); +// assert_eq!(result_set.result_set.len(), 3); +// } +// +// #[test] +// fn test_reading_verbose_proof_at_key() { +// // going to generate an array of multiple proofs with different keys +// let db = make_deep_tree(); +// let mut proofs = vec![]; +// +// let mut query = Query::new(); +// query.insert_all(); +// +// // insert all under inner tree +// let path = vec![TEST_LEAF, b"innertree"]; +// +// let batch = StorageBatch::new(); +// +// let merk = db +// .open_non_transactional_merk_at_path(path.as_slice().into(), +// Some(&batch)) .unwrap() +// .unwrap(); +// let inner_tree_root_hash = merk.root_hash().unwrap(); +// db.generate_and_store_merk_proof( +// &path.as_slice().into(), +// &merk, +// &query, +// None, +// ProofTokenType::Merk, +// &mut proofs, +// true, +// path.iter().last().unwrap_or(&(&[][..])), +// ) +// .unwrap() +// .unwrap(); +// +// // insert all under innertree4 +// let path = vec![TEST_LEAF, b"innertree4"]; +// let merk = db +// 
.open_non_transactional_merk_at_path(path.as_slice().into(), +// Some(&batch)) .unwrap() +// .unwrap(); +// let inner_tree_4_root_hash = merk.root_hash().unwrap(); +// db.generate_and_store_merk_proof( +// &path.as_slice().into(), +// &merk, +// &query, +// None, +// ProofTokenType::Merk, +// &mut proofs, +// true, +// path.iter().last().unwrap_or(&(&[][..])), +// ) +// .unwrap() +// .unwrap(); +// +// // insert all for deeper_1 +// let path: Vec<&[u8]> = vec![b"deep_leaf", b"deep_node_1", +// b"deeper_1"]; let merk = db +// .open_non_transactional_merk_at_path(path.as_slice().into(), +// Some(&batch)) .unwrap() +// .unwrap(); +// let deeper_1_root_hash = merk.root_hash().unwrap(); +// db.generate_and_store_merk_proof( +// &path.as_slice().into(), +// &merk, +// &query, +// None, +// ProofTokenType::Merk, +// &mut proofs, +// true, +// path.iter().last().unwrap_or(&(&[][..])), +// ) +// .unwrap() +// .unwrap(); +// +// // read the proof at innertree +// let contextual_proof = proofs.clone(); +// let mut proof_reader = ProofReader::new(&contextual_proof); +// let (proof_token_type, proof) = proof_reader +// .read_verbose_proof_at_key(b"innertree") +// .unwrap(); +// +// assert_eq!(proof_token_type, ProofTokenType::Merk); +// +// let (root_hash, result_set) = execute_proof(&proof, &query, None, +// true) .unwrap() +// .unwrap(); +// assert_eq!(root_hash, inner_tree_root_hash); +// assert_eq!(result_set.result_set.len(), 3); +// +// // read the proof at innertree4 +// let contextual_proof = proofs.clone(); +// let mut proof_reader = ProofReader::new(&contextual_proof); +// let (proof_token_type, proof) = proof_reader +// .read_verbose_proof_at_key(b"innertree4") +// .unwrap(); +// +// assert_eq!(proof_token_type, ProofTokenType::Merk); +// +// let (root_hash, result_set) = execute_proof(&proof, &query, None, +// true) .unwrap() +// .unwrap(); +// assert_eq!(root_hash, inner_tree_4_root_hash); +// assert_eq!(result_set.result_set.len(), 2); +// +// // read the proof at 
deeper_1 +// let contextual_proof = proofs.clone(); +// let mut proof_reader = ProofReader::new(&contextual_proof); +// let (proof_token_type, proof) = +// proof_reader.read_verbose_proof_at_key(b"deeper_1").unwrap(); +// +// assert_eq!(proof_token_type, ProofTokenType::Merk); +// +// let (root_hash, result_set) = execute_proof(&proof, &query, None, +// true) .unwrap() +// .unwrap(); +// assert_eq!(root_hash, deeper_1_root_hash); +// assert_eq!(result_set.result_set.len(), 3); +// +// // read the proof at an invalid key +// let contextual_proof = proofs.clone(); +// let mut proof_reader = ProofReader::new(&contextual_proof); +// let reading_result = +// proof_reader.read_verbose_proof_at_key(b"unknown_key"); assert! +// (reading_result.is_err()) } +// } diff --git a/grovedb/src/operations/proof/mod.rs b/grovedb/src/operations/proof/mod.rs index f505b3eb..7b69b586 100644 --- a/grovedb/src/operations/proof/mod.rs +++ b/grovedb/src/operations/proof/mod.rs @@ -1,11 +1,9 @@ //! Proof operations -// #[cfg(feature = "full")] -// mod generate; -#[cfg(any(feature = "full", feature = "verify"))] +#[cfg(feature = "full")] +mod generate; pub mod util; -#[cfg(any(feature = "full", feature = "verify"))] -pub mod verify; +mod verify; -// #[cfg(feature = "full")] -// pub use generate::ProveOptions; +#[cfg(feature = "full")] +pub use generate::ProveOptions; diff --git a/grovedb/src/operations/proof/util.rs b/grovedb/src/operations/proof/util.rs index c3749a16..6873ce05 100644 --- a/grovedb/src/operations/proof/util.rs +++ b/grovedb/src/operations/proof/util.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the 
Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - use std::fmt; #[cfg(any(feature = "full", feature = "verify"))] use std::io::Read; @@ -40,326 +12,7 @@ use grovedb_merk::{ use integer_encoding::{VarInt, VarIntReader}; #[cfg(any(feature = "full", feature = "verify"))] -use crate::Error; -use crate::operations::proof::verify::ProvedKeyValues; - -#[cfg(any(feature = "full", feature = "verify"))] -pub const EMPTY_TREE_HASH: [u8; 32] = [0; 32]; - -pub type ProofTokenInfo = (ProofTokenType, Vec, Option>); - -#[cfg(any(feature = "full", feature = "verify"))] -#[derive(Debug, PartialEq, Eq)] -/// Proof type -// TODO: there might be a better name for this -pub enum ProofTokenType { - Merk, - SizedMerk, - EmptyTree, - AbsentPath, - PathInfo, - Invalid, -} - -#[cfg(any(feature = "full", feature = "verify"))] -impl fmt::Display for ProofTokenType { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let variant_str = match self { - ProofTokenType::Merk => "Merk", - ProofTokenType::SizedMerk => "SizedMerk", - ProofTokenType::EmptyTree => "EmptyTree", - ProofTokenType::AbsentPath => "AbsentPath", - ProofTokenType::PathInfo => "PathInfo", - ProofTokenType::Invalid => "Invalid", - }; - write!(f, "{}", variant_str) - } -} - -#[cfg(any(feature = "full", feature = 
"verify"))] -impl From for u8 { - fn from(proof_token_type: ProofTokenType) -> Self { - match proof_token_type { - ProofTokenType::Merk => 0x01, - ProofTokenType::SizedMerk => 0x02, - ProofTokenType::EmptyTree => 0x04, - ProofTokenType::AbsentPath => 0x05, - ProofTokenType::PathInfo => 0x06, - ProofTokenType::Invalid => 0x10, - } - } -} - -#[cfg(any(feature = "full", feature = "verify"))] -impl From for ProofTokenType { - fn from(val: u8) -> Self { - match val { - 0x01 => ProofTokenType::Merk, - 0x02 => ProofTokenType::SizedMerk, - 0x04 => ProofTokenType::EmptyTree, - 0x05 => ProofTokenType::AbsentPath, - 0x06 => ProofTokenType::PathInfo, - _ => ProofTokenType::Invalid, - } - } -} - -#[cfg(any(feature = "full", feature = "verify"))] -impl ProofTokenType { - pub fn u8_to_display(val: u8) -> String { - match val { - 0x01 => "merk".to_string(), - 0x02 => "sized merk".to_string(), - 0x04 => "empty tree".to_string(), - 0x05 => "absent path".to_string(), - 0x06 => "path info".to_string(), - v => format!("invalid proof token {}", v), - } - } -} - -#[cfg(any(feature = "full", feature = "verify"))] -#[derive(Debug)] -// TODO: possibility for a proof writer?? 
-/// Proof reader -pub struct ProofReader<'a> { - proof_data: &'a [u8], - is_verbose: bool, -} - -#[cfg(any(feature = "full", feature = "verify"))] -impl<'a> ProofReader<'a> { - /// New proof reader - pub fn new(proof_data: &'a [u8]) -> Self { - Self { - proof_data, - is_verbose: false, - } - } - - /// New proof reader with verbose_status - pub fn new_with_verbose_status(proof_data: &'a [u8], is_verbose: bool) -> Self { - Self { - proof_data, - is_verbose, - } - } - - /// For non verbose proof read the immediate next proof, for verbose proof - /// read the first proof that matches a given key - pub fn read_next_proof(&mut self, key: &[u8]) -> Result<(ProofTokenType, Vec), Error> { - if self.is_verbose { - self.read_verbose_proof_at_key(key) - } else { - let (proof_token_type, proof, _) = self.read_proof_with_optional_type(None)?; - Ok((proof_token_type, proof)) - } - } - - /// Read the next proof, return the proof type - pub fn read_proof(&mut self) -> Result { - if self.is_verbose { - self.read_verbose_proof_with_optional_type(None) - } else { - self.read_proof_with_optional_type(None) - } - } - - /// Read verbose proof - pub fn read_verbose_proof(&mut self) -> Result { - self.read_verbose_proof_with_optional_type(None) - } - - /// Reads data from proof into slice of specific size - fn read_into_slice(&mut self, buf: &mut [u8]) -> Result { - self.proof_data - .read(buf) - .map_err(|_| Error::CorruptedData(String::from("failed to read proof data"))) - } - - /// Read varint encoded length information from proof data - fn read_length_data(&mut self) -> Result { - self.proof_data - .read_varint() - .map_err(|_| Error::InvalidProof("expected length data".to_string())) - } - - /// Read proof with optional type - pub fn read_proof_with_optional_type( - &mut self, - expected_data_type_option: Option, - ) -> Result { - let (proof_token_type, proof, _) = - self.read_proof_internal_with_optional_type(expected_data_type_option, false)?; - Ok((proof_token_type, proof, None)) - 
} - - /// Read verbose proof with optional type - pub fn read_verbose_proof_with_optional_type( - &mut self, - expected_data_type_option: Option, - ) -> Result { - let (proof_token_type, proof, key) = - self.read_proof_internal_with_optional_type(expected_data_type_option, true)?; - Ok(( - proof_token_type, - proof, - Some(key.ok_or(Error::InvalidProof( - "key must exist for verbose merk proofs".to_string(), - ))?), - )) - } - - /// Read verbose proof at key - /// Returns an error if it can't find a proof for that key - pub fn read_verbose_proof_at_key( - &mut self, - expected_key: &[u8], - ) -> Result<(ProofTokenType, Vec), Error> { - let (proof_token_type, proof, _) = loop { - let (proof_token_type, proof, key) = self.read_verbose_proof()?; - let key = key.expect("read_verbose_proof enforces that this exists"); - if key.as_slice() == expected_key { - break (proof_token_type, proof, key); - } - }; - - Ok((proof_token_type, proof)) - } - - /// Read proof with optional type - pub fn read_proof_internal_with_optional_type( - &mut self, - expected_data_type_option: Option, - is_verbose: bool, - ) -> Result { - let mut data_type = [0; 1]; - self.read_into_slice(&mut data_type)?; - - if let Some(expected_data_type) = expected_data_type_option { - if data_type[0] != expected_data_type { - return Err(Error::InvalidProof(format!( - "wrong data_type, expected {}, got {}", - expected_data_type, data_type[0] - ))); - } - } - - let proof_token_type: ProofTokenType = data_type[0].into(); - - if proof_token_type == ProofTokenType::EmptyTree - || proof_token_type == ProofTokenType::AbsentPath - { - return Ok((proof_token_type, vec![], None)); - } - - let (proof, key) = if proof_token_type == ProofTokenType::Merk - || proof_token_type == ProofTokenType::SizedMerk - { - // if verbose we need to read the key first - let key = if is_verbose { - let key_length = self.read_length_data()?; - - let mut key = vec![0; key_length]; - self.read_into_slice(&mut key)?; - - Some(key) - } else { 
- None - }; - - let proof_length = self.read_length_data()?; - - let mut proof = vec![0; proof_length]; - self.read_into_slice(&mut proof)?; - - (proof, key) - } else { - return Err(Error::InvalidProof( - "expected merk or sized merk proof".to_string(), - )); - }; - - Ok((proof_token_type, proof, key)) - } - - /// Reads path information from the proof vector - pub fn read_path_info(&mut self) -> Result>, Error> { - let mut data_type = [0; 1]; - self.read_into_slice(&mut data_type)?; - - if data_type != [Into::::into(ProofTokenType::PathInfo)] { - return Err(Error::InvalidProof(format!( - "wrong data_type, expected path_info, got {}", - ProofTokenType::u8_to_display(data_type[0]) - ))); - } - - let mut path = vec![]; - let path_slice_len = self.read_length_data()?; - - for _ in 0..path_slice_len { - let path_len = self.read_length_data()?; - let mut path_value = vec![0; path_len]; - self.read_into_slice(&mut path_value)?; - path.push(path_value); - } - - Ok(path) - } -} - -#[cfg(feature = "full")] -/// Write to vec -// TODO: this can error out handle the error -pub fn write_to_vec(dest: &mut W, value: &[u8]) -> Result<(), Error> { - dest.write_all(value) - .map_err(|_e| Error::InternalError("failed to write to vector")) -} - -#[cfg(feature = "full")] -/// Write a slice to the vector, first write the length of the slice -pub fn write_slice_to_vec(dest: &mut W, value: &[u8]) -> Result<(), Error> { - write_to_vec(dest, value.len().encode_var_vec().as_slice())?; - write_to_vec(dest, value)?; - Ok(()) -} - -#[cfg(feature = "full")] -/// Write a slice of a slice to a flat vector:w -pub fn write_slice_of_slice_to_slice(dest: &mut W, value: &[&[u8]]) -> Result<(), Error> { - // write the number of slices we are about to write - write_to_vec(dest, value.len().encode_var_vec().as_slice())?; - for inner_slice in value { - write_slice_to_vec(dest, inner_slice)?; - } - Ok(()) -} - -#[cfg(any(feature = "full", feature = "verify"))] -pub fn reduce_limit_by( - limit: &mut Option, - 
n: u16, -) { - if let Some(limit_value) = *limit { - if limit_value > 0 { - if limit_value >= n { - *limit = Some(limit_value - n); - } else { - *limit = Some(0); - } - } - } -} - -pub fn increase_limit_by( - limit: &mut Option, - limit_inc: u16, -) { - if let Some(limit_value) = *limit { - *limit = Some(limit_value + limit_inc); - } -} +pub type ProvedKeyValues = Vec; /// Proved path-key-values pub type ProvedPathKeyValues = Vec; @@ -405,27 +58,7 @@ impl ProvedPathKeyValue { mod tests { use grovedb_merk::proofs::query::ProvedKeyValue; - use crate::operations::proof::util::{ProofTokenType, ProvedPathKeyValue}; - - #[test] - fn test_proof_token_type_encoding() { - assert_eq!(0x01_u8, Into::::into(ProofTokenType::Merk)); - assert_eq!(0x02_u8, Into::::into(ProofTokenType::SizedMerk)); - assert_eq!(0x04_u8, Into::::into(ProofTokenType::EmptyTree)); - assert_eq!(0x05_u8, Into::::into(ProofTokenType::AbsentPath)); - assert_eq!(0x06_u8, Into::::into(ProofTokenType::PathInfo)); - assert_eq!(0x10_u8, Into::::into(ProofTokenType::Invalid)); - } - - #[test] - fn test_proof_token_type_decoding() { - assert_eq!(ProofTokenType::Merk, 0x01_u8.into()); - assert_eq!(ProofTokenType::SizedMerk, 0x02_u8.into()); - assert_eq!(ProofTokenType::EmptyTree, 0x04_u8.into()); - assert_eq!(ProofTokenType::AbsentPath, 0x05_u8.into()); - assert_eq!(ProofTokenType::PathInfo, 0x06_u8.into()); - assert_eq!(ProofTokenType::Invalid, 0x10_u8.into()); - } + use crate::operations::proof::util::ProvedPathKeyValue; #[test] fn test_proved_path_from_single_proved_key_value() { diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index d26ac4ef..a108a05d 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -1,917 +1,381 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files 
(the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - -//! Verify proof operations - -use std::{borrow::Cow, collections::BTreeMap}; - -use grovedb_merk::proofs::query::PathKey; -#[cfg(any(feature = "full", feature = "verify"))] -pub use grovedb_merk::proofs::query::{Path, ProvedKeyValue}; -#[cfg(any(feature = "full", feature = "verify"))] -use grovedb_merk::{ - proofs::Query, - tree::{combine_hash, value_hash as value_hash_fn}, - CryptoHash, -}; +use std::collections::BTreeSet; + +use grovedb_merk::{execute_proof, proofs::Query, tree::value_hash}; use crate::{ - operations::proof::util::{ - reduce_limit_by, ProvedPathKeyValue, ProvedPathKeyValues, + operations::proof::{ + generate::{GroveDBProof, GroveDBProofV0, LayerProof}, + util::{ProvedPathKeyValue, ProvedPathKeyValues}, }, query_result_type::PathKeyOptionalElementTrio, - versioning::read_and_consume_proof_version, - SizedQuery, -}; -#[cfg(any(feature = "full", feature = "verify"))] -use crate::{ - operations::proof::util::{ - ProofReader, ProofTokenType, ProofTokenType::AbsentPath, EMPTY_TREE_HASH, - }, 
Element, Error, GroveDb, PathQuery, }; -#[cfg(any(feature = "full", feature = "verify"))] -pub type ProvedKeyValues = Vec; - -#[cfg(any(feature = "full", feature = "verify"))] -type EncounteredAbsence = bool; - -#[cfg(any(feature = "full", feature = "verify"))] impl GroveDb { - /// Verify proof given a path query - /// Returns the root hash + deserialized elements pub fn verify_query( proof: &[u8], query: &PathQuery, ) -> Result<([u8; 32], Vec), Error> { - let (root_hash, proved_path_key_values) = Self::verify_query_raw(proof, query)?; - let path_key_optional_elements = proved_path_key_values - .into_iter() - .map(|pkv| pkv.try_into()) - .collect::, Error>>()?; - Ok((root_hash, path_key_optional_elements)) + let config = bincode::config::standard() + .with_big_endian() + .with_no_limit(); + let grovedb_proof: GroveDBProof = bincode::decode_from_slice(proof, config) + .map_err(|e| Error::CorruptedData(format!("unable to decode proof: {}", e)))? + .0; + + let (root_hash, result) = Self::verify_proof_internal(&grovedb_proof, query, false)?; + + Ok((root_hash, result)) } - /// Verify proof for a given path query returns serialized elements pub fn verify_query_raw( proof: &[u8], query: &PathQuery, ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { - let mut verifier = ProofVerifier::new(query); - let hash = verifier.execute_proof(proof, query, false)?; + let config = bincode::config::standard() + .with_big_endian() + .with_no_limit(); + let grovedb_proof: GroveDBProof = bincode::decode_from_slice(proof, config) + .map_err(|e| Error::CorruptedData(format!("unable to decode proof: {}", e)))? + .0; - Ok((hash, verifier.result_set)) - } + let (root_hash, result) = Self::verify_proof_raw_internal(&grovedb_proof, query, false)?; - /// Verify proof given multiple path queries. - /// If we have more than one path query we merge before performing - /// verification. 
- pub fn verify_query_many( - proof: &[u8], - query: Vec<&PathQuery>, - ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { - if query.len() > 1 { - let query = PathQuery::merge(query)?; - GroveDb::verify_query_raw(proof, &query) - } else { - GroveDb::verify_query_raw(proof, query[0]) - } + Ok((root_hash, result)) } - /// Given a verbose proof, we can verify it with a subset path query. - /// Returning the root hash and the deserialized result set. - pub fn verify_subset_query( - proof: &[u8], + fn verify_proof_internal( + proof: &GroveDBProof, query: &PathQuery, + is_subset: bool, ) -> Result<([u8; 32], Vec), Error> { - let (root_hash, proved_path_key_values) = Self::verify_subset_query_raw(proof, query)?; - let path_key_optional_elements = proved_path_key_values - .into_iter() - .map(|pkv| pkv.try_into()) - .collect::, Error>>()?; - Ok((root_hash, path_key_optional_elements)) - } - - /// Given a verbose proof, we can verify it with a subset path query. - /// Returning the root hash and the serialized result set. 
- pub fn verify_subset_query_raw( - proof: &[u8], - query: &PathQuery, - ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { - let mut verifier = ProofVerifier::new(query); - let hash = verifier.execute_proof(proof, query, true)?; - Ok((hash, verifier.result_set)) - } - - /// Verify non subset query return the absence proof - /// Returns all possible keys within the Path Query with an optional Element - /// Value Element is set to None if absent - pub fn verify_query_with_absence_proof( - proof: &[u8], - query: &PathQuery, - ) -> Result<([u8; 32], Vec), Error> { - Self::verify_with_absence_proof(proof, query, Self::verify_query) + match proof { + GroveDBProof::V0(proof_v0) => { + Self::verify_proof_internal_v0(proof_v0, query, is_subset) + } + } } - /// Verify subset query return the absence proof - /// Returns all possible keys within the Path Query with an optional Element - /// Value Element is set to None if absent - pub fn verify_subset_query_with_absence_proof( - proof: &[u8], + fn verify_proof_internal_v0( + proof: &GroveDBProofV0, query: &PathQuery, + is_subset: bool, ) -> Result<([u8; 32], Vec), Error> { - Self::verify_with_absence_proof(proof, query, Self::verify_subset_query) + let mut result = Vec::new(); + let root_hash = + Self::verify_layer_proof(&proof.root_layer, query, &[], &mut result, is_subset)?; + Ok((root_hash, result)) } - /// Verifies the proof and returns both elements in the result set and the - /// elements in query but not in state. - /// Note: This only works for certain path queries. 
- // TODO: We should not care about terminal keys, as theoretically they can be - // infinite we should perform the absence check solely on the proof and the - // given key, this is a temporary solution - fn verify_with_absence_proof( - proof: &[u8], + fn verify_proof_raw_internal( + proof: &GroveDBProof, query: &PathQuery, - verification_fn: T, - ) -> Result<([u8; 32], Vec), Error> - where - T: Fn(&[u8], &PathQuery) -> Result<([u8; 32], Vec), Error>, - { - // must have a limit - let max_results = query.query.limit.ok_or(Error::NotSupported( - "limits must be set in verify_query_with_absence_proof".to_string(), - ))? as usize; - - // must have no offset - if query.query.offset.is_some() { - return Err(Error::NotSupported( - "offsets are not supported for verify_query_with_absence_proof".to_string(), - )); - } - - let terminal_keys = query.terminal_keys(max_results)?; - - // need to actually verify the query - let (root_hash, result_set) = verification_fn(proof, query)?; - - // convert the result set to a btree map - let mut result_set_as_map: BTreeMap> = result_set - .into_iter() - .map(|(path, key, element)| ((path, key), element)) - .collect(); - - let result_set_with_absence: Vec = terminal_keys - .into_iter() - .map(|terminal_key| { - let element = result_set_as_map.remove(&terminal_key).flatten(); - (terminal_key.0, terminal_key.1, element) - }) - .collect(); - - Ok((root_hash, result_set_with_absence)) - } - - /// Verify subset proof with a chain of path query functions. - /// After subset verification with the first path query, the result if - /// passed to the next path query generation function which generates a - /// new path query Apply the new path query, and pass the result to the - /// next ... This is useful for verifying proofs with multiple path - /// queries that depend on one another. 
- pub fn verify_query_with_chained_path_queries( - proof: &[u8], - first_query: &PathQuery, - chained_path_queries: Vec, - ) -> Result<(CryptoHash, Vec>), Error> - where - C: Fn(Vec) -> Option, - { - let mut results = vec![]; - - let (last_root_hash, elements) = Self::verify_subset_query(proof, first_query)?; - results.push(elements); - - // we should iterate over each chained path queries - for path_query_generator in chained_path_queries { - let new_path_query = path_query_generator(results[results.len() - 1].clone()).ok_or( - Error::InvalidInput("one of the path query generators returns no path query"), - )?; - let (new_root_hash, new_elements) = Self::verify_subset_query(proof, &new_path_query)?; - if new_root_hash != last_root_hash { - return Err(Error::InvalidProof(format!( - "root hash for different path queries do no match, first is {}, this one is {}", - hex::encode(last_root_hash), - hex::encode(new_root_hash) - ))); + is_subset: bool, + ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { + match proof { + GroveDBProof::V0(proof_v0) => { + Self::verify_proof_raw_internal_v0(proof_v0, query, is_subset) } - results.push(new_elements); } - - Ok((last_root_hash, results)) } -} -#[cfg(any(feature = "full", feature = "verify"))] -/// Proof verifier -struct ProofVerifier { - limit: Option, - result_set: ProvedPathKeyValues, -} - -#[cfg(any(feature = "full", feature = "verify"))] -impl ProofVerifier { - /// New query - pub fn new(query: &PathQuery) -> Self { - ProofVerifier { - limit: query.query.limit, - result_set: vec![], - } + fn verify_proof_raw_internal_v0( + proof: &GroveDBProofV0, + query: &PathQuery, + is_subset: bool, + ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { + let mut result = Vec::new(); + let root_hash = + Self::verify_layer_proof_raw(&proof.root_layer, query, &[], &mut result, is_subset)?; + Ok((root_hash, result)) } - /// Execute proof - pub fn execute_proof( - &mut self, - proof: &[u8], + fn verify_layer_proof( + layer_proof: 
&LayerProof, query: &PathQuery, - is_verbose: bool, + current_path: &[&[u8]], + result: &mut Vec, + is_subset: bool, ) -> Result<[u8; 32], Error> { - let (_proof_version, proof) = read_and_consume_proof_version(proof)?; - let mut proof_reader = ProofReader::new_with_verbose_status(proof, is_verbose); - - let path_slices = query.path.iter().map(|x| x.as_slice()).collect::>(); - let mut query = Cow::Borrowed(query); - - // TODO: refactor and add better comments - // if verbose, the first thing we want to do is read the path info - if is_verbose { - let original_path = proof_reader.read_path_info()?; - - if original_path == path_slices { - // do nothing - } else if original_path.len() > path_slices.len() { - // TODO: can we relax this constraint - return Err(Error::InvalidProof( - "original path query path must not be greater than the subset path len" - .to_string(), - )); - } else { - let original_path_in_new_path = original_path - .iter() - .all(|key| path_slices.contains(&key.as_slice())); - - if !original_path_in_new_path { - return Err(Error::InvalidProof( - "the original path should be a subset of the subset path".to_string(), - )); - } else { - // We construct a new path query - let path_not_common = path_slices[original_path.len()..].to_vec(); - let mut path_iter = path_not_common.iter(); - - let mut new_query = Query::new(); - if path_iter.len() >= 1 { - new_query - .insert_key(path_iter.next().expect("confirmed has value").to_vec()); - } - - // need to add the first key to the query - new_query.set_subquery_path(path_iter.map(|a| a.to_vec()).collect()); - new_query.set_subquery(query.query.query.clone()); - - query = Cow::Owned(PathQuery::new( - original_path, - SizedQuery::new(new_query, query.query.limit, query.query.offset), - )); + let (query_items, left_to_right) = query + .query_items_at_path(current_path) + .ok_or(Error::CorruptedPath("path should be part of path_query"))?; + + let level_query = Query { + items: query_items.to_vec(), + 
default_subquery_branch: Default::default(), + conditional_subquery_branches: None, + left_to_right, + }; + + let (root_hash, merk_result) = execute_proof( + &layer_proof.merk_proof, + &level_query, + Some(layer_proof.lower_layers.len() as u16), + left_to_right, + ) + .unwrap() + .map_err(|e| { + eprintln!("{e}"); + Error::InvalidProof(format!("invalid proof verification parameters: {}", e)) + })?; + + let mut verified_keys = BTreeSet::new(); + + for proved_key_value in merk_result.result_set { + let mut path = current_path.to_vec(); + let key = proved_key_value.key; + let value = proved_key_value.value; + path.push(&key); + + verified_keys.insert(key.clone()); + + if let Some(lower_layer) = layer_proof.lower_layers.get(&key) { + let lower_hash = + Self::verify_layer_proof(lower_layer, query, &path, result, is_subset)?; + if lower_hash != value_hash(&value).value { + return Err(Error::InvalidProof("Mismatch in lower layer hash".into())); } + } else { + let element = Element::deserialize(&value)?; + result.push(( + path.iter().map(|p| p.to_vec()).collect(), + key, + Some(element), + )); } } - let (proof_token_type, proof, _) = proof_reader.read_proof()?; - - let root_hash = if proof_token_type == AbsentPath { - self.verify_absent_path(&mut proof_reader, path_slices)? - } else { - let path_owned = query.path.iter().map(|a| a.to_vec()).collect(); - let mut last_subtree_root_hash = self.execute_subquery_proof( - proof_token_type, - proof, - &mut proof_reader, - query.as_ref(), - path_owned, - )?; - - // validate the path elements are connected - self.verify_path_to_root( - query.as_ref(), - query.path.iter().map(|a| a.as_ref()).collect(), - &mut proof_reader, - &mut last_subtree_root_hash, - )? 
- }; + // if !is_subset { + // // Verify completeness only if not doing subset verification + // self.verify_completeness(&query_items, &merk_result.result_set, + // current_path)?; } Ok(root_hash) } - fn execute_subquery_proof( - &mut self, - proof_token_type: ProofTokenType, - proof: Vec, - proof_reader: &mut ProofReader, + fn verify_layer_proof_raw( + layer_proof: &LayerProof, query: &PathQuery, - path: Path, + current_path: &[&[u8]], + result: &mut ProvedPathKeyValues, + is_subset: bool, ) -> Result<[u8; 32], Error> { - let last_root_hash: [u8; 32]; - - match proof_token_type { - ProofTokenType::SizedMerk => { - // verify proof with limit and offset values - let verification_result = self.execute_merk_proof( - ProofTokenType::SizedMerk, - &proof, - &query.query.query, - query.query.query.left_to_right, - path, - )?; - - last_root_hash = verification_result.0; - } - ProofTokenType::Merk => { - // for non leaf subtrees, we want to prove that all the queried keys - // have an accompanying proof as long as the limit is non zero - // and their child subtree is not empty - let (proof_root_hash, children) = self.execute_merk_proof( - ProofTokenType::Merk, - &proof, - &query.query.query, - query.query.query.left_to_right, - path, - )?; - - last_root_hash = proof_root_hash; - let children = children.ok_or(Error::InvalidProof( - "MERK_PROOF always returns a result set".to_string(), - ))?; - - for proved_path_key_value in children { - let ProvedPathKeyValue { - path, - key, - value: value_bytes, - proof: value_hash, - } = proved_path_key_value; - let child_element = Element::deserialize(value_bytes.as_slice())?; - match child_element { - Element::Tree(expected_root_key, _) - | Element::SumTree(expected_root_key, ..) 
=> { - let mut expected_combined_child_hash = value_hash; - let mut current_value_bytes = value_bytes; - - if self.limit == Some(0) { - // we are done verifying the subqueries - break; - } - - let (subquery_path, subquery_value) = - Element::subquery_paths_and_value_for_sized_query( - &query.query, - key.as_slice(), - ); - - if subquery_value.is_none() && subquery_path.is_none() { - // add this element to the result set - reduce_limit_by( - &mut self.limit, - 1, - ); - - self.result_set.push( - ProvedPathKeyValue::from_proved_key_value( - path, - ProvedKeyValue { - key, - value: current_value_bytes, - proof: value_hash, - }, - ), - ); - - continue; - } - - // What is the equivalent for an empty tree - if expected_root_key.is_none() { - // child node is empty, move on to next - continue; - } - - // update the path, we are about to perform a subquery call - let mut new_path = path.to_owned(); - new_path.push(key); - - if subquery_path.is_some() - && !subquery_path.as_ref().unwrap().is_empty() - { - if subquery_value.is_none() { - self.verify_subquery_path( - proof_reader, - ProofTokenType::SizedMerk, - &mut subquery_path.expect("confirmed it has a value above"), - &mut expected_combined_child_hash, - &mut current_value_bytes, - &mut new_path, - )?; - continue; - } else { - let (_, result_set_opt, encountered_absence) = self - .verify_subquery_path( - proof_reader, - ProofTokenType::Merk, - &mut subquery_path - .expect("confirmed it has a value above"), - &mut expected_combined_child_hash, - &mut current_value_bytes, - &mut new_path, - )?; - - if encountered_absence { - // we hit an absence proof while verifying the subquery path - continue; - } - - let subquery_path_result_set = result_set_opt; - if subquery_path_result_set.is_none() { - // this means a sized proof was generated for the subquery - // key - // which is invalid as there exists a subquery value - return Err(Error::InvalidProof( - "expected unsized proof for subquery path as subquery \ - value exists" - 
.to_string(), - )); - } - let subquery_path_result_set = - subquery_path_result_set.expect("confirmed exists above"); - - if subquery_path_result_set.is_empty() { - // we have a valid proof that shows the absence of the - // subquery path in the tree, hence the subquery value - // cannot be applied, move on to the next. - continue; - } - - Self::update_root_key_from_subquery_path_element( - &mut expected_combined_child_hash, - &mut current_value_bytes, - &subquery_path_result_set, - )?; - } - } - - let new_path_query = - PathQuery::new_unsized(vec![], subquery_value.unwrap()); - - let (child_proof_token_type, child_proof) = proof_reader - .read_next_proof(new_path.last().unwrap_or(&Default::default()))?; - - let child_hash = self.execute_subquery_proof( - child_proof_token_type, - child_proof, - proof_reader, - &new_path_query, - new_path, - )?; - - let combined_child_hash = combine_hash( - value_hash_fn(¤t_value_bytes).value(), - &child_hash, - ) - .value() - .to_owned(); - - if combined_child_hash != expected_combined_child_hash { - return Err(Error::InvalidProof(format!( - "child hash {} doesn't match the expected hash {}", - hex::encode(combined_child_hash), - hex::encode(expected_combined_child_hash) - ))); - } - } - _ => { - // encountered a non tree element, we can't apply a subquery to it - // add it to the result set. 
- if self.limit == Some(0) { - break; - } - - reduce_limit_by(&mut self.limit, 1); - - self.result_set - .push(ProvedPathKeyValue::from_proved_key_value( - path, - ProvedKeyValue { - key, - value: value_bytes, - proof: value_hash, - }, - )); - } - } - } - } - ProofTokenType::EmptyTree => { - last_root_hash = EMPTY_TREE_HASH; - } - t => { - // execute_subquery_proof only expects proofs for merk trees - // root proof is handled separately - return Err(Error::InvalidProof(format!( - "wrong proof type, expected sized merk, merk or empty tree but got {}", - t - ))); - } - } - Ok(last_root_hash) - } - - /// Deserialize subkey_element and update expected root hash and element - /// value - fn update_root_key_from_subquery_path_element( - expected_child_hash: &mut CryptoHash, - current_value_bytes: &mut Vec, - subquery_path_result_set: &[ProvedPathKeyValue], - ) -> Result<(), Error> { - let elem_value = &subquery_path_result_set[0].value; - let subquery_path_element = Element::deserialize(elem_value) - .map_err(|_| Error::CorruptedData("failed to deserialize element".to_string()))?; - match subquery_path_element { - Element::Tree(..) | Element::SumTree(..) 
=> { - *expected_child_hash = subquery_path_result_set[0].proof; - *current_value_bytes = subquery_path_result_set[0].value.to_owned(); - } - e => { - // the means that the subquery path pointed to a non tree - // element, this is not valid as you cannot apply the - // the subquery value to non tree items - return Err(Error::InvalidProof(format!( - "subquery path cannot point to non tree element, got {}", - e.type_str() - ))); - } - } - Ok(()) - } + let (query_items, left_to_right) = query + .query_items_at_path(current_path) + .ok_or(Error::CorruptedPath("path should be part of path_query"))?; + + let level_query = Query { + items: query_items.to_vec(), + default_subquery_branch: Default::default(), + conditional_subquery_branches: None, + left_to_right, + }; - /// Checks that a valid proof showing the existence or absence of the - /// subquery path is present - fn verify_subquery_path( - &mut self, - proof_reader: &mut ProofReader, - expected_proof_token_type: ProofTokenType, - subquery_path: &mut Path, - expected_root_hash: &mut CryptoHash, - current_value_bytes: &mut Vec, - current_path: &mut Path, - ) -> Result<(CryptoHash, Option, EncounteredAbsence), Error> { - // the subquery path contains at least one item. 
- let last_key = subquery_path.remove(subquery_path.len() - 1); - - for subquery_key in subquery_path.iter() { - let (proof_token_type, subkey_proof) = - proof_reader.read_next_proof(current_path.last().unwrap_or(&Default::default()))?; - // intermediate proofs are all going to be unsized merk proofs - if proof_token_type != ProofTokenType::Merk { - return Err(Error::InvalidProof(format!( - "expected MERK proof type for intermediate subquery path keys, got {}", - proof_token_type - ))); - } - match proof_token_type { - ProofTokenType::Merk => { - let mut key_as_query = Query::new(); - key_as_query.insert_key(subquery_key.to_owned()); - current_path.push(subquery_key.to_owned()); - - let (proof_root_hash, result_set) = self.execute_merk_proof( - proof_token_type, - &subkey_proof, - &key_as_query, - key_as_query.left_to_right, - current_path.to_owned(), - )?; - - // should always be some as we force the proof type to be MERK - debug_assert!(result_set.is_some(), "{}", true); - - // result_set being empty means we could not find the given key in the subtree - // which essentially means an absence proof - if result_set - .as_ref() - .expect("result set should always be some for merk proof type") - .is_empty() - { - return Ok((proof_root_hash, None, true)); - } - - // verify that the elements in the subquery path are linked by root hashes. 
- let combined_child_hash = - combine_hash(value_hash_fn(current_value_bytes).value(), &proof_root_hash) - .value() - .to_owned(); - - if combined_child_hash != *expected_root_hash { - return Err(Error::InvalidProof(format!( - "child hash {} doesn't match the expected hash {}", - hex::encode(combined_child_hash), - hex::encode(expected_root_hash) - ))); - } - - // after confirming they are linked use the latest hash values for subsequent - // checks - Self::update_root_key_from_subquery_path_element( - expected_root_hash, - current_value_bytes, - &result_set.expect("confirmed is some"), - )?; - } - t => { - return Err(Error::InvalidProof(format!( - "expected merk of sized merk proof type for subquery path, got {}", - t - ))); + let (root_hash, merk_result) = execute_proof( + &layer_proof.merk_proof, + &level_query, + Some(layer_proof.lower_layers.len() as u16), + left_to_right, + ) + .unwrap() + .map_err(|e| { + eprintln!("{e}"); + Error::InvalidProof(format!("invalid proof verification parameters: {}", e)) + })?; + + let mut verified_keys = BTreeSet::new(); + + for proved_key_value in merk_result.result_set { + let mut path = current_path.to_vec(); + let key = &proved_key_value.key; + let value = &proved_key_value.value; + path.push(key); + + verified_keys.insert(key.clone()); + + if let Some(lower_layer) = layer_proof.lower_layers.get(key) { + let lower_hash = + Self::verify_layer_proof_raw(lower_layer, query, &path, result, is_subset)?; + if lower_hash != value_hash(value).value { + return Err(Error::InvalidProof("Mismatch in lower layer hash".into())); } + } else { + let path_key_value = ProvedPathKeyValue::from_proved_key_value( + path.iter().map(|p| p.to_vec()).collect(), + proved_key_value, + ); + result.push(path_key_value); } } - let (proof_token_type, subkey_proof) = - proof_reader.read_next_proof(current_path.last().unwrap_or(&Default::default()))?; - if proof_token_type != expected_proof_token_type { - return Err(Error::InvalidProof(format!( - 
"unexpected proof type for subquery path, expected {}, got {}", - expected_proof_token_type, proof_token_type - ))); - } + // if !is_subset { + // // Verify completeness only if not doing subset verification + // self.verify_completeness(&query_items, &merk_result.result_set, + // current_path)?; } - match proof_token_type { - ProofTokenType::Merk | ProofTokenType::SizedMerk => { - let mut key_as_query = Query::new(); - key_as_query.insert_key(last_key.to_owned()); - - let verification_result = self.execute_merk_proof( - proof_token_type, - &subkey_proof, - &key_as_query, - key_as_query.left_to_right, - current_path.to_owned(), - )?; - - current_path.push(last_key); - - Ok((verification_result.0, verification_result.1, false)) - } - t => Err(Error::InvalidProof(format!( - "expected merk or sized merk proof type for subquery path, got {}", - t - ))), - } + Ok(root_hash) } - fn verify_absent_path( - &mut self, - proof_reader: &mut ProofReader, - path_slices: Vec<&[u8]>, - ) -> Result<[u8; 32], Error> { - let mut root_key_hash = None; - let mut expected_child_hash = None; - let mut last_result_set: ProvedPathKeyValues = vec![]; - - for key in path_slices { - let (proof_token_type, merk_proof, _) = proof_reader.read_proof()?; - if proof_token_type == ProofTokenType::EmptyTree { - // when we encounter the empty tree op, we need to ensure - // that the expected tree hash is the combination of the - // Element_value_hash and the empty root hash [0; 32] - let combined_hash = combine_hash( - value_hash_fn(last_result_set[0].value.as_slice()).value(), - &[0; 32], - ) - .unwrap(); - if Some(combined_hash) != expected_child_hash { - return Err(Error::InvalidProof( - "proof invalid: could not verify empty subtree while generating absent \ - path proof" - .to_string(), - )); - } else { - last_result_set = vec![]; - break; - } - } else if proof_token_type != ProofTokenType::Merk { - return Err(Error::InvalidProof(format!( - "expected a merk proof for absent path, got {}", - 
proof_token_type - ))); - } - - let mut child_query = Query::new(); - child_query.insert_key(key.to_vec()); - - // TODO: don't pass empty vec - let proof_result = self.execute_merk_proof( - ProofTokenType::Merk, - &merk_proof, - &child_query, - true, - // cannot return a result set - Vec::new(), - )?; - - if let Some(expected_child_hash) = expected_child_hash { - let combined_hash = combine_hash( - value_hash_fn(last_result_set[0].value.as_slice()).value(), - &proof_result.0, - ) - .value() - .to_owned(); - if combined_hash != expected_child_hash { - return Err(Error::InvalidProof(format!( - "proof invalid: invalid parent, expected {}, got {}", - hex::encode(expected_child_hash), - hex::encode(combined_hash) - ))); - } - } else { - root_key_hash = Some(proof_result.0); - } + // fn verify_completeness( + // &self, + // query_items: &[QueryItem], + // result_set: &[ProvedKeyValue], + // current_path: &[Vec], + // ) -> Result<(), Error> { + // let mut result_iter = result_set.iter().peekable(); + // + // for query_item in query_items { + // match query_item { + // QueryItem::Key(key) => { + // if !self.verify_key_completeness(key, &mut result_iter)? 
{ + // return Err(Error::InvalidProof(format!( + // "Key {:?} is missing and its absence is not proven", + // hex::encode(key) + // ))); + // } + // }, + // QueryItem::Range(range) => { + // self.verify_range_completeness(range, &mut result_iter)?; + // }, + // QueryItem::RangeInclusive(range) => { + // self.verify_range_inclusive_completeness(range, &mut + // result_iter)?; }, + // // Add cases for other QueryItem variants as needed + // _ => return Err(Error::InvalidProof("Unsupported query item + // type".into())), } + // } + // + // // Ensure we've consumed all results + // if result_iter.peek().is_some() { + // return Err(Error::InvalidProof("Proof contains extra, unexpected + // results".into())); } + // + // Ok(()) + // } + // + // fn verify_key_completeness( + // &self, + // key: &[u8], + // result_iter: &mut std::iter::Peekable>, ) -> Result { + // if let Some(result) = result_iter.peek() { + // if result.key == key { + // result_iter.next(); // Consume the result + // Ok(true) + // } else if result.key > key { + // // The key is missing, but this is okay as long as we can prove + // its absence self.verify_key_absence(key, result) + // } else { + // // This shouldn't happen if the result set is properly ordered + // Err(Error::InvalidProof("Result set is not properly + // ordered".into())) } + // } else { + // // We've run out of results, need to prove absence + // Err(Error::InvalidProof("Ran out of results unexpectedly".into())) + // } + // } + // + // fn verify_range_completeness( + // &self, + // range: &Range>, + // result_iter: &mut std::iter::Peekable>, ) -> Result<(), Error> { + // let mut current = range.start.clone(); + // while current < range.end { + // if !self.verify_key_completeness(¤t, result_iter)? { + // return Err(Error::InvalidProof(format!( + // "Key {:?} in range is missing and its absence is not proven", + // hex::encode(¤t) + // ))); + // } + // // Move to next key. 
This is a simplified approach and might need to + // be adjusted // based on your key structure. + // current = increment_key(¤t); + // } + // Ok(()) + // } + // + // fn verify_range_inclusive_completeness( + // &self, + // range: &RangeInclusive>, + // result_iter: &mut std::iter::Peekable>, ) -> Result<(), Error> { + // let mut current = range.start().clone(); + // while current <= *range.end() { + // if !self.verify_key_completeness(¤t, result_iter)? { + // return Err(Error::InvalidProof(format!( + // "Key {:?} in inclusive range is missing and its absence is + // not proven", hex::encode(¤t) + // ))); + // } + // // Move to next key. This is a simplified approach and might need to + // be adjusted // based on your key structure. + // current = increment_key(¤t); + // } + // Ok(()) + // } + // + // fn verify_key_absence( + // &self, + // key: &[u8], + // next_result: &ProvedKeyValue, + // ) -> Result { + // // This function should implement the logic to verify that a key's + // absence is proven // The exact implementation will depend on how your + // system proves absences // This might involve checking the hash of the + // next present key, verifying that // there's no possible key between + // the absent key and the next present key, etc. 
+ // + // // For now, we'll just return Ok(false) as a placeholder + // Ok(false) + // } + // + // fn increment_key(key: &[u8]) -> Vec { + // // This is a very simplified key incrementing function + // // You might need a more sophisticated approach depending on your key + // structure let mut new_key = key.to_vec(); + // for byte in new_key.iter_mut().rev() { + // if *byte == 255 { + // *byte = 0; + // } else { + // *byte += 1; + // break; + // } + // } + // new_key + // } - last_result_set = proof_result - .1 - .expect("MERK_PROOF always returns a result set"); - if last_result_set.is_empty() { - // if result set is empty then we have reached the absence point, break - break; - } + pub fn verify_subset_query( + proof: &[u8], + query: &PathQuery, + ) -> Result<([u8; 32], Vec), Error> { + let config = bincode::config::standard() + .with_big_endian() + .with_no_limit(); + let grovedb_proof: GroveDBProof = bincode::decode_from_slice(proof, config) + .map_err(|e| Error::CorruptedData(format!("unable to decode proof: {}", e)))? + .0; - let elem = Element::deserialize(last_result_set[0].value.as_slice())?; - let child_hash = match elem { - Element::Tree(..) | Element::SumTree(..) 
=> Ok(Some(last_result_set[0].proof)), - e => Err(Error::InvalidProof(format!( - "intermediate proofs should be for trees, got {}", - e.type_str() - ))), - }?; - expected_child_hash = child_hash; - } + let (root_hash, result) = Self::verify_proof_internal(&grovedb_proof, query, true)?; - if last_result_set.is_empty() { - if let Some(hash) = root_key_hash { - Ok(hash) - } else { - Err(Error::InvalidProof( - "proof invalid: no non root tree found".to_string(), - )) - } - } else { - Err(Error::InvalidProof( - "proof invalid: path not absent".to_string(), - )) - } + Ok((root_hash, result)) } - /// Verifies that the correct proof was provided to confirm the path in - /// query - fn verify_path_to_root( - &mut self, + pub fn verify_query_with_absence_proof( + proof: &[u8], query: &PathQuery, - path_slices: Vec<&[u8]>, - proof_reader: &mut ProofReader, - expected_root_hash: &mut [u8; 32], - ) -> Result<[u8; 32], Error> { - let mut split_path = path_slices.split_last(); - while let Some((key, path_slice)) = split_path { - // for every subtree, there should be a corresponding proof for the parent - // which should prove that this subtree is a child of the parent tree - let (proof_token_type, parent_merk_proof) = - proof_reader.read_next_proof(path_slice.last().unwrap_or(&Default::default()))?; - if proof_token_type != ProofTokenType::Merk { - return Err(Error::InvalidProof(format!( - "wrong data_type expected Merk Proof, got {}", - proof_token_type - ))); - } - - let mut parent_query = Query::new(); - parent_query.insert_key(key.to_vec()); - - let proof_result = self.execute_merk_proof( - ProofTokenType::Merk, - &parent_merk_proof, - &parent_query, - query.query.query.left_to_right, - // TODO: don't pass empty vec - Vec::new(), - )?; - - let result_set = proof_result - .1 - .expect("MERK_PROOF always returns a result set"); - if result_set.is_empty() || &result_set[0].key != key { - return Err(Error::InvalidProof( - "proof invalid: invalid parent".to_string(), - )); - } - - 
let elem = Element::deserialize(result_set[0].value.as_slice())?; - let child_hash = match elem { - Element::Tree(..) | Element::SumTree(..) => Ok(result_set[0].proof), - t => Err(Error::InvalidProof(format!( - "intermediate proofs should be for trees, got {}", - t.type_str() - ))), - }?; - - let combined_root_hash = combine_hash( - value_hash_fn(&result_set[0].value).value(), - expected_root_hash, - ) - .value() - .to_owned(); - if child_hash != combined_root_hash { - return Err(Error::InvalidProof(format!( - "Bad path: tree hash does not have expected hash, got {}, expected {}", - hex::encode(child_hash), - hex::encode(combined_root_hash) - ))); - } - - *expected_root_hash = proof_result.0; - - split_path = path_slice.split_last(); - } - - Ok(*expected_root_hash) + ) -> Result<([u8; 32], Vec), Error> { + // This is now handled within verify_proof_internal + Self::verify_query(proof, query) } - /// Execute a merk proof, update the state when a sized proof is - /// encountered i.e. update the limit, offset and result set values - fn execute_merk_proof( - &mut self, - proof_token_type: ProofTokenType, + pub fn verify_subset_query_with_absence_proof( proof: &[u8], - query: &Query, - left_to_right: bool, - path: Path, - ) -> Result<(CryptoHash, Option), Error> { - let is_sized_proof = proof_token_type == ProofTokenType::SizedMerk; - let mut limit = None; - - if is_sized_proof { - limit = self.limit; - } - - let (hash, result) = - grovedb_merk::execute_proof(proof, query, limit, left_to_right) - .unwrap() - .map_err(|e| { - eprintln!("{e}"); - Error::InvalidProof("invalid proof verification parameters".to_string()) - })?; - - // convert the result set to proved_path_key_values - let proved_path_key_values = - ProvedPathKeyValue::from_proved_key_values(path, result.result_set); - - if is_sized_proof { - self.limit = result.limit; - self.result_set.extend(proved_path_key_values); - Ok((hash, None)) - } else { - Ok((hash, Some(proved_path_key_values))) - } + query: 
&PathQuery, + ) -> Result<([u8; 32], Vec), Error> { + // Subset queries don't verify absence, so this is the same as + // verify_subset_query + Self::verify_subset_query(proof, query) } } diff --git a/grovedb/src/operations/proof_old/generate.rs b/grovedb/src/operations/proof_old/generate.rs new file mode 100644 index 00000000..f4974bd6 --- /dev/null +++ b/grovedb/src/operations/proof_old/generate.rs @@ -0,0 +1,938 @@ +// MIT LICENSE +// +// Copyright (c) 2021 Dash Core Group +// +// Permission is hereby granted, free of charge, to any +// person obtaining a copy of this software and associated +// documentation files (the "Software"), to deal in the +// Software without restriction, including without +// limitation the rights to use, copy, modify, merge, +// publish, distribute, sublicense, and/or sell copies of +// the Software, and to permit persons to whom the Software +// is furnished to do so, subject to the following +// conditions: +// +// The above copyright notice and this permission notice +// shall be included in all copies or substantial portions +// of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +// DEALINGS IN THE SOFTWARE. + +//! 
Generate proof operations + +// TODO: entire file is due for a refactor, need some kind of path generator +// that supports multiple implementations for verbose and non-verbose +// generation + +use std::collections::BTreeMap; +use grovedb_costs::{ + cost_return_on_error, cost_return_on_error_default, cost_return_on_error_no_add, CostResult, + CostsExt, OperationCost, +}; +use grovedb_merk::{ + proofs::{encode_into, Node, Op}, + tree::value_hash, + KVIterator, Merk, ProofWithoutEncodingResult, +}; +use grovedb_merk::proofs::query::{Key, Path}; +use grovedb_path::SubtreePath; +use grovedb_storage::StorageContext; + +use crate::{ + element::helpers::raw_decode, + operations::proof::util::{ + increase_limit_by, reduce_limit_by, write_slice_of_slice_to_slice, + write_slice_to_vec, write_to_vec, ProofTokenType, + }, + reference_path::path_from_reference_path_type, + versioning::{prepend_version_to_bytes, PROOF_VERSION}, + Element, Error, GroveDb, PathQuery, Query, +}; +use crate::query_result_type::{BTreeMapLevelResult, QueryResultType}; + +#[derive(Debug, Clone, Copy)] +pub struct ProveOptions { + pub is_verbose: bool, + pub multilevel_results: bool, +} + +impl Default for ProveOptions { + fn default() -> Self { + ProveOptions { + is_verbose: false, + multilevel_results: false, + } + } +} + +impl GroveDb { + /// Prove one or more path queries. + /// If we more than one path query, we merge into a single path query before + /// proving. + pub fn prove_query_many(&self, query: Vec<&PathQuery>, prove_options: Option) -> CostResult, Error> { + if query.len() > 1 { + let query = cost_return_on_error_default!(PathQuery::merge(query)); + self.prove_query(&query, prove_options) + } else { + self.prove_query(query[0], prove_options) + } + } + + /// Prove one or more path queries verbose. + /// If we more than one path query, we merge into a single path query before + /// proving verbose. 
+ pub fn prove_verbose_many(&self, query: Vec<&PathQuery>, prove_options: Option) -> CostResult, Error> { + if query.len() > 1 { + let query = cost_return_on_error_default!(PathQuery::merge(query)); + self.prove_query(&query, prove_options) + } else { + self.prove_query(query[0], prove_options) + } + } + + /// Generate a minimalistic proof for a given path query + /// doesn't allow for subset verification + /// Proofs generated with this can only be verified by the path query used + /// to generate them. + pub fn prove_query(&self, query: &PathQuery, prove_options: Option) -> CostResult, Error> { + self.prove_internal(query, prove_options) + } + + /// Generates a verbose or non-verbose proof based on a bool + fn prove_internal(&self, path_query: &PathQuery, prove_options: Option) -> CostResult, Error> { + let ProveOptions { + is_verbose, multilevel_results + } = prove_options.unwrap_or_default(); + let mut cost = OperationCost::default(); + + if path_query.query.offset.is_some() && path_query.query.offset != Some(0) { + return Err(Error::InvalidQuery("proved path queries can not have offsets")).wrap_with_cost(cost); + } + + let mut proof_result = + cost_return_on_error_default!(prepend_version_to_bytes(vec![], PROOF_VERSION)); + + let path_slices = path_query.path.iter().map(|x| x.as_slice()).collect::>(); + + let subtree_exists = self + .check_subtree_exists_path_not_found(path_slices.as_slice().into(), None) + .unwrap_add_cost(&mut cost); + + // if the subtree at the given path doesn't exist, prove that this path + // doesn't point to a valid subtree + match subtree_exists { + Ok(_) => { + // subtree exists + // do nothing + } + Err(_) => { + cost_return_on_error!( + &mut cost, + self.generate_and_store_absent_path_proof( + &path_slices, + &mut proof_result, + is_verbose + ) + ); + // return the absence proof no need to continue proof generation + return Ok(proof_result).wrap_with_cost(cost); + } + } + + // if the subtree exists and the proof type is verbose we 
need to insert + // the path information to the proof + if is_verbose { + cost_return_on_error!( + &mut cost, + Self::generate_and_store_path_proof(path_slices.clone(), &mut proof_result) + ); + } + + let mut limit: Option = path_query.query.limit; + + let precomputed_result_map = if !multilevel_results || limit.is_none() { + None + } else { + let result = cost_return_on_error!( + &mut cost, + self.query(path_query, false, true, false, QueryResultType::QueryPathKeyElementTrioResultType, None)).0; + Some(result.to_btree_map_level_results()) + }; + + println!("precomputed results are {:?}", precomputed_result_map); + + cost_return_on_error!( + &mut cost, + self.prove_subqueries( + &mut proof_result, + path_slices.clone(), + path_query, + &mut limit, + true, + is_verbose, + &precomputed_result_map, + ) + ); + cost_return_on_error!( + &mut cost, + self.prove_path(&mut proof_result, path_slices, is_verbose) + ); + + Ok(proof_result).wrap_with_cost(cost) + } + + /// Perform a pre-order traversal of the tree based on the provided + /// subqueries + fn prove_subqueries( + &self, + proofs: &mut Vec, + path: Vec<&[u8]>, + query: &PathQuery, + current_limit: &mut Option, + is_first_call: bool, + is_verbose: bool, + precomputed_results: &Option + ) -> CostResult<(), Error> { + let mut cost = OperationCost::default(); + let mut to_add_to_result_set: u16 = 0; + + let subtree = cost_return_on_error!( + &mut cost, + self.open_non_transactional_merk_at_path(path.as_slice().into(), None) + ); + if !subtree.has_root_key() { + cost_return_on_error_no_add!( + &cost, + write_to_vec(proofs, &[ProofTokenType::EmptyTree.into()]) + ); + return Ok(()).wrap_with_cost(cost); + } + + let precomputed_items_count = precomputed_results.as_ref().map(|level_results| level_results.len_of_values_at_path(path.as_slice())); + + let reached_limit = current_limit.map_or(false, |limit| limit == 0); + if reached_limit { + if is_first_call { + cost_return_on_error!( + &mut cost, + 
self.generate_and_store_merk_proof( + &path.as_slice().into(), + &subtree, + &query.query.query, + *current_limit, + ProofTokenType::SizedMerk, + proofs, + is_verbose, + path.iter().last().unwrap_or(&(&[][..])) + ) + ); + } + return Ok(()).wrap_with_cost(cost); + } + + let mut is_leaf_tree = true; + + let mut limit_inc = 0; + + let mut kv_iterator = KVIterator::new(subtree.storage.raw_iter(), &query.query.query) + .unwrap_add_cost(&mut cost); + + //let mut elements_to_prove = vec![]; + + while let Some((key, value_bytes)) = kv_iterator.next_kv().unwrap_add_cost(&mut cost) { + let mut encountered_absence = false; + + let element = cost_return_on_error_no_add!(&cost, raw_decode(&value_bytes)); + match element { + Element::Tree(root_key, _) | Element::SumTree(root_key, ..) => { + let (mut subquery_path, subquery_value) = + Element::subquery_paths_and_value_for_sized_query(&query.query, &key); + + if subquery_value.is_none() && subquery_path.is_none() { + // this element should be added to the result set + // hence we have to update the limit and offset value + reduce_limit_by(current_limit, 1); + limit_inc += 1; + continue; + } + + if root_key.is_none() { + continue; + } + + // if the element is a non-empty tree then current tree is not a leaf tree + if is_leaf_tree { + let proof_token_type = if precomputed_items_count.is_some() { + ProofTokenType::SizedMerk + } else { + ProofTokenType::Merk + }; + is_leaf_tree = false; + cost_return_on_error!( + &mut cost, + self.generate_and_store_merk_proof( + &path.as_slice().into(), + &subtree, + &query.query.query, + precomputed_items_count, + proof_token_type, + proofs, + is_verbose, + path.iter().last().unwrap_or(&Default::default()) + ) + ); + } + + let mut new_path = path.clone(); + new_path.push(key.as_ref()); + + let mut query = subquery_value; + + if query.is_some() { + if let Some(subquery_path) = &subquery_path { + for subkey in subquery_path.iter() { + let inner_subtree = cost_return_on_error!( + &mut cost, + 
self.open_non_transactional_merk_at_path( + new_path.as_slice().into(), + None, + ) + ); + + let mut key_as_query = Query::new(); + key_as_query.insert_key(subkey.clone()); + + cost_return_on_error!( + &mut cost, + self.generate_and_store_merk_proof( + &new_path.as_slice().into(), + &inner_subtree, + &key_as_query, + None, + ProofTokenType::Merk, + proofs, + is_verbose, + new_path.iter().last().unwrap_or(&Default::default()) + ) + ); + + new_path.push(subkey); + + if self + .check_subtree_exists_path_not_found( + new_path.as_slice().into(), + None, + ) + .unwrap_add_cost(&mut cost) + .is_err() + { + encountered_absence = true; + break; + } + } + + if encountered_absence { + continue; + } + } + } else if let Some(subquery_path) = &mut subquery_path { + if subquery_path.is_empty() { + // nothing to do on this path, since subquery path is empty + // and there is no consecutive subquery value + continue; + } + + let last_key = subquery_path.remove(subquery_path.len() - 1); + + for subkey in subquery_path.iter() { + let inner_subtree = cost_return_on_error!( + &mut cost, + self.open_non_transactional_merk_at_path( + new_path.as_slice().into(), + None + ) + ); + + let mut key_as_query = Query::new(); + key_as_query.insert_key(subkey.clone()); + + cost_return_on_error!( + &mut cost, + self.generate_and_store_merk_proof( + &new_path.as_slice().into(), + &inner_subtree, + &key_as_query, + None, + ProofTokenType::Merk, + proofs, + is_verbose, + new_path.iter().last().unwrap_or(&Default::default()) + ) + ); + + new_path.push(subkey); + + // check if the new path points to a valid subtree + // if it does not, we should stop proof generation on this path + if self + .check_subtree_exists_path_not_found( + new_path.as_slice().into(), + None, + ) + .unwrap_add_cost(&mut cost) + .is_err() + { + encountered_absence = true; + break; + } + } + + if encountered_absence { + continue; + } + + let mut key_as_query = Query::new(); + key_as_query.insert_key(last_key); + query = 
Some(key_as_query); + } else { + return Err(Error::CorruptedCodeExecution("subquery_path must exist")) + .wrap_with_cost(cost); + } + + let new_path_owned = new_path.iter().map(|a| a.to_vec()).collect(); + + let new_path_query = PathQuery::new_unsized(new_path_owned, query.unwrap()); + + if self + .check_subtree_exists_path_not_found(new_path.as_slice().into(), None) + .unwrap_add_cost(&mut cost) + .is_err() + { + continue; + } + + cost_return_on_error!( + &mut cost, + self.prove_subqueries( + proofs, + new_path, + &new_path_query, + current_limit, + false, + is_verbose, + precomputed_results, + ) + ); + + if *current_limit == Some(0) { + break; + } + } + _ => { + to_add_to_result_set += 1; + } + } + } + + if is_leaf_tree { + // if no useful subtree, then we care about the result set of this subtree. + // apply the sized query + increase_limit_by(current_limit, limit_inc); + let limit_offset = cost_return_on_error!( + &mut cost, + self.generate_and_store_merk_proof( + &path.as_slice().into(), + &subtree, + &query.query.query, + *current_limit, + ProofTokenType::SizedMerk, + proofs, + is_verbose, + path.iter().last().unwrap_or(&Default::default()) + ) + ); + + // update limit + *current_limit = limit_offset; + } else { + reduce_limit_by(current_limit, to_add_to_result_set); + } + + Ok(()).wrap_with_cost(cost) + } + + /// Given a path, construct and append a set of proofs that shows there is + /// a valid path from the root of the db to that point. 
+ fn prove_path( + &self, + proof_result: &mut Vec, + path_slices: Vec<&[u8]>, + is_verbose: bool, + ) -> CostResult<(), Error> { + let mut cost = OperationCost::default(); + + // generate proof to show that the path leads up to the root + let mut split_path = path_slices.split_last(); + while let Some((key, path_slice)) = split_path { + let subtree = cost_return_on_error!( + &mut cost, + self.open_non_transactional_merk_at_path(path_slice.into(), None) + ); + let mut query = Query::new(); + query.insert_key(key.to_vec()); + + cost_return_on_error!( + &mut cost, + self.generate_and_store_merk_proof( + &path_slice.into(), + &subtree, + &query, + None, + ProofTokenType::Merk, + proof_result, + is_verbose, + path_slice.iter().last().unwrap_or(&Default::default()) + ) + ); + split_path = path_slice.split_last(); + } + Ok(()).wrap_with_cost(cost) + } + + /// Generates query proof given a subtree and appends the result to a proof + /// list + fn generate_and_store_merk_proof<'a, S, B>( + &self, + path: &SubtreePath, + subtree: &'a Merk, + query: &Query, + limit: Option, + proof_token_type: ProofTokenType, + proofs: &mut Vec, + is_verbose: bool, + key: &[u8], + ) -> CostResult, Error> + where + S: StorageContext<'a> + 'a, + B: AsRef<[u8]>, + { + if proof_token_type != ProofTokenType::Merk && proof_token_type != ProofTokenType::SizedMerk + { + return Err(Error::InvalidInput( + "expect proof type for merk proof generation to be sized or merk proof type", + )) + .wrap_with_cost(Default::default()); + } + println!("generate_and_store_merk_proof path {:?} query {:?} limit_offset {:?} proof_token_type {}", path.to_vec().into_iter().map(hex::encode).collect::>().join("/"), query, limit, proof_token_type); + + let mut cost = OperationCost::default(); + + // if the subtree is empty, return the EmptyTree proof op + if !subtree.has_root_key() { + cost_return_on_error_no_add!( + &cost, + write_to_vec(proofs, &[ProofTokenType::EmptyTree.into()]) + ); + return 
Ok(limit).wrap_with_cost(cost); + } + + let mut proof_result = cost_return_on_error_no_add!( + &cost, + subtree + .prove_without_encoding(query.clone(), limit) + .unwrap() + .map_err(|_e| Error::InternalError("failed to generate proof")) + ); + + cost_return_on_error!(&mut cost, self.post_process_proof(path, &mut proof_result)); + + let mut proof_bytes = Vec::with_capacity(128); + encode_into(proof_result.proof.iter(), &mut proof_bytes); + + cost_return_on_error_no_add!(&cost, write_to_vec(proofs, &[proof_token_type.into()])); + + // if is verbose, write the key + if is_verbose { + cost_return_on_error_no_add!(&cost, write_slice_to_vec(proofs, key)); + } + + // write the merk proof + cost_return_on_error_no_add!(&cost, write_slice_to_vec(proofs, &proof_bytes)); + + Ok(proof_result.limit).wrap_with_cost(cost) + } + + /// Serializes a path and add it to the proof vector + fn generate_and_store_path_proof( + path: Vec<&[u8]>, + proofs: &mut Vec, + ) -> CostResult<(), Error> { + let cost = OperationCost::default(); + + cost_return_on_error_no_add!( + &cost, + write_to_vec(proofs, &[ProofTokenType::PathInfo.into()]) + ); + + cost_return_on_error_no_add!(&cost, write_slice_of_slice_to_slice(proofs, &path)); + + Ok(()).wrap_with_cost(cost) + } + + fn generate_and_store_absent_path_proof( + &self, + path_slices: &[&[u8]], + proof_result: &mut Vec, + is_verbose: bool, + ) -> CostResult<(), Error> { + let mut cost = OperationCost::default(); + + cost_return_on_error_no_add!( + &cost, + write_to_vec(proof_result, &[ProofTokenType::AbsentPath.into()]) + ); + let mut current_path: Vec<&[u8]> = vec![]; + + let mut split_path = path_slices.split_first(); + while let Some((key, path_slice)) = split_path { + let subtree = self + .open_non_transactional_merk_at_path(current_path.as_slice().into(), None) + .unwrap_add_cost(&mut cost); + + let Ok(subtree) = subtree else { + break; + }; + + let has_item = Element::get(&subtree, key, true).unwrap_add_cost(&mut cost); + + let mut 
next_key_query = Query::new(); + next_key_query.insert_key(key.to_vec()); + cost_return_on_error!( + &mut cost, + self.generate_and_store_merk_proof( + ¤t_path.as_slice().into(), + &subtree, + &next_key_query, + None, + ProofTokenType::Merk, + proof_result, + is_verbose, + current_path.iter().last().unwrap_or(&(&[][..])) + ) + ); + + current_path.push(key); + + if has_item.is_err() || path_slice.is_empty() { + // reached last key + break; + } + + split_path = path_slice.split_first(); + } + + Ok(()).wrap_with_cost(cost) + } + + /// Converts Items to Node::KV from Node::KVValueHash + /// Converts References to Node::KVRefValueHash and sets the value to the + /// referenced element + fn post_process_proof>( + &self, + path: &SubtreePath, + proof_result: &mut ProofWithoutEncodingResult, + ) -> CostResult<(), Error> { + let mut cost = OperationCost::default(); + + for op in proof_result.proof.iter_mut() { + match op { + Op::Push(node) | Op::PushInverted(node) => match node { + Node::KV(key, value) | Node::KVValueHash(key, value, ..) 
=> { + let elem = Element::deserialize(value); + match elem { + Ok(Element::Reference(reference_path, ..)) => { + let absolute_path = cost_return_on_error!( + &mut cost, + path_from_reference_path_type( + reference_path, + &path.to_vec(), + Some(key.as_slice()) + ) + .wrap_with_cost(OperationCost::default()) + ); + + let referenced_elem = cost_return_on_error!( + &mut cost, + self.follow_reference( + absolute_path.as_slice().into(), + true, + None + ) + ); + + let serialized_referenced_elem = referenced_elem.serialize(); + if serialized_referenced_elem.is_err() { + return Err(Error::CorruptedData(String::from( + "unable to serialize element", + ))) + .wrap_with_cost(cost); + } + + *node = Node::KVRefValueHash( + key.to_owned(), + serialized_referenced_elem.expect("confirmed ok above"), + value_hash(value).unwrap_add_cost(&mut cost), + ) + } + Ok(Element::Item(..)) => { + *node = Node::KV(key.to_owned(), value.to_owned()) + } + _ => continue, + } + } + _ => continue, + }, + _ => continue, + } + } + Ok(()).wrap_with_cost(cost) + } +} + +#[cfg(test)] +mod tests { + use grovedb_merk::{execute_proof, proofs::Query}; + use grovedb_storage::StorageBatch; + + use crate::{ + operations::proof::util::{ProofReader, ProofTokenType}, + tests::{common::EMPTY_PATH, make_deep_tree, TEST_LEAF}, + GroveDb, + }; + + #[test] + fn test_path_info_encoding_and_decoding() { + let path = vec![b"a".as_slice(), b"b".as_slice(), b"c".as_slice()]; + let mut proof_vector = vec![]; + GroveDb::generate_and_store_path_proof(path.clone(), &mut proof_vector) + .unwrap() + .unwrap(); + + let mut proof_reader = ProofReader::new(proof_vector.as_slice()); + let decoded_path = proof_reader.read_path_info().unwrap(); + + assert_eq!(path, decoded_path); + } + + #[test] + fn test_reading_of_verbose_proofs() { + let db = make_deep_tree(); + + let path = vec![TEST_LEAF, b"innertree"]; + let mut query = Query::new(); + query.insert_all(); + + let batch = StorageBatch::new(); + + let merk = db + 
.open_non_transactional_merk_at_path( + [TEST_LEAF, b"innertree"].as_ref().into(), + Some(&batch), + ) + .unwrap() + .unwrap(); + let expected_root_hash = merk.root_hash().unwrap(); + + let mut proof = vec![]; + db.generate_and_store_merk_proof( + &path.as_slice().into(), + &merk, + &query, + None, + ProofTokenType::Merk, + &mut proof, + true, + b"innertree", + ) + .unwrap() + .unwrap(); + assert_ne!(proof.len(), 0); + + let mut proof_reader = ProofReader::new(&proof); + let (proof_token_type, proof, key) = proof_reader.read_verbose_proof().unwrap(); + + assert_eq!(proof_token_type, ProofTokenType::Merk); + assert_eq!(key, Some(b"innertree".to_vec())); + + let (root_hash, result_set) = execute_proof(&proof, &query, None, true) + .unwrap() + .unwrap(); + assert_eq!(root_hash, expected_root_hash); + assert_eq!(result_set.result_set.len(), 3); + + // what is the key is empty?? + let merk = db + .open_non_transactional_merk_at_path(EMPTY_PATH, Some(&batch)) + .unwrap() + .unwrap(); + let expected_root_hash = merk.root_hash().unwrap(); + + let mut proof = vec![]; + db.generate_and_store_merk_proof( + &EMPTY_PATH, + &merk, + &query, + None, + ProofTokenType::Merk, + &mut proof, + true, + &[], + ) + .unwrap() + .unwrap(); + assert_ne!(proof.len(), 0); + + let mut proof_reader = ProofReader::new(&proof); + let (proof_token_type, proof, key) = proof_reader.read_verbose_proof().unwrap(); + + assert_eq!(proof_token_type, ProofTokenType::Merk); + assert_eq!(key, Some(vec![])); + + let (root_hash, result_set) = execute_proof(&proof, &query, None, true) + .unwrap() + .unwrap(); + assert_eq!(root_hash, expected_root_hash); + assert_eq!(result_set.result_set.len(), 3); + } + + #[test] + fn test_reading_verbose_proof_at_key() { + // going to generate an array of multiple proofs with different keys + let db = make_deep_tree(); + let mut proofs = vec![]; + + let mut query = Query::new(); + query.insert_all(); + + // insert all under inner tree + let path = vec![TEST_LEAF, 
b"innertree"]; + + let batch = StorageBatch::new(); + + let merk = db + .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) + .unwrap() + .unwrap(); + let inner_tree_root_hash = merk.root_hash().unwrap(); + db.generate_and_store_merk_proof( + &path.as_slice().into(), + &merk, + &query, + None, + ProofTokenType::Merk, + &mut proofs, + true, + path.iter().last().unwrap_or(&(&[][..])), + ) + .unwrap() + .unwrap(); + + // insert all under innertree4 + let path = vec![TEST_LEAF, b"innertree4"]; + let merk = db + .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) + .unwrap() + .unwrap(); + let inner_tree_4_root_hash = merk.root_hash().unwrap(); + db.generate_and_store_merk_proof( + &path.as_slice().into(), + &merk, + &query, + None, + ProofTokenType::Merk, + &mut proofs, + true, + path.iter().last().unwrap_or(&(&[][..])), + ) + .unwrap() + .unwrap(); + + // insert all for deeper_1 + let path: Vec<&[u8]> = vec![b"deep_leaf", b"deep_node_1", b"deeper_1"]; + let merk = db + .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) + .unwrap() + .unwrap(); + let deeper_1_root_hash = merk.root_hash().unwrap(); + db.generate_and_store_merk_proof( + &path.as_slice().into(), + &merk, + &query, + None, + ProofTokenType::Merk, + &mut proofs, + true, + path.iter().last().unwrap_or(&(&[][..])), + ) + .unwrap() + .unwrap(); + + // read the proof at innertree + let contextual_proof = proofs.clone(); + let mut proof_reader = ProofReader::new(&contextual_proof); + let (proof_token_type, proof) = proof_reader + .read_verbose_proof_at_key(b"innertree") + .unwrap(); + + assert_eq!(proof_token_type, ProofTokenType::Merk); + + let (root_hash, result_set) = execute_proof(&proof, &query, None, true) + .unwrap() + .unwrap(); + assert_eq!(root_hash, inner_tree_root_hash); + assert_eq!(result_set.result_set.len(), 3); + + // read the proof at innertree4 + let contextual_proof = proofs.clone(); + let mut proof_reader = 
ProofReader::new(&contextual_proof); + let (proof_token_type, proof) = proof_reader + .read_verbose_proof_at_key(b"innertree4") + .unwrap(); + + assert_eq!(proof_token_type, ProofTokenType::Merk); + + let (root_hash, result_set) = execute_proof(&proof, &query, None, true) + .unwrap() + .unwrap(); + assert_eq!(root_hash, inner_tree_4_root_hash); + assert_eq!(result_set.result_set.len(), 2); + + // read the proof at deeper_1 + let contextual_proof = proofs.clone(); + let mut proof_reader = ProofReader::new(&contextual_proof); + let (proof_token_type, proof) = + proof_reader.read_verbose_proof_at_key(b"deeper_1").unwrap(); + + assert_eq!(proof_token_type, ProofTokenType::Merk); + + let (root_hash, result_set) = execute_proof(&proof, &query, None, true) + .unwrap() + .unwrap(); + assert_eq!(root_hash, deeper_1_root_hash); + assert_eq!(result_set.result_set.len(), 3); + + // read the proof at an invalid key + let contextual_proof = proofs.clone(); + let mut proof_reader = ProofReader::new(&contextual_proof); + let reading_result = proof_reader.read_verbose_proof_at_key(b"unknown_key"); + assert!(reading_result.is_err()) + } +} diff --git a/grovedb/src/operations/proof_old/mod.rs b/grovedb/src/operations/proof_old/mod.rs new file mode 100644 index 00000000..f505b3eb --- /dev/null +++ b/grovedb/src/operations/proof_old/mod.rs @@ -0,0 +1,11 @@ +//! 
Proof operations + +// #[cfg(feature = "full")] +// mod generate; +#[cfg(any(feature = "full", feature = "verify"))] +pub mod util; +#[cfg(any(feature = "full", feature = "verify"))] +pub mod verify; + +// #[cfg(feature = "full")] +// pub use generate::ProveOptions; diff --git a/grovedb/src/operations/proof_old/util.rs b/grovedb/src/operations/proof_old/util.rs new file mode 100644 index 00000000..c3749a16 --- /dev/null +++ b/grovedb/src/operations/proof_old/util.rs @@ -0,0 +1,501 @@ +// MIT LICENSE +// +// Copyright (c) 2021 Dash Core Group +// +// Permission is hereby granted, free of charge, to any +// person obtaining a copy of this software and associated +// documentation files (the "Software"), to deal in the +// Software without restriction, including without +// limitation the rights to use, copy, modify, merge, +// publish, distribute, sublicense, and/or sell copies of +// the Software, and to permit persons to whom the Software +// is furnished to do so, subject to the following +// conditions: +// +// The above copyright notice and this permission notice +// shall be included in all copies or substantial portions +// of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +// DEALINGS IN THE SOFTWARE. 
+ +use std::fmt; +#[cfg(any(feature = "full", feature = "verify"))] +use std::io::Read; +#[cfg(feature = "full")] +use std::io::Write; + +use grovedb_merk::{ + proofs::query::{Key, Path, ProvedKeyValue}, + CryptoHash, +}; +#[cfg(any(feature = "full", feature = "verify"))] +use integer_encoding::{VarInt, VarIntReader}; + +#[cfg(any(feature = "full", feature = "verify"))] +use crate::Error; +use crate::operations::proof::verify::ProvedKeyValues; + +#[cfg(any(feature = "full", feature = "verify"))] +pub const EMPTY_TREE_HASH: [u8; 32] = [0; 32]; + +pub type ProofTokenInfo = (ProofTokenType, Vec, Option>); + +#[cfg(any(feature = "full", feature = "verify"))] +#[derive(Debug, PartialEq, Eq)] +/// Proof type +// TODO: there might be a better name for this +pub enum ProofTokenType { + Merk, + SizedMerk, + EmptyTree, + AbsentPath, + PathInfo, + Invalid, +} + +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for ProofTokenType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let variant_str = match self { + ProofTokenType::Merk => "Merk", + ProofTokenType::SizedMerk => "SizedMerk", + ProofTokenType::EmptyTree => "EmptyTree", + ProofTokenType::AbsentPath => "AbsentPath", + ProofTokenType::PathInfo => "PathInfo", + ProofTokenType::Invalid => "Invalid", + }; + write!(f, "{}", variant_str) + } +} + +#[cfg(any(feature = "full", feature = "verify"))] +impl From for u8 { + fn from(proof_token_type: ProofTokenType) -> Self { + match proof_token_type { + ProofTokenType::Merk => 0x01, + ProofTokenType::SizedMerk => 0x02, + ProofTokenType::EmptyTree => 0x04, + ProofTokenType::AbsentPath => 0x05, + ProofTokenType::PathInfo => 0x06, + ProofTokenType::Invalid => 0x10, + } + } +} + +#[cfg(any(feature = "full", feature = "verify"))] +impl From for ProofTokenType { + fn from(val: u8) -> Self { + match val { + 0x01 => ProofTokenType::Merk, + 0x02 => ProofTokenType::SizedMerk, + 0x04 => ProofTokenType::EmptyTree, + 0x05 => ProofTokenType::AbsentPath, + 0x06 
=> ProofTokenType::PathInfo, + _ => ProofTokenType::Invalid, + } + } +} + +#[cfg(any(feature = "full", feature = "verify"))] +impl ProofTokenType { + pub fn u8_to_display(val: u8) -> String { + match val { + 0x01 => "merk".to_string(), + 0x02 => "sized merk".to_string(), + 0x04 => "empty tree".to_string(), + 0x05 => "absent path".to_string(), + 0x06 => "path info".to_string(), + v => format!("invalid proof token {}", v), + } + } +} + +#[cfg(any(feature = "full", feature = "verify"))] +#[derive(Debug)] +// TODO: possibility for a proof writer?? +/// Proof reader +pub struct ProofReader<'a> { + proof_data: &'a [u8], + is_verbose: bool, +} + +#[cfg(any(feature = "full", feature = "verify"))] +impl<'a> ProofReader<'a> { + /// New proof reader + pub fn new(proof_data: &'a [u8]) -> Self { + Self { + proof_data, + is_verbose: false, + } + } + + /// New proof reader with verbose_status + pub fn new_with_verbose_status(proof_data: &'a [u8], is_verbose: bool) -> Self { + Self { + proof_data, + is_verbose, + } + } + + /// For non verbose proof read the immediate next proof, for verbose proof + /// read the first proof that matches a given key + pub fn read_next_proof(&mut self, key: &[u8]) -> Result<(ProofTokenType, Vec), Error> { + if self.is_verbose { + self.read_verbose_proof_at_key(key) + } else { + let (proof_token_type, proof, _) = self.read_proof_with_optional_type(None)?; + Ok((proof_token_type, proof)) + } + } + + /// Read the next proof, return the proof type + pub fn read_proof(&mut self) -> Result { + if self.is_verbose { + self.read_verbose_proof_with_optional_type(None) + } else { + self.read_proof_with_optional_type(None) + } + } + + /// Read verbose proof + pub fn read_verbose_proof(&mut self) -> Result { + self.read_verbose_proof_with_optional_type(None) + } + + /// Reads data from proof into slice of specific size + fn read_into_slice(&mut self, buf: &mut [u8]) -> Result { + self.proof_data + .read(buf) + .map_err(|_| 
Error::CorruptedData(String::from("failed to read proof data"))) + } + + /// Read varint encoded length information from proof data + fn read_length_data(&mut self) -> Result { + self.proof_data + .read_varint() + .map_err(|_| Error::InvalidProof("expected length data".to_string())) + } + + /// Read proof with optional type + pub fn read_proof_with_optional_type( + &mut self, + expected_data_type_option: Option, + ) -> Result { + let (proof_token_type, proof, _) = + self.read_proof_internal_with_optional_type(expected_data_type_option, false)?; + Ok((proof_token_type, proof, None)) + } + + /// Read verbose proof with optional type + pub fn read_verbose_proof_with_optional_type( + &mut self, + expected_data_type_option: Option, + ) -> Result { + let (proof_token_type, proof, key) = + self.read_proof_internal_with_optional_type(expected_data_type_option, true)?; + Ok(( + proof_token_type, + proof, + Some(key.ok_or(Error::InvalidProof( + "key must exist for verbose merk proofs".to_string(), + ))?), + )) + } + + /// Read verbose proof at key + /// Returns an error if it can't find a proof for that key + pub fn read_verbose_proof_at_key( + &mut self, + expected_key: &[u8], + ) -> Result<(ProofTokenType, Vec), Error> { + let (proof_token_type, proof, _) = loop { + let (proof_token_type, proof, key) = self.read_verbose_proof()?; + let key = key.expect("read_verbose_proof enforces that this exists"); + if key.as_slice() == expected_key { + break (proof_token_type, proof, key); + } + }; + + Ok((proof_token_type, proof)) + } + + /// Read proof with optional type + pub fn read_proof_internal_with_optional_type( + &mut self, + expected_data_type_option: Option, + is_verbose: bool, + ) -> Result { + let mut data_type = [0; 1]; + self.read_into_slice(&mut data_type)?; + + if let Some(expected_data_type) = expected_data_type_option { + if data_type[0] != expected_data_type { + return Err(Error::InvalidProof(format!( + "wrong data_type, expected {}, got {}", + expected_data_type, 
data_type[0] + ))); + } + } + + let proof_token_type: ProofTokenType = data_type[0].into(); + + if proof_token_type == ProofTokenType::EmptyTree + || proof_token_type == ProofTokenType::AbsentPath + { + return Ok((proof_token_type, vec![], None)); + } + + let (proof, key) = if proof_token_type == ProofTokenType::Merk + || proof_token_type == ProofTokenType::SizedMerk + { + // if verbose we need to read the key first + let key = if is_verbose { + let key_length = self.read_length_data()?; + + let mut key = vec![0; key_length]; + self.read_into_slice(&mut key)?; + + Some(key) + } else { + None + }; + + let proof_length = self.read_length_data()?; + + let mut proof = vec![0; proof_length]; + self.read_into_slice(&mut proof)?; + + (proof, key) + } else { + return Err(Error::InvalidProof( + "expected merk or sized merk proof".to_string(), + )); + }; + + Ok((proof_token_type, proof, key)) + } + + /// Reads path information from the proof vector + pub fn read_path_info(&mut self) -> Result>, Error> { + let mut data_type = [0; 1]; + self.read_into_slice(&mut data_type)?; + + if data_type != [Into::::into(ProofTokenType::PathInfo)] { + return Err(Error::InvalidProof(format!( + "wrong data_type, expected path_info, got {}", + ProofTokenType::u8_to_display(data_type[0]) + ))); + } + + let mut path = vec![]; + let path_slice_len = self.read_length_data()?; + + for _ in 0..path_slice_len { + let path_len = self.read_length_data()?; + let mut path_value = vec![0; path_len]; + self.read_into_slice(&mut path_value)?; + path.push(path_value); + } + + Ok(path) + } +} + +#[cfg(feature = "full")] +/// Write to vec +// TODO: this can error out handle the error +pub fn write_to_vec(dest: &mut W, value: &[u8]) -> Result<(), Error> { + dest.write_all(value) + .map_err(|_e| Error::InternalError("failed to write to vector")) +} + +#[cfg(feature = "full")] +/// Write a slice to the vector, first write the length of the slice +pub fn write_slice_to_vec(dest: &mut W, value: &[u8]) -> 
Result<(), Error> { + write_to_vec(dest, value.len().encode_var_vec().as_slice())?; + write_to_vec(dest, value)?; + Ok(()) +} + +#[cfg(feature = "full")] +/// Write a slice of a slice to a flat vector:w +pub fn write_slice_of_slice_to_slice(dest: &mut W, value: &[&[u8]]) -> Result<(), Error> { + // write the number of slices we are about to write + write_to_vec(dest, value.len().encode_var_vec().as_slice())?; + for inner_slice in value { + write_slice_to_vec(dest, inner_slice)?; + } + Ok(()) +} + +#[cfg(any(feature = "full", feature = "verify"))] +pub fn reduce_limit_by( + limit: &mut Option, + n: u16, +) { + if let Some(limit_value) = *limit { + if limit_value > 0 { + if limit_value >= n { + *limit = Some(limit_value - n); + } else { + *limit = Some(0); + } + } + } +} + +pub fn increase_limit_by( + limit: &mut Option, + limit_inc: u16, +) { + if let Some(limit_value) = *limit { + *limit = Some(limit_value + limit_inc); + } +} + +/// Proved path-key-values +pub type ProvedPathKeyValues = Vec; + +/// Proved path-key-value +#[cfg(any(feature = "full", feature = "verify"))] +#[derive(Debug, PartialEq, Eq)] +pub struct ProvedPathKeyValue { + /// Path + pub path: Path, + /// Key + pub key: Key, + /// Value + pub value: Vec, + /// Proof + pub proof: CryptoHash, +} + +impl ProvedPathKeyValue { + // TODO: make path a reference + /// Consumes the ProvedKeyValue and returns a ProvedPathKeyValue given a + /// Path + pub fn from_proved_key_value(path: Path, proved_key_value: ProvedKeyValue) -> Self { + Self { + path, + key: proved_key_value.key, + value: proved_key_value.value, + proof: proved_key_value.proof, + } + } + + /// Transforms multiple ProvedKeyValues to their equivalent + /// ProvedPathKeyValue given a Path + pub fn from_proved_key_values(path: Path, proved_key_values: ProvedKeyValues) -> Vec { + proved_key_values + .into_iter() + .map(|pkv| Self::from_proved_key_value(path.clone(), pkv)) + .collect() + } +} + +#[cfg(test)] +mod tests { + use 
grovedb_merk::proofs::query::ProvedKeyValue; + + use crate::operations::proof::util::{ProofTokenType, ProvedPathKeyValue}; + + #[test] + fn test_proof_token_type_encoding() { + assert_eq!(0x01_u8, Into::::into(ProofTokenType::Merk)); + assert_eq!(0x02_u8, Into::::into(ProofTokenType::SizedMerk)); + assert_eq!(0x04_u8, Into::::into(ProofTokenType::EmptyTree)); + assert_eq!(0x05_u8, Into::::into(ProofTokenType::AbsentPath)); + assert_eq!(0x06_u8, Into::::into(ProofTokenType::PathInfo)); + assert_eq!(0x10_u8, Into::::into(ProofTokenType::Invalid)); + } + + #[test] + fn test_proof_token_type_decoding() { + assert_eq!(ProofTokenType::Merk, 0x01_u8.into()); + assert_eq!(ProofTokenType::SizedMerk, 0x02_u8.into()); + assert_eq!(ProofTokenType::EmptyTree, 0x04_u8.into()); + assert_eq!(ProofTokenType::AbsentPath, 0x05_u8.into()); + assert_eq!(ProofTokenType::PathInfo, 0x06_u8.into()); + assert_eq!(ProofTokenType::Invalid, 0x10_u8.into()); + } + + #[test] + fn test_proved_path_from_single_proved_key_value() { + let path = vec![b"1".to_vec(), b"2".to_vec()]; + let proved_key_value = ProvedKeyValue { + key: b"a".to_vec(), + value: vec![5, 6], + proof: [0; 32], + }; + let proved_path_key_value = + ProvedPathKeyValue::from_proved_key_value(path.clone(), proved_key_value); + assert_eq!( + proved_path_key_value, + ProvedPathKeyValue { + path, + key: b"a".to_vec(), + value: vec![5, 6], + proof: [0; 32] + } + ); + } + + #[test] + fn test_many_proved_path_from_many_proved_key_value() { + let path = vec![b"1".to_vec(), b"2".to_vec()]; + let proved_key_value_a = ProvedKeyValue { + key: b"a".to_vec(), + value: vec![5, 6], + proof: [0; 32], + }; + let proved_key_value_b = ProvedKeyValue { + key: b"b".to_vec(), + value: vec![5, 7], + proof: [1; 32], + }; + let proved_key_value_c = ProvedKeyValue { + key: b"c".to_vec(), + value: vec![6, 7], + proof: [2; 32], + }; + let proved_key_values = vec![proved_key_value_a, proved_key_value_b, proved_key_value_c]; + let proved_path_key_values = + 
ProvedPathKeyValue::from_proved_key_values(path.clone(), proved_key_values); + assert_eq!(proved_path_key_values.len(), 3); + assert_eq!( + proved_path_key_values[0], + ProvedPathKeyValue { + path: path.clone(), + key: b"a".to_vec(), + value: vec![5, 6], + proof: [0; 32] + } + ); + assert_eq!( + proved_path_key_values[1], + ProvedPathKeyValue { + path: path.clone(), + key: b"b".to_vec(), + value: vec![5, 7], + proof: [1; 32] + } + ); + assert_eq!( + proved_path_key_values[2], + ProvedPathKeyValue { + path, + key: b"c".to_vec(), + value: vec![6, 7], + proof: [2; 32] + } + ); + } +} diff --git a/grovedb/src/operations/proof_old/verify.rs b/grovedb/src/operations/proof_old/verify.rs new file mode 100644 index 00000000..d26ac4ef --- /dev/null +++ b/grovedb/src/operations/proof_old/verify.rs @@ -0,0 +1,917 @@ +// MIT LICENSE +// +// Copyright (c) 2021 Dash Core Group +// +// Permission is hereby granted, free of charge, to any +// person obtaining a copy of this software and associated +// documentation files (the "Software"), to deal in the +// Software without restriction, including without +// limitation the rights to use, copy, modify, merge, +// publish, distribute, sublicense, and/or sell copies of +// the Software, and to permit persons to whom the Software +// is furnished to do so, subject to the following +// conditions: +// +// The above copyright notice and this permission notice +// shall be included in all copies or substantial portions +// of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +// DEALINGS IN THE SOFTWARE. + +//! 
Verify proof operations + +use std::{borrow::Cow, collections::BTreeMap}; + +use grovedb_merk::proofs::query::PathKey; +#[cfg(any(feature = "full", feature = "verify"))] +pub use grovedb_merk::proofs::query::{Path, ProvedKeyValue}; +#[cfg(any(feature = "full", feature = "verify"))] +use grovedb_merk::{ + proofs::Query, + tree::{combine_hash, value_hash as value_hash_fn}, + CryptoHash, +}; + +use crate::{ + operations::proof::util::{ + reduce_limit_by, ProvedPathKeyValue, ProvedPathKeyValues, + }, + query_result_type::PathKeyOptionalElementTrio, + versioning::read_and_consume_proof_version, + SizedQuery, +}; +#[cfg(any(feature = "full", feature = "verify"))] +use crate::{ + operations::proof::util::{ + ProofReader, ProofTokenType, ProofTokenType::AbsentPath, EMPTY_TREE_HASH, + }, + Element, Error, GroveDb, PathQuery, +}; + +#[cfg(any(feature = "full", feature = "verify"))] +pub type ProvedKeyValues = Vec; + +#[cfg(any(feature = "full", feature = "verify"))] +type EncounteredAbsence = bool; + +#[cfg(any(feature = "full", feature = "verify"))] +impl GroveDb { + /// Verify proof given a path query + /// Returns the root hash + deserialized elements + pub fn verify_query( + proof: &[u8], + query: &PathQuery, + ) -> Result<([u8; 32], Vec), Error> { + let (root_hash, proved_path_key_values) = Self::verify_query_raw(proof, query)?; + let path_key_optional_elements = proved_path_key_values + .into_iter() + .map(|pkv| pkv.try_into()) + .collect::, Error>>()?; + Ok((root_hash, path_key_optional_elements)) + } + + /// Verify proof for a given path query returns serialized elements + pub fn verify_query_raw( + proof: &[u8], + query: &PathQuery, + ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { + let mut verifier = ProofVerifier::new(query); + let hash = verifier.execute_proof(proof, query, false)?; + + Ok((hash, verifier.result_set)) + } + + /// Verify proof given multiple path queries. 
+ /// If we have more than one path query we merge before performing + /// verification. + pub fn verify_query_many( + proof: &[u8], + query: Vec<&PathQuery>, + ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { + if query.len() > 1 { + let query = PathQuery::merge(query)?; + GroveDb::verify_query_raw(proof, &query) + } else { + GroveDb::verify_query_raw(proof, query[0]) + } + } + + /// Given a verbose proof, we can verify it with a subset path query. + /// Returning the root hash and the deserialized result set. + pub fn verify_subset_query( + proof: &[u8], + query: &PathQuery, + ) -> Result<([u8; 32], Vec), Error> { + let (root_hash, proved_path_key_values) = Self::verify_subset_query_raw(proof, query)?; + let path_key_optional_elements = proved_path_key_values + .into_iter() + .map(|pkv| pkv.try_into()) + .collect::, Error>>()?; + Ok((root_hash, path_key_optional_elements)) + } + + /// Given a verbose proof, we can verify it with a subset path query. + /// Returning the root hash and the serialized result set. 
+ pub fn verify_subset_query_raw( + proof: &[u8], + query: &PathQuery, + ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { + let mut verifier = ProofVerifier::new(query); + let hash = verifier.execute_proof(proof, query, true)?; + Ok((hash, verifier.result_set)) + } + + /// Verify non subset query return the absence proof + /// Returns all possible keys within the Path Query with an optional Element + /// Value Element is set to None if absent + pub fn verify_query_with_absence_proof( + proof: &[u8], + query: &PathQuery, + ) -> Result<([u8; 32], Vec), Error> { + Self::verify_with_absence_proof(proof, query, Self::verify_query) + } + + /// Verify subset query return the absence proof + /// Returns all possible keys within the Path Query with an optional Element + /// Value Element is set to None if absent + pub fn verify_subset_query_with_absence_proof( + proof: &[u8], + query: &PathQuery, + ) -> Result<([u8; 32], Vec), Error> { + Self::verify_with_absence_proof(proof, query, Self::verify_subset_query) + } + + /// Verifies the proof and returns both elements in the result set and the + /// elements in query but not in state. + /// Note: This only works for certain path queries. + // TODO: We should not care about terminal keys, as theoretically they can be + // infinite we should perform the absence check solely on the proof and the + // given key, this is a temporary solution + fn verify_with_absence_proof( + proof: &[u8], + query: &PathQuery, + verification_fn: T, + ) -> Result<([u8; 32], Vec), Error> + where + T: Fn(&[u8], &PathQuery) -> Result<([u8; 32], Vec), Error>, + { + // must have a limit + let max_results = query.query.limit.ok_or(Error::NotSupported( + "limits must be set in verify_query_with_absence_proof".to_string(), + ))? 
as usize; + + // must have no offset + if query.query.offset.is_some() { + return Err(Error::NotSupported( + "offsets are not supported for verify_query_with_absence_proof".to_string(), + )); + } + + let terminal_keys = query.terminal_keys(max_results)?; + + // need to actually verify the query + let (root_hash, result_set) = verification_fn(proof, query)?; + + // convert the result set to a btree map + let mut result_set_as_map: BTreeMap> = result_set + .into_iter() + .map(|(path, key, element)| ((path, key), element)) + .collect(); + + let result_set_with_absence: Vec = terminal_keys + .into_iter() + .map(|terminal_key| { + let element = result_set_as_map.remove(&terminal_key).flatten(); + (terminal_key.0, terminal_key.1, element) + }) + .collect(); + + Ok((root_hash, result_set_with_absence)) + } + + /// Verify subset proof with a chain of path query functions. + /// After subset verification with the first path query, the result if + /// passed to the next path query generation function which generates a + /// new path query Apply the new path query, and pass the result to the + /// next ... This is useful for verifying proofs with multiple path + /// queries that depend on one another. 
+ pub fn verify_query_with_chained_path_queries( + proof: &[u8], + first_query: &PathQuery, + chained_path_queries: Vec, + ) -> Result<(CryptoHash, Vec>), Error> + where + C: Fn(Vec) -> Option, + { + let mut results = vec![]; + + let (last_root_hash, elements) = Self::verify_subset_query(proof, first_query)?; + results.push(elements); + + // we should iterate over each chained path queries + for path_query_generator in chained_path_queries { + let new_path_query = path_query_generator(results[results.len() - 1].clone()).ok_or( + Error::InvalidInput("one of the path query generators returns no path query"), + )?; + let (new_root_hash, new_elements) = Self::verify_subset_query(proof, &new_path_query)?; + if new_root_hash != last_root_hash { + return Err(Error::InvalidProof(format!( + "root hash for different path queries do no match, first is {}, this one is {}", + hex::encode(last_root_hash), + hex::encode(new_root_hash) + ))); + } + results.push(new_elements); + } + + Ok((last_root_hash, results)) + } +} + +#[cfg(any(feature = "full", feature = "verify"))] +/// Proof verifier +struct ProofVerifier { + limit: Option, + result_set: ProvedPathKeyValues, +} + +#[cfg(any(feature = "full", feature = "verify"))] +impl ProofVerifier { + /// New query + pub fn new(query: &PathQuery) -> Self { + ProofVerifier { + limit: query.query.limit, + result_set: vec![], + } + } + + /// Execute proof + pub fn execute_proof( + &mut self, + proof: &[u8], + query: &PathQuery, + is_verbose: bool, + ) -> Result<[u8; 32], Error> { + let (_proof_version, proof) = read_and_consume_proof_version(proof)?; + let mut proof_reader = ProofReader::new_with_verbose_status(proof, is_verbose); + + let path_slices = query.path.iter().map(|x| x.as_slice()).collect::>(); + let mut query = Cow::Borrowed(query); + + // TODO: refactor and add better comments + // if verbose, the first thing we want to do is read the path info + if is_verbose { + let original_path = proof_reader.read_path_info()?; + + if 
original_path == path_slices { + // do nothing + } else if original_path.len() > path_slices.len() { + // TODO: can we relax this constraint + return Err(Error::InvalidProof( + "original path query path must not be greater than the subset path len" + .to_string(), + )); + } else { + let original_path_in_new_path = original_path + .iter() + .all(|key| path_slices.contains(&key.as_slice())); + + if !original_path_in_new_path { + return Err(Error::InvalidProof( + "the original path should be a subset of the subset path".to_string(), + )); + } else { + // We construct a new path query + let path_not_common = path_slices[original_path.len()..].to_vec(); + let mut path_iter = path_not_common.iter(); + + let mut new_query = Query::new(); + if path_iter.len() >= 1 { + new_query + .insert_key(path_iter.next().expect("confirmed has value").to_vec()); + } + + // need to add the first key to the query + new_query.set_subquery_path(path_iter.map(|a| a.to_vec()).collect()); + new_query.set_subquery(query.query.query.clone()); + + query = Cow::Owned(PathQuery::new( + original_path, + SizedQuery::new(new_query, query.query.limit, query.query.offset), + )); + } + } + } + + let (proof_token_type, proof, _) = proof_reader.read_proof()?; + + let root_hash = if proof_token_type == AbsentPath { + self.verify_absent_path(&mut proof_reader, path_slices)? + } else { + let path_owned = query.path.iter().map(|a| a.to_vec()).collect(); + let mut last_subtree_root_hash = self.execute_subquery_proof( + proof_token_type, + proof, + &mut proof_reader, + query.as_ref(), + path_owned, + )?; + + // validate the path elements are connected + self.verify_path_to_root( + query.as_ref(), + query.path.iter().map(|a| a.as_ref()).collect(), + &mut proof_reader, + &mut last_subtree_root_hash, + )? 
+ }; + + Ok(root_hash) + } + + fn execute_subquery_proof( + &mut self, + proof_token_type: ProofTokenType, + proof: Vec, + proof_reader: &mut ProofReader, + query: &PathQuery, + path: Path, + ) -> Result<[u8; 32], Error> { + let last_root_hash: [u8; 32]; + + match proof_token_type { + ProofTokenType::SizedMerk => { + // verify proof with limit and offset values + let verification_result = self.execute_merk_proof( + ProofTokenType::SizedMerk, + &proof, + &query.query.query, + query.query.query.left_to_right, + path, + )?; + + last_root_hash = verification_result.0; + } + ProofTokenType::Merk => { + // for non leaf subtrees, we want to prove that all the queried keys + // have an accompanying proof as long as the limit is non zero + // and their child subtree is not empty + let (proof_root_hash, children) = self.execute_merk_proof( + ProofTokenType::Merk, + &proof, + &query.query.query, + query.query.query.left_to_right, + path, + )?; + + last_root_hash = proof_root_hash; + let children = children.ok_or(Error::InvalidProof( + "MERK_PROOF always returns a result set".to_string(), + ))?; + + for proved_path_key_value in children { + let ProvedPathKeyValue { + path, + key, + value: value_bytes, + proof: value_hash, + } = proved_path_key_value; + let child_element = Element::deserialize(value_bytes.as_slice())?; + match child_element { + Element::Tree(expected_root_key, _) + | Element::SumTree(expected_root_key, ..) 
=> { + let mut expected_combined_child_hash = value_hash; + let mut current_value_bytes = value_bytes; + + if self.limit == Some(0) { + // we are done verifying the subqueries + break; + } + + let (subquery_path, subquery_value) = + Element::subquery_paths_and_value_for_sized_query( + &query.query, + key.as_slice(), + ); + + if subquery_value.is_none() && subquery_path.is_none() { + // add this element to the result set + reduce_limit_by( + &mut self.limit, + 1, + ); + + self.result_set.push( + ProvedPathKeyValue::from_proved_key_value( + path, + ProvedKeyValue { + key, + value: current_value_bytes, + proof: value_hash, + }, + ), + ); + + continue; + } + + // What is the equivalent for an empty tree + if expected_root_key.is_none() { + // child node is empty, move on to next + continue; + } + + // update the path, we are about to perform a subquery call + let mut new_path = path.to_owned(); + new_path.push(key); + + if subquery_path.is_some() + && !subquery_path.as_ref().unwrap().is_empty() + { + if subquery_value.is_none() { + self.verify_subquery_path( + proof_reader, + ProofTokenType::SizedMerk, + &mut subquery_path.expect("confirmed it has a value above"), + &mut expected_combined_child_hash, + &mut current_value_bytes, + &mut new_path, + )?; + continue; + } else { + let (_, result_set_opt, encountered_absence) = self + .verify_subquery_path( + proof_reader, + ProofTokenType::Merk, + &mut subquery_path + .expect("confirmed it has a value above"), + &mut expected_combined_child_hash, + &mut current_value_bytes, + &mut new_path, + )?; + + if encountered_absence { + // we hit an absence proof while verifying the subquery path + continue; + } + + let subquery_path_result_set = result_set_opt; + if subquery_path_result_set.is_none() { + // this means a sized proof was generated for the subquery + // key + // which is invalid as there exists a subquery value + return Err(Error::InvalidProof( + "expected unsized proof for subquery path as subquery \ + value exists" + 
.to_string(), + )); + } + let subquery_path_result_set = + subquery_path_result_set.expect("confirmed exists above"); + + if subquery_path_result_set.is_empty() { + // we have a valid proof that shows the absence of the + // subquery path in the tree, hence the subquery value + // cannot be applied, move on to the next. + continue; + } + + Self::update_root_key_from_subquery_path_element( + &mut expected_combined_child_hash, + &mut current_value_bytes, + &subquery_path_result_set, + )?; + } + } + + let new_path_query = + PathQuery::new_unsized(vec![], subquery_value.unwrap()); + + let (child_proof_token_type, child_proof) = proof_reader + .read_next_proof(new_path.last().unwrap_or(&Default::default()))?; + + let child_hash = self.execute_subquery_proof( + child_proof_token_type, + child_proof, + proof_reader, + &new_path_query, + new_path, + )?; + + let combined_child_hash = combine_hash( + value_hash_fn(¤t_value_bytes).value(), + &child_hash, + ) + .value() + .to_owned(); + + if combined_child_hash != expected_combined_child_hash { + return Err(Error::InvalidProof(format!( + "child hash {} doesn't match the expected hash {}", + hex::encode(combined_child_hash), + hex::encode(expected_combined_child_hash) + ))); + } + } + _ => { + // encountered a non tree element, we can't apply a subquery to it + // add it to the result set. 
+ if self.limit == Some(0) { + break; + } + + reduce_limit_by(&mut self.limit, 1); + + self.result_set + .push(ProvedPathKeyValue::from_proved_key_value( + path, + ProvedKeyValue { + key, + value: value_bytes, + proof: value_hash, + }, + )); + } + } + } + } + ProofTokenType::EmptyTree => { + last_root_hash = EMPTY_TREE_HASH; + } + t => { + // execute_subquery_proof only expects proofs for merk trees + // root proof is handled separately + return Err(Error::InvalidProof(format!( + "wrong proof type, expected sized merk, merk or empty tree but got {}", + t + ))); + } + } + Ok(last_root_hash) + } + + /// Deserialize subkey_element and update expected root hash and element + /// value + fn update_root_key_from_subquery_path_element( + expected_child_hash: &mut CryptoHash, + current_value_bytes: &mut Vec, + subquery_path_result_set: &[ProvedPathKeyValue], + ) -> Result<(), Error> { + let elem_value = &subquery_path_result_set[0].value; + let subquery_path_element = Element::deserialize(elem_value) + .map_err(|_| Error::CorruptedData("failed to deserialize element".to_string()))?; + match subquery_path_element { + Element::Tree(..) | Element::SumTree(..) 
=> { + *expected_child_hash = subquery_path_result_set[0].proof; + *current_value_bytes = subquery_path_result_set[0].value.to_owned(); + } + e => { + // the means that the subquery path pointed to a non tree + // element, this is not valid as you cannot apply the + // the subquery value to non tree items + return Err(Error::InvalidProof(format!( + "subquery path cannot point to non tree element, got {}", + e.type_str() + ))); + } + } + Ok(()) + } + + /// Checks that a valid proof showing the existence or absence of the + /// subquery path is present + fn verify_subquery_path( + &mut self, + proof_reader: &mut ProofReader, + expected_proof_token_type: ProofTokenType, + subquery_path: &mut Path, + expected_root_hash: &mut CryptoHash, + current_value_bytes: &mut Vec, + current_path: &mut Path, + ) -> Result<(CryptoHash, Option, EncounteredAbsence), Error> { + // the subquery path contains at least one item. + let last_key = subquery_path.remove(subquery_path.len() - 1); + + for subquery_key in subquery_path.iter() { + let (proof_token_type, subkey_proof) = + proof_reader.read_next_proof(current_path.last().unwrap_or(&Default::default()))?; + // intermediate proofs are all going to be unsized merk proofs + if proof_token_type != ProofTokenType::Merk { + return Err(Error::InvalidProof(format!( + "expected MERK proof type for intermediate subquery path keys, got {}", + proof_token_type + ))); + } + match proof_token_type { + ProofTokenType::Merk => { + let mut key_as_query = Query::new(); + key_as_query.insert_key(subquery_key.to_owned()); + current_path.push(subquery_key.to_owned()); + + let (proof_root_hash, result_set) = self.execute_merk_proof( + proof_token_type, + &subkey_proof, + &key_as_query, + key_as_query.left_to_right, + current_path.to_owned(), + )?; + + // should always be some as we force the proof type to be MERK + debug_assert!(result_set.is_some(), "{}", true); + + // result_set being empty means we could not find the given key in the subtree + // 
which essentially means an absence proof + if result_set + .as_ref() + .expect("result set should always be some for merk proof type") + .is_empty() + { + return Ok((proof_root_hash, None, true)); + } + + // verify that the elements in the subquery path are linked by root hashes. + let combined_child_hash = + combine_hash(value_hash_fn(current_value_bytes).value(), &proof_root_hash) + .value() + .to_owned(); + + if combined_child_hash != *expected_root_hash { + return Err(Error::InvalidProof(format!( + "child hash {} doesn't match the expected hash {}", + hex::encode(combined_child_hash), + hex::encode(expected_root_hash) + ))); + } + + // after confirming they are linked use the latest hash values for subsequent + // checks + Self::update_root_key_from_subquery_path_element( + expected_root_hash, + current_value_bytes, + &result_set.expect("confirmed is some"), + )?; + } + t => { + return Err(Error::InvalidProof(format!( + "expected merk of sized merk proof type for subquery path, got {}", + t + ))); + } + } + } + + let (proof_token_type, subkey_proof) = + proof_reader.read_next_proof(current_path.last().unwrap_or(&Default::default()))?; + if proof_token_type != expected_proof_token_type { + return Err(Error::InvalidProof(format!( + "unexpected proof type for subquery path, expected {}, got {}", + expected_proof_token_type, proof_token_type + ))); + } + + match proof_token_type { + ProofTokenType::Merk | ProofTokenType::SizedMerk => { + let mut key_as_query = Query::new(); + key_as_query.insert_key(last_key.to_owned()); + + let verification_result = self.execute_merk_proof( + proof_token_type, + &subkey_proof, + &key_as_query, + key_as_query.left_to_right, + current_path.to_owned(), + )?; + + current_path.push(last_key); + + Ok((verification_result.0, verification_result.1, false)) + } + t => Err(Error::InvalidProof(format!( + "expected merk or sized merk proof type for subquery path, got {}", + t + ))), + } + } + + fn verify_absent_path( + &mut self, + 
proof_reader: &mut ProofReader, + path_slices: Vec<&[u8]>, + ) -> Result<[u8; 32], Error> { + let mut root_key_hash = None; + let mut expected_child_hash = None; + let mut last_result_set: ProvedPathKeyValues = vec![]; + + for key in path_slices { + let (proof_token_type, merk_proof, _) = proof_reader.read_proof()?; + if proof_token_type == ProofTokenType::EmptyTree { + // when we encounter the empty tree op, we need to ensure + // that the expected tree hash is the combination of the + // Element_value_hash and the empty root hash [0; 32] + let combined_hash = combine_hash( + value_hash_fn(last_result_set[0].value.as_slice()).value(), + &[0; 32], + ) + .unwrap(); + if Some(combined_hash) != expected_child_hash { + return Err(Error::InvalidProof( + "proof invalid: could not verify empty subtree while generating absent \ + path proof" + .to_string(), + )); + } else { + last_result_set = vec![]; + break; + } + } else if proof_token_type != ProofTokenType::Merk { + return Err(Error::InvalidProof(format!( + "expected a merk proof for absent path, got {}", + proof_token_type + ))); + } + + let mut child_query = Query::new(); + child_query.insert_key(key.to_vec()); + + // TODO: don't pass empty vec + let proof_result = self.execute_merk_proof( + ProofTokenType::Merk, + &merk_proof, + &child_query, + true, + // cannot return a result set + Vec::new(), + )?; + + if let Some(expected_child_hash) = expected_child_hash { + let combined_hash = combine_hash( + value_hash_fn(last_result_set[0].value.as_slice()).value(), + &proof_result.0, + ) + .value() + .to_owned(); + if combined_hash != expected_child_hash { + return Err(Error::InvalidProof(format!( + "proof invalid: invalid parent, expected {}, got {}", + hex::encode(expected_child_hash), + hex::encode(combined_hash) + ))); + } + } else { + root_key_hash = Some(proof_result.0); + } + + last_result_set = proof_result + .1 + .expect("MERK_PROOF always returns a result set"); + if last_result_set.is_empty() { + // if result set 
is empty then we have reached the absence point, break + break; + } + + let elem = Element::deserialize(last_result_set[0].value.as_slice())?; + let child_hash = match elem { + Element::Tree(..) | Element::SumTree(..) => Ok(Some(last_result_set[0].proof)), + e => Err(Error::InvalidProof(format!( + "intermediate proofs should be for trees, got {}", + e.type_str() + ))), + }?; + expected_child_hash = child_hash; + } + + if last_result_set.is_empty() { + if let Some(hash) = root_key_hash { + Ok(hash) + } else { + Err(Error::InvalidProof( + "proof invalid: no non root tree found".to_string(), + )) + } + } else { + Err(Error::InvalidProof( + "proof invalid: path not absent".to_string(), + )) + } + } + + /// Verifies that the correct proof was provided to confirm the path in + /// query + fn verify_path_to_root( + &mut self, + query: &PathQuery, + path_slices: Vec<&[u8]>, + proof_reader: &mut ProofReader, + expected_root_hash: &mut [u8; 32], + ) -> Result<[u8; 32], Error> { + let mut split_path = path_slices.split_last(); + while let Some((key, path_slice)) = split_path { + // for every subtree, there should be a corresponding proof for the parent + // which should prove that this subtree is a child of the parent tree + let (proof_token_type, parent_merk_proof) = + proof_reader.read_next_proof(path_slice.last().unwrap_or(&Default::default()))?; + if proof_token_type != ProofTokenType::Merk { + return Err(Error::InvalidProof(format!( + "wrong data_type expected Merk Proof, got {}", + proof_token_type + ))); + } + + let mut parent_query = Query::new(); + parent_query.insert_key(key.to_vec()); + + let proof_result = self.execute_merk_proof( + ProofTokenType::Merk, + &parent_merk_proof, + &parent_query, + query.query.query.left_to_right, + // TODO: don't pass empty vec + Vec::new(), + )?; + + let result_set = proof_result + .1 + .expect("MERK_PROOF always returns a result set"); + if result_set.is_empty() || &result_set[0].key != key { + return Err(Error::InvalidProof( + 
"proof invalid: invalid parent".to_string(), + )); + } + + let elem = Element::deserialize(result_set[0].value.as_slice())?; + let child_hash = match elem { + Element::Tree(..) | Element::SumTree(..) => Ok(result_set[0].proof), + t => Err(Error::InvalidProof(format!( + "intermediate proofs should be for trees, got {}", + t.type_str() + ))), + }?; + + let combined_root_hash = combine_hash( + value_hash_fn(&result_set[0].value).value(), + expected_root_hash, + ) + .value() + .to_owned(); + if child_hash != combined_root_hash { + return Err(Error::InvalidProof(format!( + "Bad path: tree hash does not have expected hash, got {}, expected {}", + hex::encode(child_hash), + hex::encode(combined_root_hash) + ))); + } + + *expected_root_hash = proof_result.0; + + split_path = path_slice.split_last(); + } + + Ok(*expected_root_hash) + } + + /// Execute a merk proof, update the state when a sized proof is + /// encountered i.e. update the limit, offset and result set values + fn execute_merk_proof( + &mut self, + proof_token_type: ProofTokenType, + proof: &[u8], + query: &Query, + left_to_right: bool, + path: Path, + ) -> Result<(CryptoHash, Option), Error> { + let is_sized_proof = proof_token_type == ProofTokenType::SizedMerk; + let mut limit = None; + + if is_sized_proof { + limit = self.limit; + } + + let (hash, result) = + grovedb_merk::execute_proof(proof, query, limit, left_to_right) + .unwrap() + .map_err(|e| { + eprintln!("{e}"); + Error::InvalidProof("invalid proof verification parameters".to_string()) + })?; + + // convert the result set to proved_path_key_values + let proved_path_key_values = + ProvedPathKeyValue::from_proved_key_values(path, result.result_set); + + if is_sized_proof { + self.limit = result.limit; + self.result_set.extend(proved_path_key_values); + Ok((hash, None)) + } else { + Ok((hash, Some(proved_path_key_values))) + } + } +} diff --git a/grovedb/src/operations/proof_v2/generate.rs b/grovedb/src/operations/proof_v2/generate.rs deleted file mode 
100644 index 93032723..00000000 --- a/grovedb/src/operations/proof_v2/generate.rs +++ /dev/null @@ -1,583 +0,0 @@ -//! Generate proof operations - -use std::collections::BTreeMap; -use bincode::{Decode, Encode}; -use derive_more::From; -use grovedb_costs::{ - cost_return_on_error, cost_return_on_error_default, cost_return_on_error_no_add, CostResult, - CostsExt, OperationCost, -}; -use grovedb_merk::{ - proofs::{encode_into, Node, Op}, - tree::value_hash, - KVIterator, Merk, ProofWithoutEncodingResult, -}; -use grovedb_merk::proofs::query::{Key, QueryItem}; -use grovedb_path::SubtreePath; -use grovedb_storage::StorageContext; - -use crate::{ - element::helpers::raw_decode, - operations::proof::util::{ - increase_limit_by, reduce_limit_by, write_slice_of_slice_to_slice, - write_slice_to_vec, write_to_vec, ProofTokenType, - }, - reference_path::path_from_reference_path_type, - Element, Error, GroveDb, PathQuery, Query, -}; -use crate::query_result_type::{BTreeMapLevelResult, BTreeMapLevelResultOrItem, QueryResultType}; - -#[derive(Debug, Clone, Copy)] -pub struct ProveOptions { - pub is_verbose: bool, - pub multilevel_results: bool, -} - -impl Default for ProveOptions { - fn default() -> Self { - ProveOptions { - is_verbose: false, - multilevel_results: false, - } - } -} - -#[derive(Encode, Decode)] -pub struct LayerProof { - pub merk_proof : Vec, - pub lower_layers : BTreeMap -} - -#[derive(Encode, Decode, From)] -pub enum GroveDBProof { - V0(GroveDBProofV0) -} - -#[derive(Encode, Decode)] -pub struct GroveDBProofV0 { - pub root_layer : LayerProof, -} - -impl GroveDb { - /// Prove one or more path queries. - /// If we have more than one path query, we merge into a single path query before - /// proving. 
- pub fn prove_query_many(&self, query: Vec<&PathQuery>, prove_options: Option) -> CostResult, Error> { - if query.len() > 1 { - let query = cost_return_on_error_default!(PathQuery::merge(query)); - self.prove_query(&query, prove_options) - } else { - self.prove_query(query[0], prove_options) - } - } - - /// Generate a minimalistic proof for a given path query - /// doesn't allow for subset verification - /// Proofs generated with this can only be verified by the path query used - /// to generate them. - pub fn prove_query(&self, query: &PathQuery, prove_options: Option) -> CostResult, Error> { - self.prove_internal_serialized(query, prove_options) - } - - /// Generates a proof and serializes it - fn prove_internal_serialized(&self, path_query: &PathQuery, prove_options: Option) -> CostResult, Error> { - let mut cost = OperationCost::default(); - let proof = cost_return_on_error!(&mut cost, self.prove_internal(path_query, prove_options)); - let config = bincode::config::standard() - .with_big_endian() - .with_no_limit(); - let encoded_proof = cost_return_on_error_no_add!(&cost, bincode::encode_to_vec(proof, config).map_err(|e| Error::CorruptedData(format!("unable to encode proof {}", e)))); - Ok(encoded_proof).wrap_with_cost(cost) - } - - /// Generates a proof - fn prove_internal(&self, path_query: &PathQuery, prove_options: Option) -> CostResult { - let ProveOptions { - is_verbose, multilevel_results - } = prove_options.unwrap_or_default(); - let mut cost = OperationCost::default(); - - if path_query.query.offset.is_some() && path_query.query.offset != Some(0) { - return Err(Error::InvalidQuery("proved path queries can not have offsets")).wrap_with_cost(cost); - } - - // we want to query raw because we want the references to not be resolved at this point - - let precomputed_result_map = cost_return_on_error!( - &mut cost, - self.query_raw(path_query, false, true, false, QueryResultType::QueryPathKeyElementTrioResultType, None)).0.to_btree_map_level_results(); - - 
- println!("precomputed results are {:?}", precomputed_result_map); - - let root_layer = cost_return_on_error!( - &mut cost, - self.prove_subqueries( - vec![], - path_query, - precomputed_result_map, - ) - ); - - Ok(GroveDBProofV0 { - root_layer - }.into()).wrap_with_cost(cost) - } - - /// Perform a pre-order traversal of the tree based on the provided - /// subqueries - fn prove_subqueries( - &self, - path: Vec<&[u8]>, - path_query: &PathQuery, - layer_precomputed_results: BTreeMapLevelResult, - ) -> CostResult { - - let mut cost = OperationCost::default(); - - let (query_at_path, left_to_right) = cost_return_on_error_no_add!( - &cost, - path_query.query_items_at_path(path.as_slice()).ok_or(Error::CorruptedPath("path should be part of path_query"))); - - - - let subtree = cost_return_on_error!( - &mut cost, - self.open_non_transactional_merk_at_path(path.as_slice().into(), None) - ); - - let limit = layer_precomputed_results.key_values.len(); - - let merk_proof = cost_return_on_error!(&mut cost, self.generate_merk_proof( - &path.as_slice().into(), - &subtree, - &query_at_path, - left_to_right, - Some(limit as u16), - )); - - let lower_layers = cost_return_on_error_no_add!( - &cost, - layer_precomputed_results.key_values.into_iter().filter_map(|(key, value)| { - match value { - BTreeMapLevelResultOrItem::BTreeMapLevelResult(layer) => { - let mut lower_path = path.clone(); - lower_path.push(key.as_slice()); - match self.prove_subqueries(lower_path, path_query, layer).unwrap_add_cost(&mut cost) { - Ok(layer_proof) => { - Some(Ok((key, layer_proof))) - } - Err(e) => { Some(Err(e))} - } - } - BTreeMapLevelResultOrItem::ResultItem(_) => { - None - } - } - - }).collect::, Error>>()); - - Ok(LayerProof { - merk_proof, - lower_layers - }).wrap_with_cost(cost) - } - - /// Generates query proof given a subtree and appends the result to a proof - /// list - fn generate_merk_proof<'a, S, B>( - &self, - path: &SubtreePath, - subtree: &'a Merk, - query_items: &Vec, - 
left_to_right: bool, - limit: Option, - ) -> CostResult, Error> - where - S: StorageContext<'a> + 'a, - B: AsRef<[u8]>, - { - let mut cost = OperationCost::default(); - - let mut proof_result = cost_return_on_error_no_add!( - &cost, - subtree - .prove_unchecked(query_items.clone(), limit, left_to_right) - .unwrap() - .map_err(|_e| Error::InternalError("failed to generate proof")) - ); - - cost_return_on_error!(&mut cost, self.post_process_merk_proof(path, &mut proof_result)); - - let mut proof_bytes = Vec::with_capacity(128); - encode_into(proof_result.iter(), &mut proof_bytes); - - Ok(proof_bytes).wrap_with_cost(cost) - } - - /// Serializes a path and add it to the proof vector - fn generate_and_store_path_proof( - path: Vec<&[u8]>, - proofs: &mut Vec, - ) -> CostResult<(), Error> { - let cost = OperationCost::default(); - - cost_return_on_error_no_add!( - &cost, - write_to_vec(proofs, &[ProofTokenType::PathInfo.into()]) - ); - - cost_return_on_error_no_add!(&cost, write_slice_of_slice_to_slice(proofs, &path)); - - Ok(()).wrap_with_cost(cost) - } - - fn generate_and_store_absent_path_proof( - &self, - path_slices: &[&[u8]], - proof_result: &mut Vec, - is_verbose: bool, - ) -> CostResult<(), Error> { - let mut cost = OperationCost::default(); - - cost_return_on_error_no_add!( - &cost, - write_to_vec(proof_result, &[ProofTokenType::AbsentPath.into()]) - ); - let mut current_path: Vec<&[u8]> = vec![]; - - let mut split_path = path_slices.split_first(); - while let Some((key, path_slice)) = split_path { - let subtree = self - .open_non_transactional_merk_at_path(current_path.as_slice().into(), None) - .unwrap_add_cost(&mut cost); - - let Ok(subtree) = subtree else { - break; - }; - - let has_item = Element::get(&subtree, key, true).unwrap_add_cost(&mut cost); - - let mut next_key_query = Query::new(); - next_key_query.insert_key(key.to_vec()); - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - ¤t_path.as_slice().into(), - &subtree, - 
&next_key_query, - None, - ProofTokenType::Merk, - proof_result, - is_verbose, - current_path.iter().last().unwrap_or(&(&[][..])) - ) - ); - - current_path.push(key); - - if has_item.is_err() || path_slice.is_empty() { - // reached last key - break; - } - - split_path = path_slice.split_first(); - } - - Ok(()).wrap_with_cost(cost) - } - - /// Converts Items to Node::KV from Node::KVValueHash - /// Converts References to Node::KVRefValueHash and sets the value to the - /// referenced element - fn post_process_merk_proof>( - &self, - path: &SubtreePath, - proof_result: &mut ProofWithoutEncodingResult, - ) -> CostResult<(), Error> { - let mut cost = OperationCost::default(); - - for op in proof_result.proof.iter_mut() { - match op { - Op::Push(node) | Op::PushInverted(node) => match node { - Node::KV(key, value) | Node::KVValueHash(key, value, ..) => { - let elem = Element::deserialize(value); - match elem { - Ok(Element::Reference(reference_path, ..)) => { - let absolute_path = cost_return_on_error!( - &mut cost, - path_from_reference_path_type( - reference_path, - &path.to_vec(), - Some(key.as_slice()) - ) - .wrap_with_cost(OperationCost::default()) - ); - - let referenced_elem = cost_return_on_error!( - &mut cost, - self.follow_reference( - absolute_path.as_slice().into(), - true, - None - ) - ); - - let serialized_referenced_elem = referenced_elem.serialize(); - if serialized_referenced_elem.is_err() { - return Err(Error::CorruptedData(String::from( - "unable to serialize element", - ))) - .wrap_with_cost(cost); - } - - *node = Node::KVRefValueHash( - key.to_owned(), - serialized_referenced_elem.expect("confirmed ok above"), - value_hash(value).unwrap_add_cost(&mut cost), - ) - } - Ok(Element::Item(..)) => { - *node = Node::KV(key.to_owned(), value.to_owned()) - } - _ => continue, - } - } - _ => continue, - }, - _ => continue, - } - } - Ok(()).wrap_with_cost(cost) - } -} -// -// #[cfg(test)] -// mod tests { -// use grovedb_merk::{execute_proof, proofs::Query}; -// 
use grovedb_storage::StorageBatch; -// -// use crate::{ -// operations::proof::util::{ProofReader, ProofTokenType}, -// tests::{common::EMPTY_PATH, make_deep_tree, TEST_LEAF}, -// GroveDb, -// }; -// -// #[test] -// fn test_path_info_encoding_and_decoding() { -// let path = vec![b"a".as_slice(), b"b".as_slice(), b"c".as_slice()]; -// let mut proof_vector = vec![]; -// GroveDb::generate_and_store_path_proof(path.clone(), &mut proof_vector) -// .unwrap() -// .unwrap(); -// -// let mut proof_reader = ProofReader::new(proof_vector.as_slice()); -// let decoded_path = proof_reader.read_path_info().unwrap(); -// -// assert_eq!(path, decoded_path); -// } -// -// #[test] -// fn test_reading_of_verbose_proofs() { -// let db = make_deep_tree(); -// -// let path = vec![TEST_LEAF, b"innertree"]; -// let mut query = Query::new(); -// query.insert_all(); -// -// let batch = StorageBatch::new(); -// -// let merk = db -// .open_non_transactional_merk_at_path( -// [TEST_LEAF, b"innertree"].as_ref().into(), -// Some(&batch), -// ) -// .unwrap() -// .unwrap(); -// let expected_root_hash = merk.root_hash().unwrap(); -// -// let mut proof = vec![]; -// db.generate_and_store_merk_proof( -// &path.as_slice().into(), -// &merk, -// &query, -// None, -// ProofTokenType::Merk, -// &mut proof, -// true, -// b"innertree", -// ) -// .unwrap() -// .unwrap(); -// assert_ne!(proof.len(), 0); -// -// let mut proof_reader = ProofReader::new(&proof); -// let (proof_token_type, proof, key) = proof_reader.read_verbose_proof().unwrap(); -// -// assert_eq!(proof_token_type, ProofTokenType::Merk); -// assert_eq!(key, Some(b"innertree".to_vec())); -// -// let (root_hash, result_set) = execute_proof(&proof, &query, None, true) -// .unwrap() -// .unwrap(); -// assert_eq!(root_hash, expected_root_hash); -// assert_eq!(result_set.result_set.len(), 3); -// -// // what is the key is empty?? 
-// let merk = db -// .open_non_transactional_merk_at_path(EMPTY_PATH, Some(&batch)) -// .unwrap() -// .unwrap(); -// let expected_root_hash = merk.root_hash().unwrap(); -// -// let mut proof = vec![]; -// db.generate_and_store_merk_proof( -// &EMPTY_PATH, -// &merk, -// &query, -// None, -// ProofTokenType::Merk, -// &mut proof, -// true, -// &[], -// ) -// .unwrap() -// .unwrap(); -// assert_ne!(proof.len(), 0); -// -// let mut proof_reader = ProofReader::new(&proof); -// let (proof_token_type, proof, key) = proof_reader.read_verbose_proof().unwrap(); -// -// assert_eq!(proof_token_type, ProofTokenType::Merk); -// assert_eq!(key, Some(vec![])); -// -// let (root_hash, result_set) = execute_proof(&proof, &query, None, true) -// .unwrap() -// .unwrap(); -// assert_eq!(root_hash, expected_root_hash); -// assert_eq!(result_set.result_set.len(), 3); -// } -// -// #[test] -// fn test_reading_verbose_proof_at_key() { -// // going to generate an array of multiple proofs with different keys -// let db = make_deep_tree(); -// let mut proofs = vec![]; -// -// let mut query = Query::new(); -// query.insert_all(); -// -// // insert all under inner tree -// let path = vec![TEST_LEAF, b"innertree"]; -// -// let batch = StorageBatch::new(); -// -// let merk = db -// .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) -// .unwrap() -// .unwrap(); -// let inner_tree_root_hash = merk.root_hash().unwrap(); -// db.generate_and_store_merk_proof( -// &path.as_slice().into(), -// &merk, -// &query, -// None, -// ProofTokenType::Merk, -// &mut proofs, -// true, -// path.iter().last().unwrap_or(&(&[][..])), -// ) -// .unwrap() -// .unwrap(); -// -// // insert all under innertree4 -// let path = vec![TEST_LEAF, b"innertree4"]; -// let merk = db -// .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) -// .unwrap() -// .unwrap(); -// let inner_tree_4_root_hash = merk.root_hash().unwrap(); -// db.generate_and_store_merk_proof( -// 
&path.as_slice().into(), -// &merk, -// &query, -// None, -// ProofTokenType::Merk, -// &mut proofs, -// true, -// path.iter().last().unwrap_or(&(&[][..])), -// ) -// .unwrap() -// .unwrap(); -// -// // insert all for deeper_1 -// let path: Vec<&[u8]> = vec![b"deep_leaf", b"deep_node_1", b"deeper_1"]; -// let merk = db -// .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) -// .unwrap() -// .unwrap(); -// let deeper_1_root_hash = merk.root_hash().unwrap(); -// db.generate_and_store_merk_proof( -// &path.as_slice().into(), -// &merk, -// &query, -// None, -// ProofTokenType::Merk, -// &mut proofs, -// true, -// path.iter().last().unwrap_or(&(&[][..])), -// ) -// .unwrap() -// .unwrap(); -// -// // read the proof at innertree -// let contextual_proof = proofs.clone(); -// let mut proof_reader = ProofReader::new(&contextual_proof); -// let (proof_token_type, proof) = proof_reader -// .read_verbose_proof_at_key(b"innertree") -// .unwrap(); -// -// assert_eq!(proof_token_type, ProofTokenType::Merk); -// -// let (root_hash, result_set) = execute_proof(&proof, &query, None, true) -// .unwrap() -// .unwrap(); -// assert_eq!(root_hash, inner_tree_root_hash); -// assert_eq!(result_set.result_set.len(), 3); -// -// // read the proof at innertree4 -// let contextual_proof = proofs.clone(); -// let mut proof_reader = ProofReader::new(&contextual_proof); -// let (proof_token_type, proof) = proof_reader -// .read_verbose_proof_at_key(b"innertree4") -// .unwrap(); -// -// assert_eq!(proof_token_type, ProofTokenType::Merk); -// -// let (root_hash, result_set) = execute_proof(&proof, &query, None, true) -// .unwrap() -// .unwrap(); -// assert_eq!(root_hash, inner_tree_4_root_hash); -// assert_eq!(result_set.result_set.len(), 2); -// -// // read the proof at deeper_1 -// let contextual_proof = proofs.clone(); -// let mut proof_reader = ProofReader::new(&contextual_proof); -// let (proof_token_type, proof) = -// 
proof_reader.read_verbose_proof_at_key(b"deeper_1").unwrap(); -// -// assert_eq!(proof_token_type, ProofTokenType::Merk); -// -// let (root_hash, result_set) = execute_proof(&proof, &query, None, true) -// .unwrap() -// .unwrap(); -// assert_eq!(root_hash, deeper_1_root_hash); -// assert_eq!(result_set.result_set.len(), 3); -// -// // read the proof at an invalid key -// let contextual_proof = proofs.clone(); -// let mut proof_reader = ProofReader::new(&contextual_proof); -// let reading_result = proof_reader.read_verbose_proof_at_key(b"unknown_key"); -// assert!(reading_result.is_err()) -// } -// } diff --git a/grovedb/src/operations/proof_v2/mod.rs b/grovedb/src/operations/proof_v2/mod.rs deleted file mode 100644 index 2e4ae617..00000000 --- a/grovedb/src/operations/proof_v2/mod.rs +++ /dev/null @@ -1,7 +0,0 @@ -//! Proof operations - -#[cfg(feature = "full")] -mod generate; - -#[cfg(feature = "full")] -pub use generate::ProveOptions; diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index 75778137..cb450e6e 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -28,8 +28,7 @@ //! 
Queries -use std::borrow::Cow; -use std::cmp::Ordering; +use std::{borrow::Cow, cmp::Ordering}; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_merk::proofs::query::query_item::QueryItem; @@ -270,8 +269,15 @@ impl PathQuery { } } } - pub fn query_items_at_path<'a>(&'a self, path: &[&[u8]]) -> Option<(Cow<'a, Vec>, bool)> { - fn recursive_query_items<'b>(query: &'b Query, path: &[&[u8]]) -> Option<(Cow<'b, Vec>, bool)> { + + pub fn query_items_at_path<'a>( + &'a self, + path: &[&[u8]], + ) -> Option<(Cow<'a, Vec>, bool)> { + fn recursive_query_items<'b>( + query: &'b Query, + path: &[&[u8]], + ) -> Option<(Cow<'b, Vec>, bool)> { if path.is_empty() { return Some((Cow::Borrowed(&query.items), query.left_to_right)); } @@ -283,16 +289,28 @@ impl PathQuery { if query_item.contains(key) { if let Some(subquery_path) = &subquery_branch.subquery_path { if path.len() <= subquery_path.len() { - if path.iter().zip(subquery_path).all(|(a, b)| *a == b.as_slice()) { + if path + .iter() + .zip(subquery_path) + .all(|(a, b)| *a == b.as_slice()) + { return if path.len() == subquery_path.len() { if let Some(subquery) = &subquery_branch.subquery { - Some((Cow::Borrowed(&subquery.items), subquery.left_to_right)) + Some(( + Cow::Borrowed(&subquery.items), + subquery.left_to_right, + )) } else { None } } else { - Some((Cow::Owned(vec![QueryItem::Key(subquery_path[path.len()].clone())]), true)) - } + Some(( + Cow::Owned(vec![QueryItem::Key( + subquery_path[path.len()].clone(), + )]), + true, + )) + }; } } } @@ -301,14 +319,18 @@ impl PathQuery { recursive_query_items(subquery, &path[1..]) } else { Some((Cow::Owned(vec![QueryItem::Key(key.to_vec())]), true)) - } + }; } } } if let Some(subquery_path) = &query.default_subquery_branch.subquery_path { if path.len() <= subquery_path.len() { - if path.iter().zip(subquery_path).all(|(a, b)| *a == b.as_slice()) { + if path + .iter() + .zip(subquery_path) + .all(|(a, b)| *a == b.as_slice()) + { return if path.len() == 
subquery_path.len() { if let Some(subquery) = &query.default_subquery_branch.subquery { Some((Cow::Borrowed(&subquery.items), subquery.left_to_right)) @@ -316,10 +338,18 @@ impl PathQuery { None } } else { - Some((Cow::Owned(vec![QueryItem::Key(subquery_path[path.len()].clone())]), true)) - } + Some(( + Cow::Owned(vec![QueryItem::Key(subquery_path[path.len()].clone())]), + true, + )) + }; } - } else if path.iter().take(subquery_path.len()).zip(subquery_path).all(|(a, b)| *a == b.as_slice()) { + } else if path + .iter() + .take(subquery_path.len()) + .zip(subquery_path) + .all(|(a, b)| *a == b.as_slice()) + { if let Some(subquery) = &query.default_subquery_branch.subquery { return recursive_query_items(subquery, &path[subquery_path.len()..]); } @@ -337,14 +367,20 @@ impl PathQuery { match given_path_len.cmp(&self_path_len) { Ordering::Less => { if path.iter().zip(&self.path).all(|(a, b)| *a == b.as_slice()) { - Some((Cow::Owned(vec![QueryItem::Key(self.path[given_path_len].clone())]), true)) + Some(( + Cow::Owned(vec![QueryItem::Key(self.path[given_path_len].clone())]), + true, + )) } else { None } } Ordering::Equal => { if path.iter().zip(&self.path).all(|(a, b)| *a == b.as_slice()) { - Some((Cow::Borrowed(&self.query.query.items), self.query.query.left_to_right)) + Some(( + Cow::Borrowed(&self.query.query.items), + self.query.query.left_to_right, + )) } else { None } @@ -400,7 +436,10 @@ mod tests { let merged_path_query = PathQuery::merge(vec![&path_query_one, &path_query_two]) .expect("should merge path queries"); - let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); + let proof = temp_db + .prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, result_set_tree) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); assert_eq!(result_set_tree.len(), 2); @@ -438,7 +477,10 @@ mod tests { assert_eq!(merged_path_query.path, vec![TEST_LEAF.to_vec()]); 
assert_eq!(merged_path_query.query.query.items.len(), 2); - let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); + let proof = temp_db + .prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, result_set_merged) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); @@ -496,7 +538,10 @@ mod tests { query_three.clone(), ); - let proof = temp_db.prove_query(&path_query_three, None).unwrap().unwrap(); + let proof = temp_db + .prove_query(&path_query_three, None) + .unwrap() + .unwrap(); let (_, result_set_two) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_three) .expect("should execute proof"); assert_eq!(result_set_two.len(), 2); @@ -632,7 +677,10 @@ mod tests { .expect("expected to get results"); assert_eq!(result_set_merged.len(), 7); - let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); + let proof = temp_db + .prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, proved_result_set_merged) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); @@ -703,7 +751,10 @@ mod tests { .expect("expect to merge path queries"); assert_eq!(merged_path_query.path, vec![b"deep_leaf".to_vec()]); - let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); + let proof = temp_db + .prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, result_set_merged) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); @@ -765,7 +816,10 @@ mod tests { query_three, ); - let proof = temp_db.prove_query(&path_query_three, None).unwrap().unwrap(); + let proof = temp_db + .prove_query(&path_query_three, None) + .unwrap() + .unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_three) .expect("should execute proof"); assert_eq!(result_set.len(), 2); @@ -774,7 +828,10 @@ mod tests { 
PathQuery::merge(vec![&path_query_one, &path_query_two, &path_query_three]) .expect("should merge three queries"); - let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); + let proof = temp_db + .prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); assert_eq!(result_set.len(), 4); @@ -811,7 +868,10 @@ mod tests { let merged_path_query = PathQuery::merge(vec![&path_query_one, &path_query_two]) .expect("should merge three queries"); - let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); + let proof = temp_db + .prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); assert_eq!(result_set.len(), 2); @@ -926,7 +986,10 @@ mod tests { .expect("expected to get results"); assert_eq!(result_set_merged.len(), 4); - let proof = temp_db.prove_query(&merged_path_query, None).unwrap().unwrap(); + let proof = temp_db + .prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); assert_eq!(result_set.len(), 4); diff --git a/grovedb/src/query_result_type.rs b/grovedb/src/query_result_type.rs index e7ce2d6a..52cf59d7 100644 --- a/grovedb/src/query_result_type.rs +++ b/grovedb/src/query_result_type.rs @@ -64,7 +64,7 @@ pub enum BTreeMapLevelResultOrItem { /// BTreeMap level result #[derive(Debug, Clone)] pub struct BTreeMapLevelResult { - pub key_values : BTreeMap + pub key_values: BTreeMap, } impl BTreeMapLevelResult { @@ -76,11 +76,11 @@ impl BTreeMapLevelResult { match current.key_values.get(*segment) { Some(BTreeMapLevelResultOrItem::BTreeMapLevelResult(next_level)) => { current = next_level; - }, + } Some(BTreeMapLevelResultOrItem::ResultItem(_)) => { // We've 
reached a ResultItem before the end of the path return 0; - }, + } None => { // Path not found return 0; @@ -310,37 +310,43 @@ impl QueryResultElements { element: Element, ) { if let Some(segment) = path.next() { - let next_level = current_level.key_values - .entry(segment) - .or_insert_with(|| BTreeMapLevelResultOrItem::BTreeMapLevelResult(BTreeMapLevelResult { - key_values: BTreeMap::new() - })); + let next_level = current_level.key_values.entry(segment).or_insert_with(|| { + BTreeMapLevelResultOrItem::BTreeMapLevelResult(BTreeMapLevelResult { + key_values: BTreeMap::new(), + }) + }); match next_level { BTreeMapLevelResultOrItem::BTreeMapLevelResult(inner) => { insert_recursive(inner, path, key, element); - }, + } BTreeMapLevelResultOrItem::ResultItem(_) => { - // This shouldn't happen in a well-formed structure, but we'll handle it anyway - *next_level = BTreeMapLevelResultOrItem::BTreeMapLevelResult(BTreeMapLevelResult { - key_values: BTreeMap::new() - }); + // This shouldn't happen in a well-formed structure, but we'll handle it + // anyway + *next_level = + BTreeMapLevelResultOrItem::BTreeMapLevelResult(BTreeMapLevelResult { + key_values: BTreeMap::new(), + }); if let BTreeMapLevelResultOrItem::BTreeMapLevelResult(inner) = next_level { insert_recursive(inner, path, key, element); } - }, + } } } else { - current_level.key_values.insert(key, BTreeMapLevelResultOrItem::ResultItem(element)); + current_level + .key_values + .insert(key, BTreeMapLevelResultOrItem::ResultItem(element)); } } let mut root = BTreeMapLevelResult { - key_values: BTreeMap::new() + key_values: BTreeMap::new(), }; for result_item in self.elements { - if let QueryResultElement::PathKeyElementTrioResultItem((path, key, element)) = result_item { + if let QueryResultElement::PathKeyElementTrioResultItem((path, key, element)) = + result_item + { insert_recursive(&mut root, path.into_iter(), key, element); } } diff --git a/grovedb/src/tests/mod.rs b/grovedb/src/tests/mod.rs index 
fec4b867..f85311ca 100644 --- a/grovedb/src/tests/mod.rs +++ b/grovedb/src/tests/mod.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Tests pub mod common; @@ -47,11 +19,11 @@ use tempfile::TempDir; use self::common::EMPTY_PATH; use super::*; use crate::{ - query_result_type::QueryResultType::QueryKeyElementPairResultType, - reference_path::ReferencePathType, tests::common::compare_result_tuples, + operations::proof::ProveOptions, + query_result_type::{QueryResultType, QueryResultType::QueryKeyElementPairResultType}, + reference_path::ReferencePathType, + tests::common::compare_result_tuples, }; -use crate::operations::proof_v2::ProveOptions; -use crate::query_result_type::QueryResultType; pub const TEST_LEAF: &[u8] = b"test_leaf"; @@ -567,7 +539,6 @@ pub fn make_deep_tree_with_sum_trees() -> TempGroveDb { .unwrap() .expect("successful item insert"); - // Add a -> "storage" to deep_node_1 temp_db .insert( @@ -629,14 +600,7 @@ pub fn make_deep_tree_with_sum_trees() -> TempGroveDb { .expect("successful sum item insert"); // Add items to 4, 5, 6, 7 - for (key, value) in [ - (b"d", b"v1"), - (b"e", b"v4"), - (b"f", b"v1"), - (b"g", b"v4"), - ] - .iter() - { + for (key, value) in [(b"d", b"v1"), (b"e", b"v4"), (b"f", b"v1"), (b"g", b"v4")].iter() { temp_db .insert( [DEEP_LEAF, b"deep_node_1", key.as_slice()].as_ref(), @@ -662,11 +626,7 @@ pub fn make_deep_tree_with_sum_trees() -> TempGroveDb { // Add items to sum trees in d, e, f for key in [b"d", b"e", b"f"].iter() { - let (value1, value2) = if *key == b"d" { - (4, 1) - } else { - (1, 4) - }; + let (value1, value2) = if *key == b"d" { (4, 1) } else { (1, 4) }; temp_db .insert( @@ -729,958 +689,962 @@ pub fn make_deep_tree_with_sum_trees() -> TempGroveDb { temp_db } -#[test] -fn test_init() { - let tmp_dir = TempDir::new().unwrap(); - GroveDb::open(tmp_dir).expect("empty tree is ok"); -} +mod tests { + use super::*; -#[test] -fn test_element_with_flags() { - let db = make_test_grovedb(); + #[test] + fn test_init() { + let tmp_dir = TempDir::new().unwrap(); + GroveDb::open(tmp_dir).expect("empty tree is ok"); + } - db.insert( - [TEST_LEAF].as_ref(), - 
b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("should insert subtree successfully"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"elem1", - Element::new_item(b"flagless".to_vec()), - None, - None, - ) - .unwrap() - .expect("should insert subtree successfully"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"elem2", - Element::new_item_with_flags(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())), - None, - None, - ) - .unwrap() - .expect("should insert subtree successfully"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"elem3", - Element::new_tree_with_flags(None, Some([1].to_vec())), - None, - None, - ) - .unwrap() - .expect("should insert subtree successfully"); - db.insert( - [TEST_LEAF, b"key1", b"elem3"].as_ref(), - b"elem4", - Element::new_reference_with_flags( - ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"key1".to_vec(), - b"elem2".to_vec(), - ]), - Some([9].to_vec()), - ), - None, - None, - ) - .unwrap() - .expect("should insert subtree successfully"); + #[test] + fn test_element_with_flags() { + let db = make_test_grovedb(); - let element_without_flag = db - .get([TEST_LEAF, b"key1"].as_ref(), b"elem1", None) - .unwrap() - .expect("should get successfully"); - let element_with_flag = db - .get([TEST_LEAF, b"key1"].as_ref(), b"elem2", None) + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::empty_tree(), + None, + None, + ) .unwrap() - .expect("should get successfully"); - let tree_element_with_flag = db - .get([TEST_LEAF, b"key1"].as_ref(), b"elem3", None) + .expect("should insert subtree successfully"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"elem1", + Element::new_item(b"flagless".to_vec()), + None, + None, + ) .unwrap() - .expect("should get successfully"); - let flagged_ref_follow = db - .get([TEST_LEAF, b"key1", b"elem3"].as_ref(), b"elem4", None) + .expect("should insert subtree successfully"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"elem2", + 
Element::new_item_with_flags(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())), + None, + None, + ) .unwrap() - .expect("should get successfully"); - - let mut query = Query::new(); - query.insert_key(b"elem4".to_vec()); - let path_query = PathQuery::new( - vec![TEST_LEAF.to_vec(), b"key1".to_vec(), b"elem3".to_vec()], - SizedQuery::new(query, None, None), - ); - let (flagged_ref_no_follow, _) = db - .query_raw( - &path_query, - true, - true, - true, - QueryKeyElementPairResultType, + .expect("should insert subtree successfully"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"elem3", + Element::new_tree_with_flags(None, Some([1].to_vec())), + None, None, ) .unwrap() - .expect("should get successfully"); - - assert_eq!( - element_without_flag, - Element::Item(b"flagless".to_vec(), None) - ); - assert_eq!( - element_with_flag, - Element::Item(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())) - ); - assert_eq!(tree_element_with_flag.get_flags(), &Some([1].to_vec())); - assert_eq!( - flagged_ref_follow, - Element::Item(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())) - ); - assert_eq!( - flagged_ref_no_follow.to_key_elements()[0], - ( - b"elem4".to_vec(), - Element::Reference( + .expect("should insert subtree successfully"); + db.insert( + [TEST_LEAF, b"key1", b"elem3"].as_ref(), + b"elem4", + Element::new_reference_with_flags( ReferencePathType::AbsolutePathReference(vec![ TEST_LEAF.to_vec(), b"key1".to_vec(), - b"elem2".to_vec() + b"elem2".to_vec(), ]), - None, - Some([9].to_vec()) - ) + Some([9].to_vec()), + ), + None, + None, ) - ); - - // Test proofs with flags - let mut query = Query::new(); - query.insert_all(); - - let path_query = PathQuery::new( - vec![TEST_LEAF.to_vec(), b"key1".to_vec()], - SizedQuery::new(query, None, None), - ); - let proof = db - .prove_query(&path_query, None) - .unwrap() - .expect("should successfully create proof"); - let (root_hash, result_set) = - GroveDb::verify_query_raw(&proof, &path_query).expect("should verify 
proof"); - assert_eq!(root_hash, db.grove_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - assert_eq!( - Element::deserialize(&result_set[0].value).expect("should deserialize element"), - Element::Item(b"flagless".to_vec(), None) - ); - assert_eq!( - Element::deserialize(&result_set[1].value).expect("should deserialize element"), - Element::Item(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())) - ); - assert_eq!( - Element::deserialize(&result_set[2].value) - .expect("should deserialize element") - .get_flags(), - &Some([1].to_vec()) - ); -} - -#[test] -fn test_cannot_update_populated_tree_item() { - // This test shows that you cannot update a tree item - // in a way that disconnects it's root hash from that of - // the merk it points to. - let db = make_deep_tree(); - - let old_element = db - .get([TEST_LEAF].as_ref(), b"innertree", None) - .unwrap() - .expect("should fetch item"); - - let new_element = Element::empty_tree(); - db.insert( - [TEST_LEAF].as_ref(), - b"innertree", - new_element.clone(), - None, - None, - ) - .unwrap() - .expect_err("should not override tree"); - - let current_element = db - .get([TEST_LEAF].as_ref(), b"innertree", None) .unwrap() - .expect("should fetch item"); - - assert_eq!(current_element, old_element); - assert_ne!(current_element, new_element); -} - -#[test] -fn test_changes_propagated() { - let db = make_test_grovedb(); - let old_hash = db.root_hash(None).unwrap().unwrap(); - let element = Element::new_item(b"ayy".to_vec()); - - // Insert some nested subtrees - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 1 insert"); - - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 2 insert"); - - db.insert( - [TEST_LEAF, b"key1", b"key2"].as_ref(), - b"key3", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful 
value insert"); + .expect("should insert subtree successfully"); - assert_eq!( - db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) + let element_without_flag = db + .get([TEST_LEAF, b"key1"].as_ref(), b"elem1", None) .unwrap() - .expect("successful get"), - element - ); - assert_ne!(old_hash, db.root_hash(None).unwrap().unwrap()); -} - -// TODO: Add solid test cases to this - -#[test] -fn test_references() { - let db = make_test_grovedb(); - db.insert( - [TEST_LEAF].as_ref(), - b"merk_1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"merk_1"].as_ref(), - b"key1", - Element::new_item(b"value1".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"merk_1"].as_ref(), - b"key2", - Element::new_item(b"value2".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF].as_ref(), - b"merk_2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - // db.insert([TEST_LEAF, b"merk_2"].as_ref(), b"key2", - // Element::new_item(b"value2".to_vec()), None).expect("successful subtree - // insert"); - db.insert( - [TEST_LEAF, b"merk_2"].as_ref(), - b"key1", - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"merk_1".to_vec(), - b"key1".to_vec(), - ])), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"merk_2"].as_ref(), - b"key2", - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"merk_1".to_vec(), - b"key2".to_vec(), - ])), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - assert!(db - .get([TEST_LEAF].as_ref(), b"merk_1", None) - .unwrap() - .is_ok()); - assert!(db - .get([TEST_LEAF].as_ref(), b"merk_2", None) - .unwrap() - .is_ok()); -} - -#[test] -fn 
test_follow_references() { - let db = make_test_grovedb(); - let element = Element::new_item(b"ayy".to_vec()); - - // Insert an item to refer to - db.insert( - [TEST_LEAF].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 1 insert"); - db.insert( - [TEST_LEAF, b"key2"].as_ref(), - b"key3", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - - // Insert a reference - db.insert( - [TEST_LEAF].as_ref(), - b"reference_key", - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"key2".to_vec(), - b"key3".to_vec(), - ])), - None, - None, - ) - .unwrap() - .expect("successful reference insert"); - - assert_eq!( - db.get([TEST_LEAF].as_ref(), b"reference_key", None) + .expect("should get successfully"); + let element_with_flag = db + .get([TEST_LEAF, b"key1"].as_ref(), b"elem2", None) .unwrap() - .expect("successful get"), - element - ); -} + .expect("should get successfully"); + let tree_element_with_flag = db + .get([TEST_LEAF, b"key1"].as_ref(), b"elem3", None) + .unwrap() + .expect("should get successfully"); + let flagged_ref_follow = db + .get([TEST_LEAF, b"key1", b"elem3"].as_ref(), b"elem4", None) + .unwrap() + .expect("should get successfully"); -#[test] -fn test_reference_must_point_to_item() { - let db = make_test_grovedb(); + let mut query = Query::new(); + query.insert_key(b"elem4".to_vec()); + let path_query = PathQuery::new( + vec![TEST_LEAF.to_vec(), b"key1".to_vec(), b"elem3".to_vec()], + SizedQuery::new(query, None, None), + ); + let (flagged_ref_no_follow, _) = db + .query_raw( + &path_query, + true, + true, + true, + QueryKeyElementPairResultType, + None, + ) + .unwrap() + .expect("should get successfully"); - let result = db - .insert( - [TEST_LEAF].as_ref(), - b"reference_key_1", - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"reference_key_2".to_vec(), - ])), - 
None, - None, - ) - .unwrap(); + assert_eq!( + element_without_flag, + Element::Item(b"flagless".to_vec(), None) + ); + assert_eq!( + element_with_flag, + Element::Item(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())) + ); + assert_eq!(tree_element_with_flag.get_flags(), &Some([1].to_vec())); + assert_eq!( + flagged_ref_follow, + Element::Item(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())) + ); + assert_eq!( + flagged_ref_no_follow.to_key_elements()[0], + ( + b"elem4".to_vec(), + Element::Reference( + ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + b"key1".to_vec(), + b"elem2".to_vec() + ]), + None, + Some([9].to_vec()) + ) + ) + ); - assert!(matches!(result, Err(Error::MissingReference(_)))); -} + // Test proofs with flags + let mut query = Query::new(); + query.insert_all(); -#[test] -fn test_too_many_indirections() { - use crate::operations::get::MAX_REFERENCE_HOPS; - let db = make_test_grovedb(); + let path_query = PathQuery::new( + vec![TEST_LEAF.to_vec(), b"key1".to_vec()], + SizedQuery::new(query, None, None), + ); + let proof = db + .prove_query(&path_query, None) + .unwrap() + .expect("should successfully create proof"); + let (root_hash, result_set) = + GroveDb::verify_query_raw(&proof, &path_query).expect("should verify proof"); + assert_eq!(root_hash, db.grove_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + assert_eq!( + Element::deserialize(&result_set[0].value).expect("should deserialize element"), + Element::Item(b"flagless".to_vec(), None) + ); + assert_eq!( + Element::deserialize(&result_set[1].value).expect("should deserialize element"), + Element::Item(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())) + ); + assert_eq!( + Element::deserialize(&result_set[2].value) + .expect("should deserialize element") + .get_flags(), + &Some([1].to_vec()) + ); + } - let keygen = |idx| format!("key{}", idx).bytes().collect::>(); + #[test] + fn test_cannot_update_populated_tree_item() { + // This 
test shows that you cannot update a tree item + // in a way that disconnects it's root hash from that of + // the merk it points to. + let db = make_deep_tree(); - db.insert( - [TEST_LEAF].as_ref(), - b"key0", - Element::new_item(b"oops".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful item insert"); + let old_element = db + .get([TEST_LEAF].as_ref(), b"innertree", None) + .unwrap() + .expect("should fetch item"); - for i in 1..=(MAX_REFERENCE_HOPS) { + let new_element = Element::empty_tree(); db.insert( [TEST_LEAF].as_ref(), - &keygen(i), - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - keygen(i - 1), - ])), + b"innertree", + new_element.clone(), None, None, ) .unwrap() - .expect("successful reference insert"); - } + .expect_err("should not override tree"); - // Add one more reference - db.insert( - [TEST_LEAF].as_ref(), - &keygen(MAX_REFERENCE_HOPS + 1), - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - keygen(MAX_REFERENCE_HOPS), - ])), - None, - None, - ) - .unwrap() - .expect("expected insert"); + let current_element = db + .get([TEST_LEAF].as_ref(), b"innertree", None) + .unwrap() + .expect("should fetch item"); - let result = db - .get([TEST_LEAF].as_ref(), &keygen(MAX_REFERENCE_HOPS + 1), None) - .unwrap(); + assert_eq!(current_element, old_element); + assert_ne!(current_element, new_element); + } - assert!(matches!(result, Err(Error::ReferenceLimit))); -} + #[test] + fn test_changes_propagated() { + let db = make_test_grovedb(); + let old_hash = db.root_hash(None).unwrap().unwrap(); + let element = Element::new_item(b"ayy".to_vec()); -#[test] -fn test_reference_value_affects_state() { - let db_one = make_test_grovedb(); - db_one - .insert( + // Insert some nested subtrees + db.insert( [TEST_LEAF].as_ref(), b"key1", - Element::new_item(vec![0]), + Element::empty_tree(), None, None, ) .unwrap() - .expect("should insert item"); - db_one - .insert( - 
[ANOTHER_TEST_LEAF].as_ref(), - b"ref", - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"key1".to_vec(), - ])), + .expect("successful subtree 1 insert"); + + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key2", + Element::empty_tree(), None, None, ) .unwrap() - .expect("should insert item"); + .expect("successful subtree 2 insert"); - let db_two = make_test_grovedb(); - db_two - .insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::new_item(vec![0]), + db.insert( + [TEST_LEAF, b"key1", b"key2"].as_ref(), + b"key3", + element.clone(), None, None, ) .unwrap() - .expect("should insert item"); - db_two - .insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"ref", - Element::new_reference(ReferencePathType::UpstreamRootHeightReference( - 0, - vec![TEST_LEAF.to_vec(), b"key1".to_vec()], - )), - None, - None, - ) - .unwrap() - .expect("should insert item"); + .expect("successful value insert"); - assert_ne!( - db_one - .root_hash(None) - .unwrap() - .expect("should return root hash"), - db_two - .root_hash(None) - .unwrap() - .expect("should return toor hash") - ); -} + assert_eq!( + db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) + .unwrap() + .expect("successful get"), + element + ); + assert_ne!(old_hash, db.root_hash(None).unwrap().unwrap()); + } -#[test] -fn test_tree_structure_is_persistent() { - let tmp_dir = TempDir::new().unwrap(); - let element = Element::new_item(b"ayy".to_vec()); - // Create a scoped GroveDB - let prev_root_hash = { - let mut db = GroveDb::open(tmp_dir.path()).unwrap(); - add_test_leaves(&mut db); + // TODO: Add solid test cases to this - // Insert some nested subtrees + #[test] + fn test_references() { + let db = make_test_grovedb(); db.insert( [TEST_LEAF].as_ref(), - b"key1", + b"merk_1", Element::empty_tree(), None, None, ) .unwrap() - .expect("successful subtree 1 insert"); + .expect("successful subtree insert"); db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key2", - 
Element::empty_tree(), + [TEST_LEAF, b"merk_1"].as_ref(), + b"key1", + Element::new_item(b"value1".to_vec()), None, None, ) .unwrap() - .expect("successful subtree 2 insert"); - // Insert an element into subtree + .expect("successful subtree insert"); db.insert( - [TEST_LEAF, b"key1", b"key2"].as_ref(), - b"key3", - element.clone(), + [TEST_LEAF, b"merk_1"].as_ref(), + b"key2", + Element::new_item(b"value2".to_vec()), None, None, ) .unwrap() - .expect("successful value insert"); - assert_eq!( - db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) - .unwrap() - .expect("successful get 1"), - element - ); - db.root_hash(None).unwrap().unwrap() - }; - // Open a persisted GroveDB - let db = GroveDb::open(tmp_dir).unwrap(); - assert_eq!( - db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) - .unwrap() - .expect("successful get 2"), - element - ); - assert!(db - .get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key4", None) - .unwrap() - .is_err()); - assert_eq!(prev_root_hash, db.root_hash(None).unwrap().unwrap()); -} - -#[test] -fn test_root_tree_leaves_are_noted() { - let db = make_test_grovedb(); - db.check_subtree_exists_path_not_found([TEST_LEAF].as_ref().into(), None) - .unwrap() - .expect("should exist"); - db.check_subtree_exists_path_not_found([ANOTHER_TEST_LEAF].as_ref().into(), None) - .unwrap() - .expect("should exist"); -} - -#[test] -fn test_proof_for_invalid_path_root_key() { - let db = make_test_grovedb(); - - let query = Query::new(); - let path_query = PathQuery::new_unsized(vec![b"invalid_path_key".to_vec()], query); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); -} - -#[test] -fn test_proof_for_invalid_path() { - let db = make_deep_tree(); - - let query = Query::new(); - let path_query = - 
PathQuery::new_unsized(vec![b"deep_leaf".to_vec(), b"invalid_key".to_vec()], query); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); - - let query = Query::new(); - let path_query = PathQuery::new_unsized( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"invalid_key".to_vec(), - ], - query, - ); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); - - let query = Query::new(); - let path_query = PathQuery::new_unsized( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec(), - b"invalid_key".to_vec(), - ], - query, - ); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); - - let query = Query::new(); - let path_query = PathQuery::new_unsized( - vec![ - b"deep_leaf".to_vec(), - b"early_invalid_key".to_vec(), - b"deeper_1".to_vec(), - b"invalid_key".to_vec(), - ], - query, - ); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); -} - -#[test] -fn test_proof_for_non_existent_data() { - let temp_db = make_test_grovedb(); - - let mut query = Query::new(); - query.insert_key(b"key1".to_vec()); - - // path to empty subtree - 
let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); -} - -#[test] -fn test_path_query_proofs_without_subquery_with_reference() { - // Tree Structure - // root - // test_leaf - // innertree - // k1,v1 - // k2,v2 - // k3,v3 - // another_test_leaf - // innertree2 - // k3,v3 - // k4, reference to k1 in innertree - // k5, reference to k4 in innertree3 - // innertree3 - // k4,v4 + .expect("successful subtree insert"); - // Insert elements into grovedb instance - let temp_db = make_test_grovedb(); - // Insert level 1 nodes - temp_db - .insert( + db.insert( [TEST_LEAF].as_ref(), - b"innertree", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"innertree2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"innertree3", + b"merk_2", Element::empty_tree(), None, None, ) .unwrap() .expect("successful subtree insert"); - // Insert level 2 nodes - temp_db - .insert( - [TEST_LEAF, b"innertree"].as_ref(), + // db.insert([TEST_LEAF, b"merk_2"].as_ref(), b"key2", + // Element::new_item(b"value2".to_vec()), None).expect("successful subtree + // insert"); + db.insert( + [TEST_LEAF, b"merk_2"].as_ref(), b"key1", - Element::new_item(b"value1".to_vec()), + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + b"merk_1".to_vec(), + b"key1".to_vec(), + ])), None, None, ) .unwrap() .expect("successful subtree insert"); - temp_db - .insert( - [TEST_LEAF, b"innertree"].as_ref(), + db.insert( + [TEST_LEAF, b"merk_2"].as_ref(), b"key2", - 
Element::new_item(b"value2".to_vec()), + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + b"merk_1".to_vec(), + b"key2".to_vec(), + ])), None, None, ) .unwrap() .expect("successful subtree insert"); - temp_db - .insert( - [TEST_LEAF, b"innertree"].as_ref(), - b"key3", - Element::new_item(b"value3".to_vec()), + assert!(db + .get([TEST_LEAF].as_ref(), b"merk_1", None) + .unwrap() + .is_ok()); + assert!(db + .get([TEST_LEAF].as_ref(), b"merk_2", None) + .unwrap() + .is_ok()); + } + + #[test] + fn test_follow_references() { + let db = make_test_grovedb(); + let element = Element::new_item(b"ayy".to_vec()); + + // Insert an item to refer to + db.insert( + [TEST_LEAF].as_ref(), + b"key2", + Element::empty_tree(), None, None, ) .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), + .expect("successful subtree 1 insert"); + db.insert( + [TEST_LEAF, b"key2"].as_ref(), b"key3", - Element::new_item(b"value3".to_vec()), + element.clone(), None, None, ) .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), - b"key4", + .expect("successful value insert"); + + // Insert a reference + db.insert( + [TEST_LEAF].as_ref(), + b"reference_key", Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ TEST_LEAF.to_vec(), - b"innertree".to_vec(), - b"key1".to_vec(), + b"key2".to_vec(), + b"key3".to_vec(), ])), None, None, ) .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF, b"innertree3"].as_ref(), - b"key4", - Element::new_item(b"value4".to_vec()), + .expect("successful reference insert"); + + assert_eq!( + db.get([TEST_LEAF].as_ref(), b"reference_key", None) + .unwrap() + .expect("successful get"), + element + ); + } + + #[test] + fn test_reference_must_point_to_item() { + let db = make_test_grovedb(); + + let result = db + .insert( + [TEST_LEAF].as_ref(), + 
b"reference_key_1", + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + b"reference_key_2".to_vec(), + ])), + None, + None, + ) + .unwrap(); + + assert!(matches!(result, Err(Error::MissingReference(_)))); + } + + #[test] + fn test_too_many_indirections() { + use crate::operations::get::MAX_REFERENCE_HOPS; + let db = make_test_grovedb(); + + let keygen = |idx| format!("key{}", idx).bytes().collect::>(); + + db.insert( + [TEST_LEAF].as_ref(), + b"key0", + Element::new_item(b"oops".to_vec()), None, None, ) .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), - b"key5", + .expect("successful item insert"); + + for i in 1..=(MAX_REFERENCE_HOPS) { + db.insert( + [TEST_LEAF].as_ref(), + &keygen(i), + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + keygen(i - 1), + ])), + None, + None, + ) + .unwrap() + .expect("successful reference insert"); + } + + // Add one more reference + db.insert( + [TEST_LEAF].as_ref(), + &keygen(MAX_REFERENCE_HOPS + 1), Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - ANOTHER_TEST_LEAF.to_vec(), - b"innertree3".to_vec(), - b"key4".to_vec(), + TEST_LEAF.to_vec(), + keygen(MAX_REFERENCE_HOPS), ])), None, None, ) .unwrap() - .expect("successful subtree insert"); + .expect("expected insert"); - // Single key query - let mut query = Query::new(); - query.insert_range_from(b"key4".to_vec()..); - - let path_query = PathQuery::new_unsized( - vec![ANOTHER_TEST_LEAF.to_vec(), b"innertree2".to_vec()], - query, - ); - - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - assert_eq!( - hex::encode(&proof), - "010285010198ebd6dc7e1c82951c41fcfa6487711cac6a399ebb01bb979cb\ - e4a51e0b2f08d06046b6579340009000676616c75653100bf2f052b01c2b\ - b83ff3a40504d42b5b9141c582a3e0c98679189b33a24478a6f1006046b6\ - 579350009000676616c75653400f084ffdbc429a89c9b6620e7224d73c2e\ - 
e505eb7e6fb5eb574e1a8dc8b0d0884110158040a696e6e6572747265653\ - 200080201046b657934008ba21f835b2ff60f16b7fccfbda107bec3da0c4\ - 709357d40de223d769547ec21013a090155ea7d14038c7062d94930798f8\ - 85a19d6ebff8a87489a1debf665604711015e02cfb7d035b8f4a3631be46\ - c597510a16770c15c74331b3dc8dcb577a206e49675040a746573745f6c6\ - 5616632000e02010a696e6e657274726565320049870f2813c0c3c5c105a\ - 988c0ef1372178245152fa9a43b209a6b6d95589bdc11" - ); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + let result = db + .get([TEST_LEAF].as_ref(), &keygen(MAX_REFERENCE_HOPS + 1), None) + .unwrap(); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - let r1 = Element::new_item(b"value1".to_vec()).serialize().unwrap(); - let r2 = Element::new_item(b"value4".to_vec()).serialize().unwrap(); + assert!(matches!(result, Err(Error::ReferenceLimit))); + } - compare_result_tuples( - result_set, - vec![(b"key4".to_vec(), r1), (b"key5".to_vec(), r2)], - ); -} + #[test] + fn test_reference_value_affects_state() { + let db_one = make_test_grovedb(); + db_one + .insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::new_item(vec![0]), + None, + None, + ) + .unwrap() + .expect("should insert item"); + db_one + .insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"ref", + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + b"key1".to_vec(), + ])), + None, + None, + ) + .unwrap() + .expect("should insert item"); -#[test] -fn test_path_query_proofs_without_subquery() { - // Tree Structure - // root - // test_leaf - // innertree - // k1,v1 - // k2,v2 - // k3,v3 - // another_test_leaf - // innertree2 - // k3,v3 - // innertree3 - // k4,v4 + let db_two = make_test_grovedb(); + db_two + .insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::new_item(vec![0]), + None, + None, + ) + .unwrap() + .expect("should insert item"); + db_two + .insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"ref", + 
Element::new_reference(ReferencePathType::UpstreamRootHeightReference( + 0, + vec![TEST_LEAF.to_vec(), b"key1".to_vec()], + )), + None, + None, + ) + .unwrap() + .expect("should insert item"); - // Insert elements into grovedb instance - let temp_db = make_test_grovedb(); - // Insert level 1 nodes - temp_db - .insert( - [TEST_LEAF].as_ref(), - b"innertree", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"innertree2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"innertree3", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - // Insert level 2 nodes - temp_db - .insert( - [TEST_LEAF, b"innertree"].as_ref(), - b"key1", - Element::new_item(b"value1".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [TEST_LEAF, b"innertree"].as_ref(), - b"key2", - Element::new_item(b"value2".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [TEST_LEAF, b"innertree"].as_ref(), - b"key3", - Element::new_item(b"value3".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), - b"key3", - Element::new_item(b"value3".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF, b"innertree3"].as_ref(), - b"key4", - Element::new_item(b"value4".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); + assert_ne!( + db_one + .root_hash(None) + .unwrap() + .expect("should return root hash"), + db_two + .root_hash(None) + .unwrap() + .expect("should return toor hash") + ); + } + + #[test] + fn test_tree_structure_is_persistent() { + let tmp_dir = 
TempDir::new().unwrap(); + let element = Element::new_item(b"ayy".to_vec()); + // Create a scoped GroveDB + let prev_root_hash = { + let mut db = GroveDb::open(tmp_dir.path()).unwrap(); + add_test_leaves(&mut db); + + // Insert some nested subtrees + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 1 insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key2", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 2 insert"); + // Insert an element into subtree + db.insert( + [TEST_LEAF, b"key1", b"key2"].as_ref(), + b"key3", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + assert_eq!( + db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) + .unwrap() + .expect("successful get 1"), + element + ); + db.root_hash(None).unwrap().unwrap() + }; + // Open a persisted GroveDB + let db = GroveDb::open(tmp_dir).unwrap(); + assert_eq!( + db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) + .unwrap() + .expect("successful get 2"), + element + ); + assert!(db + .get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key4", None) + .unwrap() + .is_err()); + assert_eq!(prev_root_hash, db.root_hash(None).unwrap().unwrap()); + } + + #[test] + fn test_root_tree_leaves_are_noted() { + let db = make_test_grovedb(); + db.check_subtree_exists_path_not_found([TEST_LEAF].as_ref().into(), None) + .unwrap() + .expect("should exist"); + db.check_subtree_exists_path_not_found([ANOTHER_TEST_LEAF].as_ref().into(), None) + .unwrap() + .expect("should exist"); + } + + #[test] + fn test_proof_for_invalid_path_root_key() { + let db = make_test_grovedb(); + + let query = Query::new(); + let path_query = PathQuery::new_unsized(vec![b"invalid_path_key".to_vec()], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), 
&path_query).expect("should execute proof"); + + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); + } + + #[test] + fn test_proof_for_invalid_path() { + let db = make_deep_tree(); + + let query = Query::new(); + let path_query = + PathQuery::new_unsized(vec![b"deep_leaf".to_vec(), b"invalid_key".to_vec()], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); + + let query = Query::new(); + let path_query = PathQuery::new_unsized( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"invalid_key".to_vec(), + ], + query, + ); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); + + let query = Query::new(); + let path_query = PathQuery::new_unsized( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_1".to_vec(), + b"invalid_key".to_vec(), + ], + query, + ); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); + + let query = Query::new(); + let path_query = PathQuery::new_unsized( + vec![ + b"deep_leaf".to_vec(), + b"early_invalid_key".to_vec(), + b"deeper_1".to_vec(), + b"invalid_key".to_vec(), + ], + query, + ); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + 
assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); + } + + #[test] + fn test_proof_for_non_existent_data() { + let temp_db = make_test_grovedb(); + + let mut query = Query::new(); + query.insert_key(b"key1".to_vec()); + + // path to empty subtree + let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); + } + + #[test] + fn test_path_query_proofs_without_subquery_with_reference() { + // Tree Structure + // root + // test_leaf + // innertree + // k1,v1 + // k2,v2 + // k3,v3 + // another_test_leaf + // innertree2 + // k3,v3 + // k4, reference to k1 in innertree + // k5, reference to k4 in innertree3 + // innertree3 + // k4,v4 + + // Insert elements into grovedb instance + let temp_db = make_test_grovedb(); + // Insert level 1 nodes + temp_db + .insert( + [TEST_LEAF].as_ref(), + b"innertree", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"innertree2", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"innertree3", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + // Insert level 2 nodes + temp_db + .insert( + [TEST_LEAF, b"innertree"].as_ref(), + b"key1", + Element::new_item(b"value1".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [TEST_LEAF, b"innertree"].as_ref(), + b"key2", + Element::new_item(b"value2".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + 
[TEST_LEAF, b"innertree"].as_ref(), + b"key3", + Element::new_item(b"value3".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), + b"key3", + Element::new_item(b"value3".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), + b"key4", + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + b"innertree".to_vec(), + b"key1".to_vec(), + ])), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF, b"innertree3"].as_ref(), + b"key4", + Element::new_item(b"value4".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), + b"key5", + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + ANOTHER_TEST_LEAF.to_vec(), + b"innertree3".to_vec(), + b"key4".to_vec(), + ])), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); - // Single key query - let mut query = Query::new(); - query.insert_key(b"key1".to_vec()); + // Single key query + let mut query = Query::new(); + query.insert_range_from(b"key4".to_vec()..); - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); + let path_query = PathQuery::new_unsized( + vec![ANOTHER_TEST_LEAF.to_vec(), b"innertree2".to_vec()], + query, + ); - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - assert_eq!( + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + assert_eq!( + hex::encode(&proof), + "010285010198ebd6dc7e1c82951c41fcfa6487711cac6a399ebb01bb979cb\ + e4a51e0b2f08d06046b6579340009000676616c75653100bf2f052b01c2b\ + b83ff3a40504d42b5b9141c582a3e0c98679189b33a24478a6f1006046b6\ + 
579350009000676616c75653400f084ffdbc429a89c9b6620e7224d73c2e\ + e505eb7e6fb5eb574e1a8dc8b0d0884110158040a696e6e6572747265653\ + 200080201046b657934008ba21f835b2ff60f16b7fccfbda107bec3da0c4\ + 709357d40de223d769547ec21013a090155ea7d14038c7062d94930798f8\ + 85a19d6ebff8a87489a1debf665604711015e02cfb7d035b8f4a3631be46\ + c597510a16770c15c74331b3dc8dcb577a206e49675040a746573745f6c6\ + 5616632000e02010a696e6e657274726565320049870f2813c0c3c5c105a\ + 988c0ef1372178245152fa9a43b209a6b6d95589bdc11" + ); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + let r1 = Element::new_item(b"value1".to_vec()).serialize().unwrap(); + let r2 = Element::new_item(b"value4".to_vec()).serialize().unwrap(); + + compare_result_tuples( + result_set, + vec![(b"key4".to_vec(), r1), (b"key5".to_vec(), r2)], + ); + } + + #[test] + fn test_path_query_proofs_without_subquery() { + // Tree Structure + // root + // test_leaf + // innertree + // k1,v1 + // k2,v2 + // k3,v3 + // another_test_leaf + // innertree2 + // k3,v3 + // innertree3 + // k4,v4 + + // Insert elements into grovedb instance + let temp_db = make_test_grovedb(); + // Insert level 1 nodes + temp_db + .insert( + [TEST_LEAF].as_ref(), + b"innertree", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"innertree2", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"innertree3", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + // Insert level 2 nodes + temp_db + .insert( + [TEST_LEAF, b"innertree"].as_ref(), + b"key1", + Element::new_item(b"value1".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + 
[TEST_LEAF, b"innertree"].as_ref(), + b"key2", + Element::new_item(b"value2".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [TEST_LEAF, b"innertree"].as_ref(), + b"key3", + Element::new_item(b"value3".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), + b"key3", + Element::new_item(b"value3".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF, b"innertree3"].as_ref(), + b"key4", + Element::new_item(b"value4".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + // Single key query + let mut query = Query::new(); + query.insert_key(b"key1".to_vec()); + + let path_query = + PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); + + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + assert_eq!( hex::encode(proof.as_slice()), "01025503046b6579310009000676616c7565310002018655e18e4555b0b65\ bbcec64c749db6b9ad84231969fb4fbe769a3093d10f2100198ebd6dc7e1\ @@ -1691,1166 +1655,1271 @@ fn test_path_query_proofs_without_subquery() { 443731ae2a4eae521e4a9a79c331c8a7e22e34c0f1a6e01b55f830550604\ 719833d54ce2bf139aff4bb699fa4111b9741633554318792c511" ); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - let r1 = Element::new_item(b"value1".to_vec()).serialize().unwrap(); - compare_result_tuples(result_set, vec![(b"key1".to_vec(), r1)]); - - // Range query + limit - let mut query = Query::new(); - query.insert_range_after(b"key1".to_vec()..); - let path_query = PathQuery::new( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - SizedQuery::new(query, Some(1), None), - ); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), 
&path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + let r1 = Element::new_item(b"value1".to_vec()).serialize().unwrap(); + compare_result_tuples(result_set, vec![(b"key1".to_vec(), r1)]); + + // Range query + limit + let mut query = Query::new(); + query.insert_range_after(b"key1".to_vec()..); + let path_query = PathQuery::new( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + SizedQuery::new(query, Some(1), None), + ); - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - let r1 = Element::new_item(b"value2".to_vec()).serialize().unwrap(); - compare_result_tuples(result_set, vec![(b"key2".to_vec(), r1)]); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + let r1 = Element::new_item(b"value2".to_vec()).serialize().unwrap(); + compare_result_tuples(result_set, vec![(b"key2".to_vec(), r1)]); - // Range query + direction + limit - let mut query = Query::new_with_direction(false); - query.insert_all(); - let path_query = PathQuery::new( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - SizedQuery::new(query, Some(2), None), - ); + // Range query + direction + limit + let mut query = Query::new_with_direction(false); + query.insert_all(); + let path_query = PathQuery::new( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + SizedQuery::new(query, Some(2), None), + ); - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + let proof = temp_db.prove_query(&path_query, 
None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - let r1 = Element::new_item(b"value3".to_vec()).serialize().unwrap(); - let r2 = Element::new_item(b"value2".to_vec()).serialize().unwrap(); - compare_result_tuples( - result_set, - vec![(b"key3".to_vec(), r1), (b"key2".to_vec(), r2)], - ); -} + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + let r1 = Element::new_item(b"value3".to_vec()).serialize().unwrap(); + let r2 = Element::new_item(b"value2".to_vec()).serialize().unwrap(); + compare_result_tuples( + result_set, + vec![(b"key3".to_vec(), r1), (b"key2".to_vec(), r2)], + ); + } + + #[test] + fn test_path_query_proofs_with_default_subquery() { + let temp_db = make_deep_tree(); -#[test] -fn test_path_query_proofs_with_default_subquery() { - let temp_db = make_deep_tree(); - - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new(); - subq.insert_all(); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - - let keys = [ - b"key1".to_vec(), - b"key2".to_vec(), - b"key3".to_vec(), - b"key4".to_vec(), - b"key5".to_vec(), - ]; - let values = [ - b"value1".to_vec(), - b"value2".to_vec(), - b"value3".to_vec(), - b"value4".to_vec(), - b"value5".to_vec(), - ]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); - - let mut query = Query::new(); - 
query.insert_range_after(b"innertree".to_vec()..); - - let mut subq = Query::new(); - subq.insert_all(); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 2); - - let keys = [b"key4".to_vec(), b"key5".to_vec()]; - let values = [b"value4".to_vec(), b"value5".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); - - // range subquery - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new(); - subq.insert_range_after_to_inclusive(b"key1".to_vec()..=b"key4".to_vec()); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect( - "should + let mut query = Query::new(); + query.insert_all(); + + let mut subq = Query::new(); + subq.insert_all(); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); + + let keys = [ + b"key1".to_vec(), + b"key2".to_vec(), + b"key3".to_vec(), + b"key4".to_vec(), + b"key5".to_vec(), + ]; + let values = [ + b"value1".to_vec(), + b"value2".to_vec(), + 
b"value3".to_vec(), + b"value4".to_vec(), + b"value5".to_vec(), + ]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + + let mut query = Query::new(); + query.insert_range_after(b"innertree".to_vec()..); + + let mut subq = Query::new(); + subq.insert_all(); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 2); + + let keys = [b"key4".to_vec(), b"key5".to_vec()]; + let values = [b"value4".to_vec(), b"value5".to_vec()]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + + // range subquery + let mut query = Query::new(); + query.insert_all(); + + let mut subq = Query::new(); + subq.insert_range_after_to_inclusive(b"key1".to_vec()..=b"key4".to_vec()); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect( + "should execute proof", - ); + ); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - let keys = [b"key2".to_vec(), b"key3".to_vec(), b"key4".to_vec()]; - let values = [b"value2".to_vec(), b"value3".to_vec(), b"value4".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let 
expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); - - // deep tree test - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new(); - subq.insert_all(); - - let mut sub_subquery = Query::new(); - sub_subquery.insert_all(); - - subq.set_subquery(sub_subquery); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 14); - - let keys = [ - b"key1".to_vec(), - b"key2".to_vec(), - b"key3".to_vec(), - b"key4".to_vec(), - b"key5".to_vec(), - b"key6".to_vec(), - b"key7".to_vec(), - b"key8".to_vec(), - b"key9".to_vec(), - b"key10".to_vec(), - b"key11".to_vec(), - b"key12".to_vec(), - b"key13".to_vec(), - b"key14".to_vec(), - ]; - let values = [ - b"value1".to_vec(), - b"value2".to_vec(), - b"value3".to_vec(), - b"value4".to_vec(), - b"value5".to_vec(), - b"value6".to_vec(), - b"value7".to_vec(), - b"value8".to_vec(), - b"value9".to_vec(), - b"value10".to_vec(), - b"value11".to_vec(), - b"value12".to_vec(), - b"value13".to_vec(), - b"value14".to_vec(), - ]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); -} + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); -#[test] -fn test_path_query_proofs_with_subquery_path() { - let temp_db = make_deep_tree(); - - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new(); - subq.insert_all(); - - query.set_subquery_key(b"deeper_1".to_vec()); - 
query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - let keys = [b"key1".to_vec(), b"key2".to_vec(), b"key3".to_vec()]; - let values = [b"value1".to_vec(), b"value2".to_vec(), b"value3".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); - - // test subquery path with valid n > 1 valid translation - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new(); - subq.insert_all(); - - query.set_subquery_path(vec![b"deep_node_1".to_vec(), b"deeper_1".to_vec()]); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![], query); - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - let keys = [b"key1".to_vec(), b"key2".to_vec(), b"key3".to_vec()]; - let values = [b"value1".to_vec(), b"value2".to_vec(), b"value3".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); - - // test subquery path with empty subquery path - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new(); - subq.insert_all(); - - query.set_subquery_path(vec![]); - query.set_subquery(subq); - - let 
path_query = - PathQuery::new_unsized(vec![b"deep_leaf".to_vec(), b"deep_node_1".to_vec()], query); - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 6); - - let keys = [ - b"key1".to_vec(), - b"key2".to_vec(), - b"key3".to_vec(), - b"key4".to_vec(), - b"key5".to_vec(), - b"key6".to_vec(), - ]; - let values = [ - b"value1".to_vec(), - b"value2".to_vec(), - b"value3".to_vec(), - b"value4".to_vec(), - b"value5".to_vec(), - b"value6".to_vec(), - ]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); - - // test subquery path with an invalid translation - // should generate a valid absence proof with an empty result set - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new(); - subq.insert_all(); - - query.set_subquery_path(vec![ - b"deep_node_1".to_vec(), - b"deeper_10".to_vec(), - b"another_invalid_key".to_vec(), - ]); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![], query); - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); -} + let keys = [b"key2".to_vec(), b"key3".to_vec(), b"key4".to_vec()]; + let values = [b"value2".to_vec(), b"value3".to_vec(), b"value4".to_vec()]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, 
expected_result_set); -#[test] -fn test_path_query_proofs_with_key_and_subquery() { - let temp_db = make_deep_tree(); + // deep tree test + let mut query = Query::new(); + query.insert_all(); - let mut query = Query::new(); - query.insert_key(b"deep_node_1".to_vec()); + let mut subq = Query::new(); + subq.insert_all(); - let mut subq = Query::new(); - subq.insert_all(); + let mut sub_subquery = Query::new(); + sub_subquery.insert_all(); - query.set_subquery_key(b"deeper_1".to_vec()); - query.set_subquery(subq); + subq.set_subquery(sub_subquery); + query.set_subquery(subq); - let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); + let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 14); - let keys = [b"key1".to_vec(), b"key2".to_vec(), b"key3".to_vec()]; - let values = [b"value1".to_vec(), b"value2".to_vec(), b"value3".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); -} + let keys = [ + b"key1".to_vec(), + b"key2".to_vec(), + b"key3".to_vec(), + b"key4".to_vec(), + b"key5".to_vec(), + b"key6".to_vec(), + b"key7".to_vec(), + b"key8".to_vec(), + b"key9".to_vec(), + b"key10".to_vec(), + b"key11".to_vec(), + b"key12".to_vec(), + b"key13".to_vec(), + b"key14".to_vec(), + ]; + let values = 
[ + b"value1".to_vec(), + b"value2".to_vec(), + b"value3".to_vec(), + b"value4".to_vec(), + b"value5".to_vec(), + b"value6".to_vec(), + b"value7".to_vec(), + b"value8".to_vec(), + b"value9".to_vec(), + b"value10".to_vec(), + b"value11".to_vec(), + b"value12".to_vec(), + b"value13".to_vec(), + b"value14".to_vec(), + ]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + } -#[test] -fn test_path_query_proofs_with_conditional_subquery() { - let temp_db = make_deep_tree(); + #[test] + fn test_path_query_proofs_with_subquery_path() { + let temp_db = make_deep_tree(); - let mut query = Query::new(); - query.insert_all(); + let mut query = Query::new(); + query.insert_all(); - let mut subquery = Query::new(); - subquery.insert_all(); + let mut subq = Query::new(); + subq.insert_all(); - let mut final_subquery = Query::new(); - final_subquery.insert_all(); + query.set_subquery_key(b"deeper_1".to_vec()); + query.set_subquery(subq); - subquery.add_conditional_subquery( - QueryItem::Key(b"deeper_4".to_vec()), - None, - Some(final_subquery), - ); + let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - query.set_subquery(subquery); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + let keys = [b"key1".to_vec(), b"key2".to_vec(), 
b"key3".to_vec()]; + let values = [b"value1".to_vec(), b"value2".to_vec(), b"value3".to_vec()]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); - let keys = [ - b"deeper_1".to_vec(), - b"deeper_2".to_vec(), - b"deeper_3".to_vec(), - b"key10".to_vec(), - b"key11".to_vec(), - b"deeper_5".to_vec(), - ]; - assert_eq!(result_set.len(), keys.len()); + // test subquery path with valid n > 1 valid translation + let mut query = Query::new(); + query.insert_all(); - // TODO: Is this defined behaviour - for (index, key) in keys.iter().enumerate() { - assert_eq!(&result_set[index].key, key); - } + let mut subq = Query::new(); + subq.insert_all(); - // Default + Conditional subquery - let mut query = Query::new(); - query.insert_all(); + query.set_subquery_path(vec![b"deep_node_1".to_vec(), b"deeper_1".to_vec()]); + query.set_subquery(subq); - let mut subquery = Query::new(); - subquery.insert_all(); + let path_query = PathQuery::new_unsized(vec![], query); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); - let mut final_conditional_subquery = Query::new(); - final_conditional_subquery.insert_all(); + let keys = [b"key1".to_vec(), b"key2".to_vec(), b"key3".to_vec()]; + let values = [b"value1".to_vec(), b"value2".to_vec(), b"value3".to_vec()]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); - let mut final_default_subquery = Query::new(); - 
final_default_subquery.insert_range_inclusive(b"key3".to_vec()..=b"key6".to_vec()); + // test subquery path with empty subquery path + let mut query = Query::new(); + query.insert_all(); - subquery.add_conditional_subquery( - QueryItem::Key(b"deeper_4".to_vec()), - None, - Some(final_conditional_subquery), - ); - subquery.set_subquery(final_default_subquery); - - query.set_subquery(subquery); - - let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 6); - - let keys = [ - b"key3".to_vec(), - b"key4".to_vec(), - b"key5".to_vec(), - b"key6".to_vec(), - b"key10".to_vec(), - b"key11".to_vec(), - ]; - let values = [ - b"value3".to_vec(), - b"value4".to_vec(), - b"value5".to_vec(), - b"value6".to_vec(), - b"value10".to_vec(), - b"value11".to_vec(), - ]; - let elements = values - .map(|x| Element::new_item(x).serialize().unwrap()) - .to_vec(); - // compare_result_sets(&elements, &result_set); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); -} + let mut subq = Query::new(); + subq.insert_all(); -#[test] -fn test_path_query_proofs_with_sized_query() { - let temp_db = make_deep_tree(); + query.set_subquery_path(vec![]); + query.set_subquery(subq); - let mut query = Query::new(); - query.insert_all(); + let path_query = + PathQuery::new_unsized(vec![b"deep_leaf".to_vec(), b"deep_node_1".to_vec()], query); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + 
assert_eq!(result_set.len(), 6); - let mut subquery = Query::new(); - subquery.insert_all(); + let keys = [ + b"key1".to_vec(), + b"key2".to_vec(), + b"key3".to_vec(), + b"key4".to_vec(), + b"key5".to_vec(), + b"key6".to_vec(), + ]; + let values = [ + b"value1".to_vec(), + b"value2".to_vec(), + b"value3".to_vec(), + b"value4".to_vec(), + b"value5".to_vec(), + b"value6".to_vec(), + ]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + + // test subquery path with an invalid translation + // should generate a valid absence proof with an empty result set + let mut query = Query::new(); + query.insert_all(); + + let mut subq = Query::new(); + subq.insert_all(); + + query.set_subquery_path(vec![ + b"deep_node_1".to_vec(), + b"deeper_10".to_vec(), + b"another_invalid_key".to_vec(), + ]); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![], query); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); + } - let mut final_conditional_subquery = Query::new(); - final_conditional_subquery.insert_all(); + #[test] + fn test_path_query_proofs_with_key_and_subquery() { + let temp_db = make_deep_tree(); - let mut final_default_subquery = Query::new(); - final_default_subquery.insert_range_inclusive(b"key4".to_vec()..=b"key6".to_vec()); + let mut query = Query::new(); + query.insert_key(b"deep_node_1".to_vec()); - subquery.add_conditional_subquery( - QueryItem::Key(b"deeper_4".to_vec()), - None, - Some(final_conditional_subquery), - ); - subquery.set_subquery(final_default_subquery); + let mut subq = Query::new(); + subq.insert_all(); - 
query.set_subquery(subquery); + query.set_subquery_key(b"deeper_1".to_vec()); + query.set_subquery(subq); - let path_query = PathQuery::new( - vec![DEEP_LEAF.to_vec()], - SizedQuery::new(query, Some(3), None), - ); - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - let keys = [b"key4".to_vec(), b"key5".to_vec(), b"key6".to_vec()]; - let values = [b"value4".to_vec(), b"value5".to_vec(), b"value6".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); -} + let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); -#[test] -fn test_path_query_proof_with_range_subquery_and_limit() { - let db = make_deep_tree(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - // Create a path query with a range query, subquery, and limit - let mut main_query = Query::new(); - main_query.insert_range_after(b"deeper_3".to_vec()..); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); - let mut subquery = Query::new(); - subquery.insert_all(); + let keys = [b"key1".to_vec(), b"key2".to_vec(), b"key3".to_vec()]; + let values = [b"value1".to_vec(), b"value2".to_vec(), b"value3".to_vec()]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + } - main_query.set_subquery(subquery); + #[test] + fn 
test_path_query_proofs_with_conditional_subquery() { + let temp_db = make_deep_tree(); - let path_query = PathQuery::new( - vec![DEEP_LEAF.to_vec(), b"deep_node_2".to_vec()], - SizedQuery::new(main_query.clone(), Some(3), None), - ); + let mut query = Query::new(); + query.insert_all(); - // Generate proof - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let mut subquery = Query::new(); + subquery.insert_all(); - // Verify proof - let verification_result = GroveDb::verify_query_raw(&proof, &path_query); + let mut final_subquery = Query::new(); + final_subquery.insert_all(); - match verification_result { - Ok((hash, result_set)) => { - // Check if the hash matches the root hash - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - // Check if we got the correct number of results - assert_eq!(result_set.len(), 3, "Expected 3 results due to limit"); - } - Err(e) => { - panic!("Proof verification failed: {:?}", e); + subquery.add_conditional_subquery( + QueryItem::Key(b"deeper_4".to_vec()), + None, + Some(final_subquery), + ); + + query.set_subquery(subquery); + + let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + + let keys = [ + b"deeper_1".to_vec(), + b"deeper_2".to_vec(), + b"deeper_3".to_vec(), + b"key10".to_vec(), + b"key11".to_vec(), + b"deeper_5".to_vec(), + ]; + assert_eq!(result_set.len(), keys.len()); + + // TODO: Is this defined behaviour + for (index, key) in keys.iter().enumerate() { + assert_eq!(&result_set[index].key, key); } + + // Default + Conditional subquery + let mut query = Query::new(); + query.insert_all(); + + let mut subquery = Query::new(); + subquery.insert_all(); + + let mut final_conditional_subquery = Query::new(); + 
final_conditional_subquery.insert_all(); + + let mut final_default_subquery = Query::new(); + final_default_subquery.insert_range_inclusive(b"key3".to_vec()..=b"key6".to_vec()); + + subquery.add_conditional_subquery( + QueryItem::Key(b"deeper_4".to_vec()), + None, + Some(final_conditional_subquery), + ); + subquery.set_subquery(final_default_subquery); + + query.set_subquery(subquery); + + let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 6); + + let keys = [ + b"key3".to_vec(), + b"key4".to_vec(), + b"key5".to_vec(), + b"key6".to_vec(), + b"key10".to_vec(), + b"key11".to_vec(), + ]; + let values = [ + b"value3".to_vec(), + b"value4".to_vec(), + b"value5".to_vec(), + b"value6".to_vec(), + b"value10".to_vec(), + b"value11".to_vec(), + ]; + let elements = values + .map(|x| Element::new_item(x).serialize().unwrap()) + .to_vec(); + // compare_result_sets(&elements, &result_set); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); } - // Now test without a limit to compare - let path_query_no_limit = PathQuery::new( - vec![DEEP_LEAF.to_vec(), b"deep_node_2".to_vec()], - SizedQuery::new(main_query.clone(), None, None), - ); + #[test] + fn test_path_query_proofs_with_sized_query() { + let temp_db = make_deep_tree(); + + let mut query = Query::new(); + query.insert_all(); - let proof_no_limit = db.prove_query(&path_query_no_limit, None).unwrap().unwrap(); - let verification_result_no_limit = GroveDb::verify_query_raw(&proof_no_limit, &path_query_no_limit); + let mut subquery = Query::new(); + subquery.insert_all(); - match verification_result_no_limit { - Ok((hash, result_set)) 
=> { - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5, "Expected 5 results without limit"); + let mut final_conditional_subquery = Query::new(); + final_conditional_subquery.insert_all(); + + let mut final_default_subquery = Query::new(); + final_default_subquery.insert_range_inclusive(b"key4".to_vec()..=b"key6".to_vec()); + + subquery.add_conditional_subquery( + QueryItem::Key(b"deeper_4".to_vec()), + None, + Some(final_conditional_subquery), + ); + subquery.set_subquery(final_default_subquery); + + query.set_subquery(subquery); + + let path_query = PathQuery::new( + vec![DEEP_LEAF.to_vec()], + SizedQuery::new(query, Some(3), None), + ); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + + let keys = [b"key4".to_vec(), b"key5".to_vec(), b"key6".to_vec()]; + let values = [b"value4".to_vec(), b"value5".to_vec(), b"value6".to_vec()]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + } + + #[test] + fn test_path_query_proof_with_range_subquery_and_limit() { + let db = make_deep_tree(); + + // Create a path query with a range query, subquery, and limit + let mut main_query = Query::new(); + main_query.insert_range_after(b"deeper_3".to_vec()..); + + let mut subquery = Query::new(); + subquery.insert_all(); + + main_query.set_subquery(subquery); + + let path_query = PathQuery::new( + vec![DEEP_LEAF.to_vec(), b"deep_node_2".to_vec()], + SizedQuery::new(main_query.clone(), Some(3), None), + ); + + // Generate proof + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + + // Verify proof + let verification_result 
= GroveDb::verify_query_raw(&proof, &path_query); + + match verification_result { + Ok((hash, result_set)) => { + // Check if the hash matches the root hash + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + // Check if we got the correct number of results + assert_eq!(result_set.len(), 3, "Expected 3 results due to limit"); + } + Err(e) => { + panic!("Proof verification failed: {:?}", e); + } } - Err(e) => { - panic!("Proof verification failed (no limit): {:?}", e); + + // Now test without a limit to compare + let path_query_no_limit = PathQuery::new( + vec![DEEP_LEAF.to_vec(), b"deep_node_2".to_vec()], + SizedQuery::new(main_query.clone(), None, None), + ); + + let proof_no_limit = db.prove_query(&path_query_no_limit, None).unwrap().unwrap(); + let verification_result_no_limit = + GroveDb::verify_query_raw(&proof_no_limit, &path_query_no_limit); + + match verification_result_no_limit { + Ok((hash, result_set)) => { + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5, "Expected 5 results without limit"); + } + Err(e) => { + panic!("Proof verification failed (no limit): {:?}", e); + } } } -} -#[test] -fn test_path_query_proof_with_range_subquery_and_limit_with_sum_trees() { - let db = make_deep_tree_with_sum_trees(); + #[test] + fn test_path_query_proof_with_range_subquery_and_limit_with_sum_trees() { + let db = make_deep_tree_with_sum_trees(); - // Create a path query with a range query, subquery, and limit - let mut main_query = Query::new(); - main_query.insert_key(b"a".to_vec()); - main_query.insert_range_after(b"b".to_vec()..); + // Create a path query with a range query, subquery, and limit + let mut main_query = Query::new(); + main_query.insert_key(b"a".to_vec()); + main_query.insert_range_after(b"b".to_vec()..); - let mut subquery = Query::new(); - subquery.insert_all(); + let mut subquery = Query::new(); + subquery.insert_all(); - main_query.set_subquery(subquery); + main_query.set_subquery(subquery); - 
main_query.add_conditional_subquery(QueryItem::Key(b"a".to_vec()), None, None); + main_query.add_conditional_subquery(QueryItem::Key(b"a".to_vec()), None, None); - let path_query = PathQuery::new( - vec![DEEP_LEAF.to_vec(), b"deep_node_1".to_vec()], - SizedQuery::new(main_query.clone(), Some(3), None), - ); + let path_query = PathQuery::new( + vec![DEEP_LEAF.to_vec(), b"deep_node_1".to_vec()], + SizedQuery::new(main_query.clone(), Some(3), None), + ); - let non_proved_result_elements = db.query(&path_query, false, false, false, QueryResultType::QueryPathKeyElementTrioResultType, None).unwrap().expect("expected query to execute").0; + let non_proved_result_elements = db + .query( + &path_query, + false, + false, + false, + QueryResultType::QueryPathKeyElementTrioResultType, + None, + ) + .unwrap() + .expect("expected query to execute") + .0; - assert_eq!(non_proved_result_elements.len(), 3, "Expected 3 results due to limit"); + assert_eq!( + non_proved_result_elements.len(), + 3, + "Expected 3 results due to limit" + ); - let key_elements = non_proved_result_elements.to_key_elements(); + let key_elements = non_proved_result_elements.to_key_elements(); - assert_eq!(key_elements, vec![(vec![97], Element::new_item("storage".as_bytes().to_vec())), (vec![49], Element::SumTree(Some(vec![0;32]), 2, None)), (vec![48], Element::new_item("v1".as_bytes().to_vec()))]); + assert_eq!( + key_elements, + vec![ + (vec![97], Element::new_item("storage".as_bytes().to_vec())), + (vec![49], Element::SumTree(Some(vec![0; 32]), 2, None)), + (vec![48], Element::new_item("v1".as_bytes().to_vec())) + ] + ); - // Generate proof - let proof = db.prove_query(&path_query, Some(ProveOptions { is_verbose: false, multilevel_results: true })).unwrap().unwrap(); + // Generate proof + let proof = db + .prove_query( + &path_query, + Some(ProveOptions { + is_verbose: false, + multilevel_results: true, + }), + ) + .unwrap() + .unwrap(); - // Verify proof - let (hash, result_set) = 
GroveDb::verify_query_raw(&proof, &path_query).expect("proof verification failed"); + // Verify proof + let (hash, result_set) = + GroveDb::verify_query_raw(&proof, &path_query).expect("proof verification failed"); - // Check if the hash matches the root hash - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - // Check if we got the correct number of results - assert_eq!(result_set.len(), 3, "Expected 3 results due to limit"); + // Check if the hash matches the root hash + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + // Check if we got the correct number of results + assert_eq!(result_set.len(), 3, "Expected 3 results due to limit"); - // Now test without a limit to compare - let path_query_no_limit = PathQuery::new( - vec![DEEP_LEAF.to_vec(), b"deep_node_2".to_vec()], - SizedQuery::new(main_query.clone(), None, None), - ); + // Now test without a limit to compare + let path_query_no_limit = PathQuery::new( + vec![DEEP_LEAF.to_vec(), b"deep_node_2".to_vec()], + SizedQuery::new(main_query.clone(), None, None), + ); - let proof_no_limit = db.prove_query(&path_query_no_limit, None).unwrap().unwrap(); - let verification_result_no_limit = GroveDb::verify_query_raw(&proof_no_limit, &path_query_no_limit); + let proof_no_limit = db.prove_query(&path_query_no_limit, None).unwrap().unwrap(); + let verification_result_no_limit = + GroveDb::verify_query_raw(&proof_no_limit, &path_query_no_limit); - match verification_result_no_limit { - Ok((hash, result_set)) => { - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5, "Expected 5 results without limit"); - } - Err(e) => { - panic!("Proof verification failed (no limit): {:?}", e); + match verification_result_no_limit { + Ok((hash, result_set)) => { + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5, "Expected 5 results without limit"); + } + Err(e) => { + panic!("Proof verification failed (no limit): {:?}", e); + } } } -} -#[test] 
-fn test_path_query_proofs_with_direction() { - let temp_db = make_deep_tree(); + #[test] + fn test_path_query_proofs_with_direction() { + let temp_db = make_deep_tree(); - let mut query = Query::new_with_direction(false); - query.insert_all(); + let mut query = Query::new_with_direction(false); + query.insert_all(); - let mut subquery = Query::new_with_direction(false); - subquery.insert_all(); + let mut subquery = Query::new_with_direction(false); + subquery.insert_all(); - let mut final_conditional_subquery = Query::new_with_direction(false); - final_conditional_subquery.insert_all(); + let mut final_conditional_subquery = Query::new_with_direction(false); + final_conditional_subquery.insert_all(); - let mut final_default_subquery = Query::new_with_direction(false); - final_default_subquery.insert_range_inclusive(b"key3".to_vec()..=b"key6".to_vec()); + let mut final_default_subquery = Query::new_with_direction(false); + final_default_subquery.insert_range_inclusive(b"key3".to_vec()..=b"key6".to_vec()); - subquery.add_conditional_subquery( - QueryItem::Key(b"deeper_4".to_vec()), - None, - Some(final_conditional_subquery), - ); - subquery.set_subquery(final_default_subquery); + subquery.add_conditional_subquery( + QueryItem::Key(b"deeper_4".to_vec()), + None, + Some(final_conditional_subquery), + ); + subquery.set_subquery(final_default_subquery); - query.set_subquery(subquery); + query.set_subquery(subquery); - let path_query = PathQuery::new( - vec![DEEP_LEAF.to_vec()], - SizedQuery::new(query, Some(4), None), - ); - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 4); - - let keys = [b"key11".to_vec(), b"key10".to_vec(), b"key6".to_vec(), b"key5".to_vec()]; - let values = [b"value11".to_vec(), b"value10".to_vec(), 
b"value6".to_vec(), b"value5".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); - - // combined directions - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new_with_direction(false); - subq.insert_all(); - - let mut sub_subquery = Query::new(); - sub_subquery.insert_all(); - - subq.set_subquery(sub_subquery); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - - let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 14); - - let keys = [ - b"key4".to_vec(), - b"key5".to_vec(), - b"key6".to_vec(), - b"key1".to_vec(), - b"key2".to_vec(), - b"key3".to_vec(), - b"key12".to_vec(), - b"key13".to_vec(), - b"key14".to_vec(), - b"key10".to_vec(), - b"key11".to_vec(), - b"key7".to_vec(), - b"key8".to_vec(), - b"key9".to_vec(), - ]; - let values = [ - b"value4".to_vec(), - b"value5".to_vec(), - b"value6".to_vec(), - b"value1".to_vec(), - b"value2".to_vec(), - b"value3".to_vec(), - b"value12".to_vec(), - b"value13".to_vec(), - b"value14".to_vec(), - b"value10".to_vec(), - b"value11".to_vec(), - b"value7".to_vec(), - b"value8".to_vec(), - b"value9".to_vec(), - ]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); -} + let path_query = PathQuery::new( + vec![DEEP_LEAF.to_vec()], + SizedQuery::new(query, Some(4), None), + ); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + 
GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + + let keys = [ + b"key11".to_vec(), + b"key10".to_vec(), + b"key6".to_vec(), + b"key5".to_vec(), + ]; + let values = [ + b"value11".to_vec(), + b"value10".to_vec(), + b"value6".to_vec(), + b"value5".to_vec(), + ]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + + // combined directions + let mut query = Query::new(); + query.insert_all(); + + let mut subq = Query::new_with_direction(false); + subq.insert_all(); + + let mut sub_subquery = Query::new(); + sub_subquery.insert_all(); + + subq.set_subquery(sub_subquery); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); + + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 14); + + let keys = [ + b"key4".to_vec(), + b"key5".to_vec(), + b"key6".to_vec(), + b"key1".to_vec(), + b"key2".to_vec(), + b"key3".to_vec(), + b"key12".to_vec(), + b"key13".to_vec(), + b"key14".to_vec(), + b"key10".to_vec(), + b"key11".to_vec(), + b"key7".to_vec(), + b"key8".to_vec(), + b"key9".to_vec(), + ]; + let values = [ + b"value4".to_vec(), + b"value5".to_vec(), + b"value6".to_vec(), + b"value1".to_vec(), + b"value2".to_vec(), + b"value3".to_vec(), + b"value12".to_vec(), + b"value13".to_vec(), + b"value14".to_vec(), + b"value10".to_vec(), + b"value11".to_vec(), + b"value7".to_vec(), + b"value8".to_vec(), + b"value9".to_vec(), + ]; + let elements = values.map(|x| 
Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + } -#[test] -fn test_checkpoint() { - let db = make_test_grovedb(); - let element1 = Element::new_item(b"ayy".to_vec()); + #[test] + fn test_checkpoint() { + let db = make_test_grovedb(); + let element1 = Element::new_item(b"ayy".to_vec()); - db.insert(EMPTY_PATH, b"key1", Element::empty_tree(), None, None) + db.insert(EMPTY_PATH, b"key1", Element::empty_tree(), None, None) + .unwrap() + .expect("cannot insert a subtree 1 into GroveDB"); + db.insert( + [b"key1".as_ref()].as_ref(), + b"key2", + Element::empty_tree(), + None, + None, + ) .unwrap() - .expect("cannot insert a subtree 1 into GroveDB"); - db.insert( - [b"key1".as_ref()].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("cannot insert a subtree 2 into GroveDB"); - db.insert( - [b"key1".as_ref(), b"key2".as_ref()].as_ref(), - b"key3", - element1.clone(), - None, - None, - ) - .unwrap() - .expect("cannot insert an item into GroveDB"); + .expect("cannot insert a subtree 2 into GroveDB"); + db.insert( + [b"key1".as_ref(), b"key2".as_ref()].as_ref(), + b"key3", + element1.clone(), + None, + None, + ) + .unwrap() + .expect("cannot insert an item into GroveDB"); - assert_eq!( - db.get([b"key1".as_ref(), b"key2".as_ref()].as_ref(), b"key3", None) - .unwrap() - .expect("cannot get from grovedb"), - element1 - ); + assert_eq!( + db.get([b"key1".as_ref(), b"key2".as_ref()].as_ref(), b"key3", None) + .unwrap() + .expect("cannot get from grovedb"), + element1 + ); - let tempdir_parent = TempDir::new().expect("cannot open tempdir"); - let checkpoint_tempdir = tempdir_parent.path().join("checkpoint"); - db.create_checkpoint(&checkpoint_tempdir) - .expect("cannot create checkpoint"); + let tempdir_parent = TempDir::new().expect("cannot open tempdir"); + let checkpoint_tempdir = 
tempdir_parent.path().join("checkpoint"); + db.create_checkpoint(&checkpoint_tempdir) + .expect("cannot create checkpoint"); - let checkpoint_db = - GroveDb::open(checkpoint_tempdir).expect("cannot open grovedb from checkpoint"); + let checkpoint_db = + GroveDb::open(checkpoint_tempdir).expect("cannot open grovedb from checkpoint"); + + assert_eq!( + db.get([b"key1".as_ref(), b"key2".as_ref()].as_ref(), b"key3", None) + .unwrap() + .expect("cannot get from grovedb"), + element1 + ); + assert_eq!( + checkpoint_db + .get([b"key1".as_ref(), b"key2".as_ref()].as_ref(), b"key3", None) + .unwrap() + .expect("cannot get from checkpoint"), + element1 + ); + + let element2 = Element::new_item(b"ayy2".to_vec()); + let element3 = Element::new_item(b"ayy3".to_vec()); - assert_eq!( - db.get([b"key1".as_ref(), b"key2".as_ref()].as_ref(), b"key3", None) - .unwrap() - .expect("cannot get from grovedb"), - element1 - ); - assert_eq!( checkpoint_db - .get([b"key1".as_ref(), b"key2".as_ref()].as_ref(), b"key3", None) + .insert( + [b"key1".as_ref()].as_ref(), + b"key4", + element2.clone(), + None, + None, + ) .unwrap() - .expect("cannot get from checkpoint"), - element1 - ); - - let element2 = Element::new_item(b"ayy2".to_vec()); - let element3 = Element::new_item(b"ayy3".to_vec()); + .expect("cannot insert into checkpoint"); - checkpoint_db - .insert( + db.insert( [b"key1".as_ref()].as_ref(), b"key4", - element2.clone(), + element3.clone(), None, None, ) .unwrap() - .expect("cannot insert into checkpoint"); + .expect("cannot insert into GroveDB"); - db.insert( - [b"key1".as_ref()].as_ref(), - b"key4", - element3.clone(), - None, - None, - ) - .unwrap() - .expect("cannot insert into GroveDB"); + assert_eq!( + checkpoint_db + .get([b"key1".as_ref()].as_ref(), b"key4", None) + .unwrap() + .expect("cannot get from checkpoint"), + element2, + ); + + assert_eq!( + db.get([b"key1".as_ref()].as_ref(), b"key4", None) + .unwrap() + .expect("cannot get from GroveDB"), + element3 + ); - 
assert_eq!( checkpoint_db - .get([b"key1".as_ref()].as_ref(), b"key4", None) + .insert( + [b"key1".as_ref()].as_ref(), + b"key5", + element3.clone(), + None, + None, + ) .unwrap() - .expect("cannot get from checkpoint"), - element2, - ); + .expect("cannot insert into checkpoint"); - assert_eq!( - db.get([b"key1".as_ref()].as_ref(), b"key4", None) + db.insert([b"key1".as_ref()].as_ref(), b"key6", element3, None, None) .unwrap() - .expect("cannot get from GroveDB"), - element3 - ); + .expect("cannot insert into GroveDB"); + + assert!(matches!( + checkpoint_db + .get([b"key1".as_ref()].as_ref(), b"key6", None) + .unwrap(), + Err(Error::PathKeyNotFound(_)) + )); + + assert!(matches!( + db.get([b"key1".as_ref()].as_ref(), b"key5", None).unwrap(), + Err(Error::PathKeyNotFound(_)) + )); + } - checkpoint_db - .insert( - [b"key1".as_ref()].as_ref(), - b"key5", - element3.clone(), + #[test] + fn test_is_empty_tree() { + let db = make_test_grovedb(); + + // Create an empty tree with no elements + db.insert( + [TEST_LEAF].as_ref(), + b"innertree", + Element::empty_tree(), None, None, ) .unwrap() - .expect("cannot insert into checkpoint"); + .unwrap(); + + assert!(db + .is_empty_tree([TEST_LEAF, b"innertree"].as_ref(), None) + .unwrap() + .expect("path is valid tree")); - db.insert([b"key1".as_ref()].as_ref(), b"key6", element3, None, None) + // add an element to the tree to make it non empty + db.insert( + [TEST_LEAF, b"innertree"].as_ref(), + b"key1", + Element::new_item(b"hello".to_vec()), + None, + None, + ) .unwrap() - .expect("cannot insert into GroveDB"); + .unwrap(); + assert!(!db + .is_empty_tree([TEST_LEAF, b"innertree"].as_ref(), None) + .unwrap() + .expect("path is valid tree")); + } - assert!(matches!( - checkpoint_db - .get([b"key1".as_ref()].as_ref(), b"key6", None) - .unwrap(), - Err(Error::PathKeyNotFound(_)) - )); - - assert!(matches!( - db.get([b"key1".as_ref()].as_ref(), b"key5", None).unwrap(), - Err(Error::PathKeyNotFound(_)) - )); -} + #[test] + fn 
transaction_should_be_aborted_when_rollback_is_called() { + let item_key = b"key3"; -#[test] -fn test_is_empty_tree() { - let db = make_test_grovedb(); + let db = make_test_grovedb(); + let transaction = db.start_transaction(); - // Create an empty tree with no elements - db.insert( - [TEST_LEAF].as_ref(), - b"innertree", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .unwrap(); + let element1 = Element::new_item(b"ayy".to_vec()); - assert!(db - .is_empty_tree([TEST_LEAF, b"innertree"].as_ref(), None) - .unwrap() - .expect("path is valid tree")); + let result = db + .insert( + [TEST_LEAF].as_ref(), + item_key, + element1, + None, + Some(&transaction), + ) + .unwrap(); - // add an element to the tree to make it non empty - db.insert( - [TEST_LEAF, b"innertree"].as_ref(), - b"key1", - Element::new_item(b"hello".to_vec()), - None, - None, - ) - .unwrap() - .unwrap(); - assert!(!db - .is_empty_tree([TEST_LEAF, b"innertree"].as_ref(), None) + assert!(matches!(result, Ok(()))); + + db.rollback_transaction(&transaction).unwrap(); + + let result = db + .get([TEST_LEAF].as_ref(), item_key, Some(&transaction)) + .unwrap(); + assert!(matches!(result, Err(Error::PathKeyNotFound(_)))); + } + + #[test] + fn transaction_should_be_aborted() { + let db = make_test_grovedb(); + let transaction = db.start_transaction(); + + let item_key = b"key3"; + let element = Element::new_item(b"ayy".to_vec()); + + db.insert( + [TEST_LEAF].as_ref(), + item_key, + element, + None, + Some(&transaction), + ) .unwrap() - .expect("path is valid tree")); -} + .unwrap(); -#[test] -fn transaction_should_be_aborted_when_rollback_is_called() { - let item_key = b"key3"; + drop(transaction); - let db = make_test_grovedb(); - let transaction = db.start_transaction(); + // Transactional data shouldn't be committed to the main database + let result = db.get([TEST_LEAF].as_ref(), item_key, None).unwrap(); + assert!(matches!(result, Err(Error::PathKeyNotFound(_)))); + } - let element1 = 
Element::new_item(b"ayy".to_vec()); + #[test] + fn test_subtree_pairs_iterator() { + let db = make_test_grovedb(); + let element = Element::new_item(b"ayy".to_vec()); + let element2 = Element::new_item(b"lmao".to_vec()); - let result = db - .insert( + // Insert some nested subtrees + db.insert( + [TEST_LEAF].as_ref(), + b"subtree1", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 1 insert"); + db.insert( + [TEST_LEAF, b"subtree1"].as_ref(), + b"subtree11", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 2 insert"); + // Insert an element into subtree + db.insert( + [TEST_LEAF, b"subtree1", b"subtree11"].as_ref(), + b"key1", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + assert_eq!( + db.get( + [TEST_LEAF, b"subtree1", b"subtree11"].as_ref(), + b"key1", + None + ) + .unwrap() + .expect("successful get 1"), + element + ); + db.insert( + [TEST_LEAF, b"subtree1", b"subtree11"].as_ref(), + b"key0", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + db.insert( + [TEST_LEAF, b"subtree1"].as_ref(), + b"subtree12", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 3 insert"); + db.insert( + [TEST_LEAF, b"subtree1"].as_ref(), + b"key1", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + db.insert( + [TEST_LEAF, b"subtree1"].as_ref(), + b"key2", + element2.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + + // Iterate over subtree1 to see if keys of other subtrees messed up + // let mut iter = db + // .elements_iterator([TEST_LEAF, b"subtree1"].as_ref(), None) + // .expect("cannot create iterator"); + let storage_context = db + .grove_db + .db + .get_storage_context([TEST_LEAF, b"subtree1"].as_ref().into(), None) + .unwrap(); + let mut iter = Element::iterator(storage_context.raw_iter()).unwrap(); + assert_eq!( 
+ iter.next_element().unwrap().unwrap(), + Some((b"key1".to_vec(), element)) + ); + assert_eq!( + iter.next_element().unwrap().unwrap(), + Some((b"key2".to_vec(), element2)) + ); + let subtree_element = iter.next_element().unwrap().unwrap().unwrap(); + assert_eq!(subtree_element.0, b"subtree11".to_vec()); + assert!(matches!(subtree_element.1, Element::Tree(..))); + let subtree_element = iter.next_element().unwrap().unwrap().unwrap(); + assert_eq!(subtree_element.0, b"subtree12".to_vec()); + assert!(matches!(subtree_element.1, Element::Tree(..))); + assert!(matches!(iter.next_element().unwrap(), Ok(None))); + } + + #[test] + fn test_find_subtrees() { + let element = Element::new_item(b"ayy".to_vec()); + let db = make_test_grovedb(); + // Insert some nested subtrees + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 1 insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key2", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 2 insert"); + // Insert an element into subtree + db.insert( + [TEST_LEAF, b"key1", b"key2"].as_ref(), + b"key3", + element, + None, + None, + ) + .unwrap() + .expect("successful value insert"); + db.insert( [TEST_LEAF].as_ref(), - item_key, - element1, + b"key4", + Element::empty_tree(), + None, None, - Some(&transaction), ) - .unwrap(); - - assert!(matches!(result, Ok(()))); - - db.rollback_transaction(&transaction).unwrap(); - - let result = db - .get([TEST_LEAF].as_ref(), item_key, Some(&transaction)) - .unwrap(); - assert!(matches!(result, Err(Error::PathKeyNotFound(_)))); -} - -#[test] -fn transaction_should_be_aborted() { - let db = make_test_grovedb(); - let transaction = db.start_transaction(); - - let item_key = b"key3"; - let element = Element::new_item(b"ayy".to_vec()); + .unwrap() + .expect("successful subtree 3 insert"); + let subtrees = db + .find_subtrees(&[TEST_LEAF].as_ref().into(), None) + .unwrap() + 
.expect("cannot get subtrees"); + assert_eq!( + vec![ + vec![TEST_LEAF], + vec![TEST_LEAF, b"key1"], + vec![TEST_LEAF, b"key4"], + vec![TEST_LEAF, b"key1", b"key2"], + ], + subtrees + ); + } - db.insert( - [TEST_LEAF].as_ref(), - item_key, - element, - None, - Some(&transaction), - ) - .unwrap() - .unwrap(); + #[test] + fn test_root_subtree_has_root_key() { + let db = make_test_grovedb(); + let storage = db.db.get_storage_context(EMPTY_PATH, None).unwrap(); + let root_merk = Merk::open_base( + storage, + false, + Some(&Element::value_defined_cost_for_serialized_value), + ) + .unwrap() + .expect("expected to get root merk"); + let (_, root_key, _) = root_merk + .root_hash_key_and_sum() + .unwrap() + .expect("expected to get root hash, key and sum"); + assert!(root_key.is_some()) + } - drop(transaction); + #[test] + fn test_get_subtree() { + let db = make_test_grovedb(); + let element = Element::new_item(b"ayy".to_vec()); - // Transactional data shouldn't be committed to the main database - let result = db.get([TEST_LEAF].as_ref(), item_key, None).unwrap(); - assert!(matches!(result, Err(Error::PathKeyNotFound(_)))); -} + // Returns error is subtree is not valid + { + let subtree = db.get([TEST_LEAF].as_ref(), b"invalid_tree", None).unwrap(); + assert!(subtree.is_err()); -#[test] -fn test_subtree_pairs_iterator() { - let db = make_test_grovedb(); - let element = Element::new_item(b"ayy".to_vec()); - let element2 = Element::new_item(b"lmao".to_vec()); + // Doesn't return an error for subtree that exists but empty + let subtree = db.get(EMPTY_PATH, TEST_LEAF, None).unwrap(); + assert!(subtree.is_ok()); + } - // Insert some nested subtrees - db.insert( - [TEST_LEAF].as_ref(), - b"subtree1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 1 insert"); - db.insert( - [TEST_LEAF, b"subtree1"].as_ref(), - b"subtree11", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 2 insert"); - // Insert an 
element into subtree - db.insert( - [TEST_LEAF, b"subtree1", b"subtree11"].as_ref(), - b"key1", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - assert_eq!( - db.get( - [TEST_LEAF, b"subtree1", b"subtree11"].as_ref(), + // Insert some nested subtrees + db.insert( + [TEST_LEAF].as_ref(), b"key1", - None + Element::empty_tree(), + None, + None, ) .unwrap() - .expect("successful get 1"), - element - ); - db.insert( - [TEST_LEAF, b"subtree1", b"subtree11"].as_ref(), - b"key0", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF, b"subtree1"].as_ref(), - b"subtree12", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 3 insert"); - db.insert( - [TEST_LEAF, b"subtree1"].as_ref(), - b"key1", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF, b"subtree1"].as_ref(), - b"key2", - element2.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - - // Iterate over subtree1 to see if keys of other subtrees messed up - // let mut iter = db - // .elements_iterator([TEST_LEAF, b"subtree1"].as_ref(), None) - // .expect("cannot create iterator"); - let storage_context = db - .grove_db - .db - .get_storage_context([TEST_LEAF, b"subtree1"].as_ref().into(), None) - .unwrap(); - let mut iter = Element::iterator(storage_context.raw_iter()).unwrap(); - assert_eq!( - iter.next_element().unwrap().unwrap(), - Some((b"key1".to_vec(), element)) - ); - assert_eq!( - iter.next_element().unwrap().unwrap(), - Some((b"key2".to_vec(), element2)) - ); - let subtree_element = iter.next_element().unwrap().unwrap().unwrap(); - assert_eq!(subtree_element.0, b"subtree11".to_vec()); - assert!(matches!(subtree_element.1, Element::Tree(..))); - let subtree_element = iter.next_element().unwrap().unwrap().unwrap(); - assert_eq!(subtree_element.0, b"subtree12".to_vec()); - 
assert!(matches!(subtree_element.1, Element::Tree(..))); - assert!(matches!(iter.next_element().unwrap(), Ok(None))); -} + .expect("successful subtree 1 insert"); -#[test] -fn test_find_subtrees() { - let element = Element::new_item(b"ayy".to_vec()); - let db = make_test_grovedb(); - // Insert some nested subtrees - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 1 insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 2 insert"); - // Insert an element into subtree - db.insert( - [TEST_LEAF, b"key1", b"key2"].as_ref(), - b"key3", - element, - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key4", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 3 insert"); - let subtrees = db - .find_subtrees(&[TEST_LEAF].as_ref().into(), None) - .unwrap() - .expect("cannot get subtrees"); - assert_eq!( - vec![ - vec![TEST_LEAF], - vec![TEST_LEAF, b"key1"], - vec![TEST_LEAF, b"key4"], - vec![TEST_LEAF, b"key1", b"key2"], - ], - subtrees - ); -} + let key1_tree = db + .get(EMPTY_PATH, TEST_LEAF, None) + .unwrap() + .expect("expected to get a root tree"); + + assert!( + matches!(key1_tree, Element::Tree(Some(_), _)), + "{}", + format!( + "expected tree with root key, got {:?}", + if let Element::Tree(tree, ..) 
= key1_tree { + format!("{:?}", tree) + } else { + "not a tree".to_string() + } + ) + ); -#[test] -fn test_root_subtree_has_root_key() { - let db = make_test_grovedb(); - let storage = db.db.get_storage_context(EMPTY_PATH, None).unwrap(); - let root_merk = Merk::open_base( - storage, - false, - Some(&Element::value_defined_cost_for_serialized_value), - ) - .unwrap() - .expect("expected to get root merk"); - let (_, root_key, _) = root_merk - .root_hash_key_and_sum() + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key2", + Element::empty_tree(), + None, + None, + ) .unwrap() - .expect("expected to get root hash, key and sum"); - assert!(root_key.is_some()) -} - -#[test] -fn test_get_subtree() { - let db = make_test_grovedb(); - let element = Element::new_item(b"ayy".to_vec()); + .expect("successful subtree 2 insert"); - // Returns error is subtree is not valid - { - let subtree = db.get([TEST_LEAF].as_ref(), b"invalid_tree", None).unwrap(); - assert!(subtree.is_err()); + // Insert an element into subtree + db.insert( + [TEST_LEAF, b"key1", b"key2"].as_ref(), + b"key3", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key4", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 3 insert"); - // Doesn't return an error for subtree that exists but empty - let subtree = db.get(EMPTY_PATH, TEST_LEAF, None).unwrap(); - assert!(subtree.is_ok()); - } + // Retrieve subtree instance + // Check if it returns the same instance that was inserted + { + let subtree_storage = db + .grove_db + .db + .get_storage_context([TEST_LEAF, b"key1", b"key2"].as_ref().into(), None) + .unwrap(); + let subtree = Merk::open_layered_with_root_key( + subtree_storage, + Some(b"key3".to_vec()), + false, + Some(&Element::value_defined_cost_for_serialized_value), + ) + .unwrap() + .expect("cannot open merk"); + let result_element = Element::get(&subtree, b"key3", true).unwrap().unwrap(); 
+ assert_eq!(result_element, Element::new_item(b"ayy".to_vec())); + } + // Insert a new tree with transaction + let transaction = db.start_transaction(); - // Insert some nested subtrees - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 1 insert"); - - let key1_tree = db - .get(EMPTY_PATH, TEST_LEAF, None) - .unwrap() - .expect("expected to get a root tree"); - - assert!( - matches!(key1_tree, Element::Tree(Some(_), _)), - "{}", - format!( - "expected tree with root key, got {:?}", - if let Element::Tree(tree, ..) = key1_tree { - format!("{:?}", tree) - } else { - "not a tree".to_string() - } + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"innertree", + Element::empty_tree(), + None, + Some(&transaction), ) - ); + .unwrap() + .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 2 insert"); + db.insert( + [TEST_LEAF, b"key1", b"innertree"].as_ref(), + b"key4", + element, + None, + Some(&transaction), + ) + .unwrap() + .expect("successful value insert"); - // Insert an element into subtree - db.insert( - [TEST_LEAF, b"key1", b"key2"].as_ref(), - b"key3", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key4", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 3 insert"); + // Retrieve subtree instance with transaction + let subtree_storage = db + .grove_db + .db + .get_transactional_storage_context( + [TEST_LEAF, b"key1", b"innertree"].as_ref().into(), + None, + &transaction, + ) + .unwrap(); + let subtree = Merk::open_layered_with_root_key( + subtree_storage, + Some(b"key4".to_vec()), + false, + Some(&Element::value_defined_cost_for_serialized_value), + ) + .unwrap() + .expect("cannot open merk"); + let result_element = 
Element::get(&subtree, b"key4", true).unwrap().unwrap(); + assert_eq!(result_element, Element::new_item(b"ayy".to_vec())); - // Retrieve subtree instance - // Check if it returns the same instance that was inserted - { + // Should be able to retrieve instances created before transaction let subtree_storage = db .grove_db .db @@ -2867,490 +2936,430 @@ fn test_get_subtree() { let result_element = Element::get(&subtree, b"key3", true).unwrap().unwrap(); assert_eq!(result_element, Element::new_item(b"ayy".to_vec())); } - // Insert a new tree with transaction - let transaction = db.start_transaction(); - - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"innertree", - Element::empty_tree(), - None, - Some(&transaction), - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"key1", b"innertree"].as_ref(), - b"key4", - element, - None, - Some(&transaction), - ) - .unwrap() - .expect("successful value insert"); + #[test] + fn test_get_full_query() { + let db = make_test_grovedb(); - // Retrieve subtree instance with transaction - let subtree_storage = db - .grove_db - .db - .get_transactional_storage_context( - [TEST_LEAF, b"key1", b"innertree"].as_ref().into(), + // Insert a couple of subtrees first + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::empty_tree(), + None, None, - &transaction, ) - .unwrap(); - let subtree = Merk::open_layered_with_root_key( - subtree_storage, - Some(b"key4".to_vec()), - false, - Some(&Element::value_defined_cost_for_serialized_value), - ) - .unwrap() - .expect("cannot open merk"); - let result_element = Element::get(&subtree, b"key4", true).unwrap().unwrap(); - assert_eq!(result_element, Element::new_item(b"ayy".to_vec())); - - // Should be able to retrieve instances created before transaction - let subtree_storage = db - .grove_db - .db - .get_storage_context([TEST_LEAF, b"key1", b"key2"].as_ref().into(), None) - .unwrap(); - let subtree = Merk::open_layered_with_root_key( - subtree_storage, - 
Some(b"key3".to_vec()), - false, - Some(&Element::value_defined_cost_for_serialized_value), - ) - .unwrap() - .expect("cannot open merk"); - let result_element = Element::get(&subtree, b"key3", true).unwrap().unwrap(); - assert_eq!(result_element, Element::new_item(b"ayy".to_vec())); -} - -#[test] -fn test_get_full_query() { - let db = make_test_grovedb(); - - // Insert a couple of subtrees first - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - // Insert some elements into subtree - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key3", - Element::new_item(b"ayya".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key4", - Element::new_item(b"ayyb".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key5", - Element::new_item(b"ayyc".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF, b"key2"].as_ref(), - b"key6", - Element::new_item(b"ayyd".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - - // Test_Leaf - // ___________________________ - // / \ - // key1 key2 - // ___________________________ - // | | - // key4 key6 - // / \ - // key3 key5 - // - - let path1 = vec![TEST_LEAF.to_vec(), b"key1".to_vec()]; - let path2 = vec![TEST_LEAF.to_vec(), b"key2".to_vec()]; - let mut query1 = Query::new(); - let mut query2 = Query::new(); - query1.insert_range_inclusive(b"key3".to_vec()..=b"key4".to_vec()); - query2.insert_key(b"key6".to_vec()); - - let path_query1 = PathQuery::new_unsized(path1, query1); - // should get back key3, key4 - let path_query2 = PathQuery::new_unsized(path2, query2); - 
// should get back key6 - - assert_eq!( - db.query_many_raw( - &[&path_query1, &path_query2], - true, - true, - true, - QueryKeyElementPairResultType, - None + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key2", + Element::empty_tree(), + None, + None, ) .unwrap() - .expect("expected successful get_query") - .to_key_elements(), - vec![ - (b"key3".to_vec(), Element::new_item(b"ayya".to_vec())), - (b"key4".to_vec(), Element::new_item(b"ayyb".to_vec())), - (b"key6".to_vec(), Element::new_item(b"ayyd".to_vec())), - ] - ); -} - -#[test] -fn test_aux_uses_separate_cf() { - let element = Element::new_item(b"ayy".to_vec()); - let db = make_test_grovedb(); - // Insert some nested subtrees - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 1 insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 2 insert"); - // Insert an element into subtree - db.insert( - [TEST_LEAF, b"key1", b"key2"].as_ref(), - b"key3", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - - db.put_aux(b"key1", b"a", None, None) + .expect("successful subtree insert"); + // Insert some elements into subtree + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key3", + Element::new_item(b"ayya".to_vec()), + None, + None, + ) .unwrap() - .expect("cannot put aux"); - db.put_aux(b"key2", b"b", None, None) + .expect("successful value insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key4", + Element::new_item(b"ayyb".to_vec()), + None, + None, + ) .unwrap() - .expect("cannot put aux"); - db.put_aux(b"key3", b"c", None, None) + .expect("successful value insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key5", + Element::new_item(b"ayyc".to_vec()), + None, + None, + ) .unwrap() - .expect("cannot put aux"); - db.delete_aux(b"key3", 
None, None) + .expect("successful value insert"); + db.insert( + [TEST_LEAF, b"key2"].as_ref(), + b"key6", + Element::new_item(b"ayyd".to_vec()), + None, + None, + ) .unwrap() - .expect("cannot delete from aux"); + .expect("successful value insert"); + + // Test_Leaf + // ___________________________ + // / \ + // key1 key2 + // ___________________________ + // | | + // key4 key6 + // / \ + // key3 key5 + // + + let path1 = vec![TEST_LEAF.to_vec(), b"key1".to_vec()]; + let path2 = vec![TEST_LEAF.to_vec(), b"key2".to_vec()]; + let mut query1 = Query::new(); + let mut query2 = Query::new(); + query1.insert_range_inclusive(b"key3".to_vec()..=b"key4".to_vec()); + query2.insert_key(b"key6".to_vec()); + + let path_query1 = PathQuery::new_unsized(path1, query1); + // should get back key3, key4 + let path_query2 = PathQuery::new_unsized(path2, query2); + // should get back key6 - assert_eq!( - db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) + assert_eq!( + db.query_many_raw( + &[&path_query1, &path_query2], + true, + true, + true, + QueryKeyElementPairResultType, + None + ) .unwrap() - .expect("cannot get element"), - element - ); - assert_eq!( - db.get_aux(b"key1", None) + .expect("expected successful get_query") + .to_key_elements(), + vec![ + (b"key3".to_vec(), Element::new_item(b"ayya".to_vec())), + (b"key4".to_vec(), Element::new_item(b"ayyb".to_vec())), + (b"key6".to_vec(), Element::new_item(b"ayyd".to_vec())), + ] + ); + } + + #[test] + fn test_aux_uses_separate_cf() { + let element = Element::new_item(b"ayy".to_vec()); + let db = make_test_grovedb(); + // Insert some nested subtrees + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 1 insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key2", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 2 insert"); + // Insert an element into subtree + db.insert( + [TEST_LEAF, b"key1", 
b"key2"].as_ref(), + b"key3", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + + db.put_aux(b"key1", b"a", None, None) .unwrap() - .expect("cannot get from aux"), - Some(b"a".to_vec()) - ); - assert_eq!( - db.get_aux(b"key2", None) + .expect("cannot put aux"); + db.put_aux(b"key2", b"b", None, None) .unwrap() - .expect("cannot get from aux"), - Some(b"b".to_vec()) - ); - assert_eq!( - db.get_aux(b"key3", None) + .expect("cannot put aux"); + db.put_aux(b"key3", b"c", None, None) .unwrap() - .expect("cannot get from aux"), - None - ); - assert_eq!( - db.get_aux(b"key4", None) + .expect("cannot put aux"); + db.delete_aux(b"key3", None, None) .unwrap() - .expect("cannot get from aux"), - None - ); -} + .expect("cannot delete from aux"); + + assert_eq!( + db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) + .unwrap() + .expect("cannot get element"), + element + ); + assert_eq!( + db.get_aux(b"key1", None) + .unwrap() + .expect("cannot get from aux"), + Some(b"a".to_vec()) + ); + assert_eq!( + db.get_aux(b"key2", None) + .unwrap() + .expect("cannot get from aux"), + Some(b"b".to_vec()) + ); + assert_eq!( + db.get_aux(b"key3", None) + .unwrap() + .expect("cannot get from aux"), + None + ); + assert_eq!( + db.get_aux(b"key4", None) + .unwrap() + .expect("cannot get from aux"), + None + ); + } -#[test] -fn test_aux_with_transaction() { - let element = Element::new_item(b"ayy".to_vec()); - let aux_value = b"ayylmao".to_vec(); - let key = b"key".to_vec(); - let db = make_test_grovedb(); - let transaction = db.start_transaction(); + #[test] + fn test_aux_with_transaction() { + let element = Element::new_item(b"ayy".to_vec()); + let aux_value = b"ayylmao".to_vec(); + let key = b"key".to_vec(); + let db = make_test_grovedb(); + let transaction = db.start_transaction(); - // Insert a regular data with aux data in the same transaction - db.insert( - [TEST_LEAF].as_ref(), - &key, - element, - None, - Some(&transaction), - ) - 
.unwrap() - .expect("unable to insert"); - db.put_aux(&key, &aux_value, None, Some(&transaction)) + // Insert a regular data with aux data in the same transaction + db.insert( + [TEST_LEAF].as_ref(), + &key, + element, + None, + Some(&transaction), + ) .unwrap() - .expect("unable to insert aux value"); - assert_eq!( - db.get_aux(&key, Some(&transaction)) + .expect("unable to insert"); + db.put_aux(&key, &aux_value, None, Some(&transaction)) .unwrap() - .expect("unable to get aux value"), - Some(aux_value.clone()) - ); - // Cannot reach the data outside of transaction - assert_eq!( - db.get_aux(&key, None) + .expect("unable to insert aux value"); + assert_eq!( + db.get_aux(&key, Some(&transaction)) + .unwrap() + .expect("unable to get aux value"), + Some(aux_value.clone()) + ); + // Cannot reach the data outside of transaction + assert_eq!( + db.get_aux(&key, None) + .unwrap() + .expect("unable to get aux value"), + None + ); + // And should be able to get data when committed + db.commit_transaction(transaction) .unwrap() - .expect("unable to get aux value"), - None - ); - // And should be able to get data when committed - db.commit_transaction(transaction) + .expect("unable to commit transaction"); + assert_eq!( + db.get_aux(&key, None) + .unwrap() + .expect("unable to get committed aux value"), + Some(aux_value) + ); + } + + #[test] + fn test_root_hash() { + let db = make_test_grovedb(); + // Check hashes are different if tree is edited + let old_root_hash = db.root_hash(None).unwrap(); + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::new_item(b"ayy".to_vec()), + None, + None, + ) .unwrap() - .expect("unable to commit transaction"); - assert_eq!( - db.get_aux(&key, None) - .unwrap() - .expect("unable to get committed aux value"), - Some(aux_value) - ); -} + .expect("unable to insert an item"); + assert_ne!(old_root_hash.unwrap(), db.root_hash(None).unwrap().unwrap()); -#[test] -fn test_root_hash() { - let db = make_test_grovedb(); - // Check hashes are 
different if tree is edited - let old_root_hash = db.root_hash(None).unwrap(); - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::new_item(b"ayy".to_vec()), - None, - None, - ) - .unwrap() - .expect("unable to insert an item"); - assert_ne!(old_root_hash.unwrap(), db.root_hash(None).unwrap().unwrap()); + // Check isolation + let transaction = db.start_transaction(); - // Check isolation - let transaction = db.start_transaction(); + db.insert( + [TEST_LEAF].as_ref(), + b"key2", + Element::new_item(b"ayy".to_vec()), + None, + Some(&transaction), + ) + .unwrap() + .expect("unable to insert an item"); + let root_hash_outside = db.root_hash(None).unwrap().unwrap(); + assert_ne!( + db.root_hash(Some(&transaction)).unwrap().unwrap(), + root_hash_outside + ); - db.insert( - [TEST_LEAF].as_ref(), - b"key2", - Element::new_item(b"ayy".to_vec()), - None, - Some(&transaction), - ) - .unwrap() - .expect("unable to insert an item"); - let root_hash_outside = db.root_hash(None).unwrap().unwrap(); - assert_ne!( - db.root_hash(Some(&transaction)).unwrap().unwrap(), - root_hash_outside - ); + assert_eq!(db.root_hash(None).unwrap().unwrap(), root_hash_outside); + db.commit_transaction(transaction).unwrap().unwrap(); + assert_ne!(db.root_hash(None).unwrap().unwrap(), root_hash_outside); + } - assert_eq!(db.root_hash(None).unwrap().unwrap(), root_hash_outside); - db.commit_transaction(transaction).unwrap().unwrap(); - assert_ne!(db.root_hash(None).unwrap().unwrap(), root_hash_outside); -} + #[test] + fn test_get_non_existing_root_leaf() { + let db = make_test_grovedb(); + assert!(db.get(EMPTY_PATH, b"ayy", None).unwrap().is_err()); + } -#[test] -fn test_get_non_existing_root_leaf() { - let db = make_test_grovedb(); - assert!(db.get(EMPTY_PATH, b"ayy", None).unwrap().is_err()); -} + #[test] + fn test_check_subtree_exists_function() { + let db = make_test_grovedb(); + db.insert( + [TEST_LEAF].as_ref(), + b"key_scalar", + Element::new_item(b"ayy".to_vec()), + None, + None, + ) + 
.unwrap() + .expect("cannot insert item"); + db.insert( + [TEST_LEAF].as_ref(), + b"key_subtree", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("cannot insert item"); -#[test] -fn test_check_subtree_exists_function() { - let db = make_test_grovedb(); - db.insert( - [TEST_LEAF].as_ref(), - b"key_scalar", - Element::new_item(b"ayy".to_vec()), - None, - None, - ) - .unwrap() - .expect("cannot insert item"); - db.insert( - [TEST_LEAF].as_ref(), - b"key_subtree", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("cannot insert item"); + // Empty tree path means root always exist + assert!(db + .check_subtree_exists_invalid_path(EMPTY_PATH, None) + .unwrap() + .is_ok()); - // Empty tree path means root always exist - assert!(db - .check_subtree_exists_invalid_path(EMPTY_PATH, None) - .unwrap() - .is_ok()); + // TEST_LEAF should be a tree + assert!(db + .check_subtree_exists_invalid_path([TEST_LEAF].as_ref().into(), None) + .unwrap() + .is_ok()); - // TEST_LEAF should be a tree - assert!(db - .check_subtree_exists_invalid_path([TEST_LEAF].as_ref().into(), None) - .unwrap() - .is_ok()); + // TEST_LEAF.key_subtree should be a tree + assert!(db + .check_subtree_exists_invalid_path([TEST_LEAF, b"key_subtree"].as_ref().into(), None) + .unwrap() + .is_ok()); + + // TEST_LEAF.key_scalar should NOT be a tree + assert!(matches!( + db.check_subtree_exists_invalid_path([TEST_LEAF, b"key_scalar"].as_ref().into(), None) + .unwrap(), + Err(Error::InvalidPath(_)) + )); + } - // TEST_LEAF.key_subtree should be a tree - assert!(db - .check_subtree_exists_invalid_path([TEST_LEAF, b"key_subtree"].as_ref().into(), None) + #[test] + fn test_tree_value_exists_method_no_tx() { + let db = make_test_grovedb(); + // Test keys in non-root tree + db.insert( + [TEST_LEAF].as_ref(), + b"key", + Element::new_item(b"ayy".to_vec()), + None, + None, + ) .unwrap() - .is_ok()); + .expect("cannot insert item"); + assert!(db + .has_raw([TEST_LEAF].as_ref(), b"key", None) 
+ .unwrap() + .unwrap()); + assert!(!db + .has_raw([TEST_LEAF].as_ref(), b"badkey", None) + .unwrap() + .unwrap()); - // TEST_LEAF.key_scalar should NOT be a tree - assert!(matches!( - db.check_subtree_exists_invalid_path([TEST_LEAF, b"key_scalar"].as_ref().into(), None) - .unwrap(), - Err(Error::InvalidPath(_)) - )); -} + // Test keys for a root tree + db.insert(EMPTY_PATH, b"leaf", Element::empty_tree(), None, None) + .unwrap() + .expect("cannot insert item"); -#[test] -fn test_tree_value_exists_method_no_tx() { - let db = make_test_grovedb(); - // Test keys in non-root tree - db.insert( - [TEST_LEAF].as_ref(), - b"key", - Element::new_item(b"ayy".to_vec()), - None, - None, - ) - .unwrap() - .expect("cannot insert item"); - assert!(db - .has_raw([TEST_LEAF].as_ref(), b"key", None) - .unwrap() - .unwrap()); - assert!(!db - .has_raw([TEST_LEAF].as_ref(), b"badkey", None) - .unwrap() - .unwrap()); + assert!(db.has_raw(EMPTY_PATH, b"leaf", None).unwrap().unwrap()); + assert!(db.has_raw(EMPTY_PATH, TEST_LEAF, None).unwrap().unwrap()); + assert!(!db.has_raw(EMPTY_PATH, b"badleaf", None).unwrap().unwrap()); + } - // Test keys for a root tree - db.insert(EMPTY_PATH, b"leaf", Element::empty_tree(), None, None) + #[test] + fn test_tree_value_exists_method_tx() { + let db = make_test_grovedb(); + let tx = db.start_transaction(); + // Test keys in non-root tree + db.insert( + [TEST_LEAF].as_ref(), + b"key", + Element::new_item(b"ayy".to_vec()), + None, + Some(&tx), + ) .unwrap() .expect("cannot insert item"); + assert!(db + .has_raw([TEST_LEAF].as_ref(), b"key", Some(&tx)) + .unwrap() + .unwrap()); + assert!(!db + .has_raw([TEST_LEAF].as_ref(), b"key", None) + .unwrap() + .unwrap()); - assert!(db.has_raw(EMPTY_PATH, b"leaf", None).unwrap().unwrap()); - assert!(db.has_raw(EMPTY_PATH, TEST_LEAF, None).unwrap().unwrap()); - assert!(!db.has_raw(EMPTY_PATH, b"badleaf", None).unwrap().unwrap()); -} + // Test keys for a root tree + db.insert(EMPTY_PATH, b"leaf", 
Element::empty_tree(), None, Some(&tx)) + .unwrap() + .expect("cannot insert item"); + assert!(db.has_raw(EMPTY_PATH, b"leaf", Some(&tx)).unwrap().unwrap()); + assert!(!db.has_raw(EMPTY_PATH, b"leaf", None).unwrap().unwrap()); -#[test] -fn test_tree_value_exists_method_tx() { - let db = make_test_grovedb(); - let tx = db.start_transaction(); - // Test keys in non-root tree - db.insert( - [TEST_LEAF].as_ref(), - b"key", - Element::new_item(b"ayy".to_vec()), - None, - Some(&tx), - ) - .unwrap() - .expect("cannot insert item"); - assert!(db - .has_raw([TEST_LEAF].as_ref(), b"key", Some(&tx)) - .unwrap() - .unwrap()); - assert!(!db - .has_raw([TEST_LEAF].as_ref(), b"key", None) - .unwrap() - .unwrap()); + db.commit_transaction(tx) + .unwrap() + .expect("cannot commit transaction"); + assert!(db + .has_raw([TEST_LEAF].as_ref(), b"key", None) + .unwrap() + .unwrap()); + assert!(db.has_raw(EMPTY_PATH, b"leaf", None).unwrap().unwrap()); + } + + #[test] + fn test_storage_wipe() { + let db = make_test_grovedb(); + let _path = db._tmp_dir.path(); - // Test keys for a root tree - db.insert(EMPTY_PATH, b"leaf", Element::empty_tree(), None, Some(&tx)) + // Test keys in non-root tree + db.insert( + [TEST_LEAF].as_ref(), + b"key", + Element::new_item(b"ayy".to_vec()), + None, + None, + ) .unwrap() .expect("cannot insert item"); - assert!(db.has_raw(EMPTY_PATH, b"leaf", Some(&tx)).unwrap().unwrap()); - assert!(!db.has_raw(EMPTY_PATH, b"leaf", None).unwrap().unwrap()); - db.commit_transaction(tx) - .unwrap() - .expect("cannot commit transaction"); - assert!(db - .has_raw([TEST_LEAF].as_ref(), b"key", None) - .unwrap() - .unwrap()); - assert!(db.has_raw(EMPTY_PATH, b"leaf", None).unwrap().unwrap()); -} + // retrieve key before wipe + let elem = db.get(&[TEST_LEAF], b"key", None).unwrap().unwrap(); + assert_eq!(elem, Element::new_item(b"ayy".to_vec())); -#[test] -fn test_storage_wipe() { - let db = make_test_grovedb(); - let _path = db._tmp_dir.path(); + // wipe the database + 
db.grove_db.wipe().unwrap(); - // Test keys in non-root tree - db.insert( - [TEST_LEAF].as_ref(), - b"key", - Element::new_item(b"ayy".to_vec()), - None, - None, - ) - .unwrap() - .expect("cannot insert item"); - - // retrieve key before wipe - let elem = db.get(&[TEST_LEAF], b"key", None).unwrap().unwrap(); - assert_eq!(elem, Element::new_item(b"ayy".to_vec())); - - // wipe the database - db.grove_db.wipe().unwrap(); - - // retrieve key after wipe - let elem_result = db.get(&[TEST_LEAF], b"key", None).unwrap(); - assert!(elem_result.is_err()); - assert!(matches!( - elem_result, - Err(Error::PathParentLayerNotFound(..)) - )); + // retrieve key after wipe + let elem_result = db.get(&[TEST_LEAF], b"key", None).unwrap(); + assert!(elem_result.is_err()); + assert!(matches!( + elem_result, + Err(Error::PathParentLayerNotFound(..)) + )); + } } diff --git a/grovedb/src/tests/query_tests.rs b/grovedb/src/tests/query_tests.rs index fd99e581..254a4d06 100644 --- a/grovedb/src/tests/query_tests.rs +++ b/grovedb/src/tests/query_tests.rs @@ -34,6 +34,7 @@ use tempfile::TempDir; use crate::{ batch::GroveDbOp, + operations::proof::ProveOptions, query_result_type::{PathKeyOptionalElementTrio, QueryResultType}, reference_path::ReferencePathType, tests::{ @@ -42,7 +43,6 @@ use crate::{ }, Element, GroveDb, PathQuery, SizedQuery, }; -use crate::operations::proof_v2::ProveOptions; fn populate_tree_for_non_unique_range_subquery(db: &TempGroveDb) { // Insert a couple of subtrees first @@ -355,8 +355,8 @@ fn populate_tree_for_uneven_keys(db: &TempGroveDb) { None, None, ) - .unwrap() - .expect("successful subtree insert"); + .unwrap() + .expect("successful subtree insert"); db.insert( [TEST_LEAF].as_ref(), @@ -365,8 +365,8 @@ fn populate_tree_for_uneven_keys(db: &TempGroveDb) { None, None, ) - .unwrap() - .expect("successful subtree insert"); + .unwrap() + .expect("successful subtree insert"); db.insert( [TEST_LEAF].as_ref(), @@ -375,18 +375,18 @@ fn populate_tree_for_uneven_keys(db: 
&TempGroveDb) { None, None, ) - .unwrap() - .expect("successful subtree insert"); + .unwrap() + .expect("successful subtree insert"); db.insert( [TEST_LEAF].as_ref(), - &[3;32], + &[3; 32], Element::new_item(4u8.to_be_bytes().to_vec()), None, None, ) - .unwrap() - .expect("successful subtree insert"); + .unwrap() + .expect("successful subtree insert"); db.insert( [TEST_LEAF].as_ref(), @@ -395,8 +395,8 @@ fn populate_tree_for_uneven_keys(db: &TempGroveDb) { None, None, ) - .unwrap() - .expect("successful subtree insert"); + .unwrap() + .expect("successful subtree insert"); } #[test] @@ -2379,10 +2379,16 @@ fn test_subset_proof_verification() { ); // prove verbose - let verbose_proof = db.prove_query(&path_query, Some(ProveOptions { - is_verbose: true, - multilevel_results: false, - })).unwrap().unwrap(); + let verbose_proof = db + .prove_query( + &path_query, + Some(ProveOptions { + is_verbose: true, + multilevel_results: false, + }), + ) + .unwrap() + .unwrap(); assert!(verbose_proof.len() > proof.len()); // subset path query @@ -2406,313 +2412,315 @@ fn test_subset_proof_verification() { ) ); } - -#[test] -fn test_chained_path_query_verification() { - let db = make_deep_tree(); - - let mut query = Query::new(); - query.insert_all(); - let mut subq = Query::new(); - subq.insert_all(); - let mut subsubq = Query::new(); - subsubq.insert_all(); - - subq.set_subquery(subsubq); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![b"deep_leaf".to_vec()], query); - - // first prove non verbose - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 14); - - // prove verbose - let verbose_proof = db.prove_query(&path_query, Some(ProveOptions { - is_verbose: true, - multilevel_results: false, - })).unwrap().unwrap(); - assert!(verbose_proof.len() > proof.len()); - - // init 
deeper_1 path query - let mut query = Query::new(); - query.insert_all(); - - let deeper_1_path_query = PathQuery::new_unsized( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec(), - ], - query, - ); - - // define the path query generators - let mut chained_path_queries = vec![]; - chained_path_queries.push(|_elements: Vec| { - let mut query = Query::new(); - query.insert_all(); - - let deeper_2_path_query = PathQuery::new_unsized( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_2".to_vec(), - ], - query, - ); - Some(deeper_2_path_query) - }); - - // verify the path query chain - let (root_hash, results) = GroveDb::verify_query_with_chained_path_queries( - &verbose_proof, - &deeper_1_path_query, - chained_path_queries, - ) - .unwrap(); - assert_eq!(root_hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(results.len(), 2); - assert_eq!(results[0].len(), 3); - assert_eq!( - results[0][0], - ( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec() - ], - b"key1".to_vec(), - Some(Element::new_item(b"value1".to_vec())) - ) - ); - assert_eq!( - results[0][1], - ( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec() - ], - b"key2".to_vec(), - Some(Element::new_item(b"value2".to_vec())) - ) - ); - assert_eq!( - results[0][2], - ( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec() - ], - b"key3".to_vec(), - Some(Element::new_item(b"value3".to_vec())) - ) - ); - - assert_eq!(results[1].len(), 3); - assert_eq!( - results[1][0], - ( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_2".to_vec() - ], - b"key4".to_vec(), - Some(Element::new_item(b"value4".to_vec())) - ) - ); - assert_eq!( - results[1][1], - ( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_2".to_vec() - ], - b"key5".to_vec(), - Some(Element::new_item(b"value5".to_vec())) - ) - ); - assert_eq!( - results[1][2], - ( - vec![ 
- b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_2".to_vec() - ], - b"key6".to_vec(), - Some(Element::new_item(b"value6".to_vec())) - ) - ); -} - -#[test] -fn test_query_b_depends_on_query_a() { - // we have two trees - // one with a mapping of id to name - // another with a mapping of name to age - // we want to get the age of every one after a certain id ordered by name - let db = make_test_grovedb(); - - // TEST_LEAF contains the id to name mapping - db.insert( - [TEST_LEAF].as_ref(), - &[1], - Element::new_item(b"d".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - db.insert( - [TEST_LEAF].as_ref(), - &[2], - Element::new_item(b"b".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - db.insert( - [TEST_LEAF].as_ref(), - &[3], - Element::new_item(b"c".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - db.insert( - [TEST_LEAF].as_ref(), - &[4], - Element::new_item(b"a".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - - // ANOTHER_TEST_LEAF contains the name to age mapping - db.insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"a", - Element::new_item(vec![10]), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - db.insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"b", - Element::new_item(vec![30]), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - db.insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"c", - Element::new_item(vec![12]), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - db.insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"d", - Element::new_item(vec![46]), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - - // Query: return the age of everyone greater than id 2 ordered by name - // id 2 - b - // so we want to return the age for c and d = 12, 46 respectively - // the proof 
generator knows that id 2 = b, but the verifier doesn't - // hence we need to generate two proofs - // prove that 2 - b then prove age after b - // the verifier has to use the result of the first proof 2 - b - // to generate the path query for the verification of the second proof - - // query name associated with id 2 - let mut query = Query::new(); - query.insert_key(vec![2]); - let mut path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - // first we show that this returns the correct output - let proof = db.prove_query(&path_query_one, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query(&proof, &path_query_one).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 1); - assert_eq!(result_set[0].2, Some(Element::new_item(b"b".to_vec()))); - - // next query should return the age for elements above b - let mut query = Query::new(); - query.insert_range_after(b"b".to_vec()..); - let path_query_two = PathQuery::new_unsized(vec![ANOTHER_TEST_LEAF.to_vec()], query); - - // show that we get the correct output - let proof = db.prove_query(&path_query_two, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query(&proof, &path_query_two).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 2); - assert_eq!(result_set[0].2, Some(Element::new_item(vec![12]))); - assert_eq!(result_set[1].2, Some(Element::new_item(vec![46]))); - - // now we merge the path queries - let mut merged_path_queries = PathQuery::merge(vec![&path_query_one, &path_query_two]).unwrap(); - merged_path_queries.query.limit = Some(3); - let proof = db.prove_query(&merged_path_queries, Some(ProveOptions { - is_verbose: true, - multilevel_results: false, - })).unwrap().unwrap(); - - // verifier only has access to the statement age > 2 - // need to first get the name associated with 2 from the proof - // then use that to construct the next path query - let 
mut chained_path_queries = vec![]; - chained_path_queries.push(|prev_elements: Vec| { - let mut query = Query::new(); - let name_element = prev_elements[0].2.as_ref().unwrap(); - if let Element::Item(name, ..) = name_element { - query.insert_range_after(name.to_owned()..); - Some(PathQuery::new( - vec![ANOTHER_TEST_LEAF.to_vec()], - SizedQuery::new(query, Some(2), None), - )) - } else { - None - } - }); - - // add limit to path query one - path_query_one.query.limit = Some(1); - - let (_, result_set) = GroveDb::verify_query_with_chained_path_queries( - proof.as_slice(), - &path_query_one, - chained_path_queries, - ) - .unwrap(); - assert_eq!(result_set.len(), 2); - assert_eq!(result_set[0].len(), 1); - assert_eq!(result_set[1].len(), 2); - - let age_result = result_set[1].clone(); - assert_eq!(age_result[0].2, Some(Element::new_item(vec![12]))); - assert_eq!(age_result[1].2, Some(Element::new_item(vec![46]))); -} +// #[test] +// fn test_chained_path_query_verification() { +// let db = make_deep_tree(); +// +// let mut query = Query::new(); +// query.insert_all(); +// let mut subq = Query::new(); +// subq.insert_all(); +// let mut subsubq = Query::new(); +// subsubq.insert_all(); +// +// subq.set_subquery(subsubq); +// query.set_subquery(subq); +// +// let path_query = PathQuery::new_unsized(vec![b"deep_leaf".to_vec()], +// query); +// +// // first prove non verbose +// let proof = db.prove_query(&path_query, None).unwrap().unwrap(); +// let (hash, result_set) = GroveDb::verify_query(&proof, +// &path_query).unwrap(); assert_eq!(hash, +// db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 14); +// +// // prove verbose +// let verbose_proof = db.prove_query(&path_query, Some(ProveOptions { +// is_verbose: true, +// multilevel_results: false, +// })).unwrap().unwrap(); +// assert!(verbose_proof.len() > proof.len()); +// +// // init deeper_1 path query +// let mut query = Query::new(); +// query.insert_all(); +// +// let deeper_1_path_query = 
PathQuery::new_unsized( +// vec![ +// b"deep_leaf".to_vec(), +// b"deep_node_1".to_vec(), +// b"deeper_1".to_vec(), +// ], +// query, +// ); +// +// // define the path query generators +// let mut chained_path_queries = vec![]; +// chained_path_queries.push(|_elements: Vec| { +// let mut query = Query::new(); +// query.insert_all(); +// +// let deeper_2_path_query = PathQuery::new_unsized( +// vec![ +// b"deep_leaf".to_vec(), +// b"deep_node_1".to_vec(), +// b"deeper_2".to_vec(), +// ], +// query, +// ); +// Some(deeper_2_path_query) +// }); +// +// // verify the path query chain +// let (root_hash, results) = +// GroveDb::verify_query_with_chained_path_queries( &verbose_proof, +// &deeper_1_path_query, +// chained_path_queries, +// ) +// .unwrap(); +// assert_eq!(root_hash, db.root_hash(None).unwrap().unwrap()); +// assert_eq!(results.len(), 2); +// assert_eq!(results[0].len(), 3); +// assert_eq!( +// results[0][0], +// ( +// vec![ +// b"deep_leaf".to_vec(), +// b"deep_node_1".to_vec(), +// b"deeper_1".to_vec() +// ], +// b"key1".to_vec(), +// Some(Element::new_item(b"value1".to_vec())) +// ) +// ); +// assert_eq!( +// results[0][1], +// ( +// vec![ +// b"deep_leaf".to_vec(), +// b"deep_node_1".to_vec(), +// b"deeper_1".to_vec() +// ], +// b"key2".to_vec(), +// Some(Element::new_item(b"value2".to_vec())) +// ) +// ); +// assert_eq!( +// results[0][2], +// ( +// vec![ +// b"deep_leaf".to_vec(), +// b"deep_node_1".to_vec(), +// b"deeper_1".to_vec() +// ], +// b"key3".to_vec(), +// Some(Element::new_item(b"value3".to_vec())) +// ) +// ); +// +// assert_eq!(results[1].len(), 3); +// assert_eq!( +// results[1][0], +// ( +// vec![ +// b"deep_leaf".to_vec(), +// b"deep_node_1".to_vec(), +// b"deeper_2".to_vec() +// ], +// b"key4".to_vec(), +// Some(Element::new_item(b"value4".to_vec())) +// ) +// ); +// assert_eq!( +// results[1][1], +// ( +// vec![ +// b"deep_leaf".to_vec(), +// b"deep_node_1".to_vec(), +// b"deeper_2".to_vec() +// ], +// b"key5".to_vec(), +// 
Some(Element::new_item(b"value5".to_vec())) +// ) +// ); +// assert_eq!( +// results[1][2], +// ( +// vec![ +// b"deep_leaf".to_vec(), +// b"deep_node_1".to_vec(), +// b"deeper_2".to_vec() +// ], +// b"key6".to_vec(), +// Some(Element::new_item(b"value6".to_vec())) +// ) +// ); +// } +// +// #[test] +// fn test_query_b_depends_on_query_a() { +// // we have two trees +// // one with a mapping of id to name +// // another with a mapping of name to age +// // we want to get the age of every one after a certain id ordered by name +// let db = make_test_grovedb(); +// +// // TEST_LEAF contains the id to name mapping +// db.insert( +// [TEST_LEAF].as_ref(), +// &[1], +// Element::new_item(b"d".to_vec()), +// None, +// None, +// ) +// .unwrap() +// .expect("successful root tree leaf insert"); +// db.insert( +// [TEST_LEAF].as_ref(), +// &[2], +// Element::new_item(b"b".to_vec()), +// None, +// None, +// ) +// .unwrap() +// .expect("successful root tree leaf insert"); +// db.insert( +// [TEST_LEAF].as_ref(), +// &[3], +// Element::new_item(b"c".to_vec()), +// None, +// None, +// ) +// .unwrap() +// .expect("successful root tree leaf insert"); +// db.insert( +// [TEST_LEAF].as_ref(), +// &[4], +// Element::new_item(b"a".to_vec()), +// None, +// None, +// ) +// .unwrap() +// .expect("successful root tree leaf insert"); +// +// // ANOTHER_TEST_LEAF contains the name to age mapping +// db.insert( +// [ANOTHER_TEST_LEAF].as_ref(), +// b"a", +// Element::new_item(vec![10]), +// None, +// None, +// ) +// .unwrap() +// .expect("successful root tree leaf insert"); +// db.insert( +// [ANOTHER_TEST_LEAF].as_ref(), +// b"b", +// Element::new_item(vec![30]), +// None, +// None, +// ) +// .unwrap() +// .expect("successful root tree leaf insert"); +// db.insert( +// [ANOTHER_TEST_LEAF].as_ref(), +// b"c", +// Element::new_item(vec![12]), +// None, +// None, +// ) +// .unwrap() +// .expect("successful root tree leaf insert"); +// db.insert( +// [ANOTHER_TEST_LEAF].as_ref(), +// b"d", +// 
Element::new_item(vec![46]), +// None, +// None, +// ) +// .unwrap() +// .expect("successful root tree leaf insert"); +// +// // Query: return the age of everyone greater than id 2 ordered by name +// // id 2 - b +// // so we want to return the age for c and d = 12, 46 respectively +// // the proof generator knows that id 2 = b, but the verifier doesn't +// // hence we need to generate two proofs +// // prove that 2 - b then prove age after b +// // the verifier has to use the result of the first proof 2 - b +// // to generate the path query for the verification of the second proof +// +// // query name associated with id 2 +// let mut query = Query::new(); +// query.insert_key(vec![2]); +// let mut path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], +// query); +// +// // first we show that this returns the correct output +// let proof = db.prove_query(&path_query_one, None).unwrap().unwrap(); +// let (hash, result_set) = GroveDb::verify_query(&proof, +// &path_query_one).unwrap(); assert_eq!(hash, +// db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 1); +// assert_eq!(result_set[0].2, Some(Element::new_item(b"b".to_vec()))); +// +// // next query should return the age for elements above b +// let mut query = Query::new(); +// query.insert_range_after(b"b".to_vec()..); +// let path_query_two = +// PathQuery::new_unsized(vec![ANOTHER_TEST_LEAF.to_vec()], query); +// +// // show that we get the correct output +// let proof = db.prove_query(&path_query_two, None).unwrap().unwrap(); +// let (hash, result_set) = GroveDb::verify_query(&proof, +// &path_query_two).unwrap(); assert_eq!(hash, +// db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 2); +// assert_eq!(result_set[0].2, Some(Element::new_item(vec![12]))); +// assert_eq!(result_set[1].2, Some(Element::new_item(vec![46]))); +// +// // now we merge the path queries +// let mut merged_path_queries = PathQuery::merge(vec![&path_query_one, +// 
&path_query_two]).unwrap(); merged_path_queries.query.limit = Some(3); +// let proof = db.prove_query(&merged_path_queries, Some(ProveOptions { +// is_verbose: true, +// multilevel_results: false, +// })).unwrap().unwrap(); +// +// // verifier only has access to the statement age > 2 +// // need to first get the name associated with 2 from the proof +// // then use that to construct the next path query +// let mut chained_path_queries = vec![]; +// chained_path_queries.push(|prev_elements: +// Vec| { let mut query = Query::new(); +// let name_element = prev_elements[0].2.as_ref().unwrap(); +// if let Element::Item(name, ..) = name_element { +// query.insert_range_after(name.to_owned()..); +// Some(PathQuery::new( +// vec![ANOTHER_TEST_LEAF.to_vec()], +// SizedQuery::new(query, Some(2), None), +// )) +// } else { +// None +// } +// }); +// +// // add limit to path query one +// path_query_one.query.limit = Some(1); +// +// let (_, result_set) = GroveDb::verify_query_with_chained_path_queries( +// proof.as_slice(), +// &path_query_one, +// chained_path_queries, +// ) +// .unwrap(); +// assert_eq!(result_set.len(), 2); +// assert_eq!(result_set[0].len(), 1); +// assert_eq!(result_set[1].len(), 2); +// +// let age_result = result_set[1].clone(); +// assert_eq!(age_result[0].2, Some(Element::new_item(vec![12]))); +// assert_eq!(age_result[1].2, Some(Element::new_item(vec![46]))); +// } #[test] fn test_prove_absent_path_with_intermediate_emtpy_tree() { diff --git a/merk/src/merk/chunks.rs b/merk/src/merk/chunks.rs index f6b1b64c..ef94571e 100644 --- a/merk/src/merk/chunks.rs +++ b/merk/src/merk/chunks.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, 
distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - use std::collections::VecDeque; use ed::Encode; diff --git a/merk/src/merk/prove.rs b/merk/src/merk/prove.rs index 9e18b02e..99227c13 100644 --- a/merk/src/merk/prove.rs +++ b/merk/src/merk/prove.rs @@ -97,6 +97,38 @@ where .map_ok(|(proof, _, limit, ..)| (proof, limit)) }) } + + /// Creates a Merkle proof for the list of queried keys. For each key in + /// the query, if the key is found in the store then the value will be + /// proven to be in the tree. For each key in the query that does not + /// exist in the tree, its absence will be proven by including + /// boundary keys. + /// The proof returned is in an encoded format which can be verified with + /// `merk::verify`. + /// + /// This is unsafe because the keys in `query` must be sorted and unique - + /// if they are not, there will be undefined behavior. For a safe version + /// of this method which checks to ensure the batch is sorted and + /// unique, see `prove`. 
+ pub fn prove_unchecked_query_items( + &self, + query_items: &[QueryItem], + limit: Option, + left_to_right: bool, + ) -> CostResult { + self.use_tree_mut(|maybe_tree| { + maybe_tree + .ok_or(Error::CorruptedCodeExecution( + "Cannot create proof for empty tree", + )) + .wrap_with_cost(Default::default()) + .flat_map_ok(|tree| { + let mut ref_walker = RefWalker::new(tree, self.source()); + ref_walker.create_proof(query_items, limit, left_to_right) + }) + .map_ok(|(proof, _, limit, ..)| (proof, limit)) + }) + } } type Proof = (LinkedList, Option); @@ -112,10 +144,7 @@ pub struct ProofConstructionResult { impl ProofConstructionResult { /// New ProofConstructionResult pub fn new(proof: Vec, limit: Option) -> Self { - Self { - proof, - limit, - } + Self { proof, limit } } } @@ -130,9 +159,6 @@ pub struct ProofWithoutEncodingResult { impl ProofWithoutEncodingResult { /// New ProofWithoutEncodingResult pub fn new(proof: LinkedList, limit: Option) -> Self { - Self { - proof, - limit, - } + Self { proof, limit } } } diff --git a/merk/src/proofs/chunk.rs b/merk/src/proofs/chunk.rs index 22334688..063a3575 100644 --- a/merk/src/proofs/chunk.rs +++ b/merk/src/proofs/chunk.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Chunk proofs mod binary_range; diff --git a/merk/src/proofs/mod.rs b/merk/src/proofs/mod.rs index 1bedeec5..bf3718f4 100644 --- a/merk/src/proofs/mod.rs +++ b/merk/src/proofs/mod.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk proofs #[cfg(feature = "full")] diff --git a/merk/src/proofs/query/map.rs b/merk/src/proofs/query/map.rs index 9eb716ed..0718285b 100644 --- a/merk/src/proofs/query/map.rs +++ b/merk/src/proofs/query/map.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Query #![allow(unstable_name_collisions)] diff --git a/merk/src/proofs/query/mod.rs b/merk/src/proofs/query/mod.rs index a26f0e56..8ab86ea1 100644 --- a/merk/src/proofs/query/mod.rs +++ b/merk/src/proofs/query/mod.rs @@ -660,22 +660,12 @@ where let (mut right_proof, right_absence, new_limit) = if left_to_right { cost_return_on_error!( &mut cost, - self.create_child_proof( - proof_direction, - right_items, - new_limit, - left_to_right, - ) + self.create_child_proof(proof_direction, right_items, new_limit, left_to_right,) ) } else { cost_return_on_error!( &mut cost, - self.create_child_proof( - proof_direction, - left_items, - new_limit, - left_to_right, - ) + self.create_child_proof(proof_direction, left_items, new_limit, left_to_right,) ) }; @@ -727,12 +717,7 @@ where } } - Ok(( - proof, - (left_absence.0, right_absence.1), - new_limit, - )) - .wrap_with_cost(cost) + Ok((proof, (left_absence.0, right_absence.1), new_limit)).wrap_with_cost(cost) } /// Similar to `create_proof`. Recurses into the child on the given side and @@ -764,8 +749,7 @@ where }); Ok((proof, (false, false), limit)).wrap_with_cost(Default::default()) } else { - Ok((LinkedList::new(), (false, false), limit)) - .wrap_with_cost(Default::default()) + Ok((LinkedList::new(), (false, false), limit)).wrap_with_cost(Default::default()) } } } @@ -1185,15 +1169,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); } @@ -1245,15 +1223,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, 
tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); } @@ -1309,15 +1281,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![3], vec![3]), (vec![7], vec![7])]); } @@ -1381,15 +1347,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![(vec![3], vec![3]), (vec![5], vec![5]), (vec![7], vec![7])], @@ -1443,15 +1403,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![]); } @@ -1505,15 +1459,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![]); } @@ -1698,15 +1646,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + 
.unwrap(); compare_result_tuples( res.result_set, vec![ @@ -1850,15 +1792,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -1867,7 +1803,6 @@ mod test { ], ); assert_eq!(res.limit, None); - // skip 1 element let mut tree = make_tree_seq(10); @@ -1901,7 +1836,6 @@ mod test { vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], ); assert_eq!(res.limit, Some(0)); - // skip 2 elements let mut tree = make_tree_seq(10); @@ -1932,7 +1866,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); - // skip all elements let mut tree = make_tree_seq(10); @@ -1978,19 +1911,13 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); + let mut query = Query::new_with_direction(false); for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -2090,15 +2017,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -2108,7 +2029,6 @@ mod test { ], ); assert_eq!(res.limit, None); - // skip 1 element let mut tree = make_tree_seq(10); @@ -2142,7 +2062,6 @@ mod test { vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], ); 
assert_eq!(res.limit, Some(0)); - // skip 2 elements let mut tree = make_tree_seq(10); @@ -2176,7 +2095,6 @@ mod test { vec![(vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60])], ); assert_eq!(res.limit, Some(0)); - // skip all elements let mut tree = make_tree_seq(10); @@ -2226,15 +2144,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, @@ -2262,22 +2174,15 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![(vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60])], ); assert_eq!(res.limit, None); - } #[test] @@ -2344,21 +2249,14 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])], ); assert_eq!(res.limit, None); - // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -2396,7 +2294,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -2438,7 +2335,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); - // Limit 
result set to 100 items let mut tree = make_6_node_tree(); @@ -2479,7 +2375,6 @@ mod test { vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])], ); assert_eq!(res.limit, Some(97)); - // skip 1 element let mut tree = make_6_node_tree(); @@ -2508,7 +2403,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); - // skip 2 elements let mut tree = make_6_node_tree(); @@ -2537,7 +2431,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![8], vec![8])]); assert_eq!(res.limit, Some(0)); - // skip all elements let mut tree = make_6_node_tree(); @@ -2585,15 +2478,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])], @@ -2627,7 +2514,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![7], vec![7]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - } #[test] @@ -2717,15 +2603,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -2736,7 +2616,6 @@ mod test { ], ); assert_eq!(res.limit, None); - // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -2774,7 +2653,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); - // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -2812,7 +2690,6 @@ mod test { .unwrap(); 
compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); - // Limit result set to 100 items let mut tree = make_6_node_tree(); @@ -2858,7 +2735,6 @@ mod test { ], ); assert_eq!(res.limit, Some(96)); - // skip 1 element let mut tree = make_6_node_tree(); @@ -2887,7 +2763,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); - // skip 2 elements let mut tree = make_6_node_tree(); @@ -2916,7 +2791,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - // skip all elements let mut tree = make_6_node_tree(); @@ -2964,15 +2838,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -3011,7 +2879,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - } #[test] @@ -3101,15 +2968,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -3120,7 +2981,6 @@ mod test { ], ); assert_eq!(res.limit, None); - // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -3158,7 +3018,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); - // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -3196,7 +3055,6 @@ mod test { 
.unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); - // Limit result set to 100 items let mut tree = make_6_node_tree(); @@ -3242,7 +3100,6 @@ mod test { ], ); assert_eq!(res.limit, Some(96)); - // skip 1 element let mut tree = make_6_node_tree(); @@ -3271,7 +3128,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); - // skip 2 elements let mut tree = make_6_node_tree(); @@ -3300,7 +3156,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - // skip all elements let mut tree = make_6_node_tree(); @@ -3348,15 +3203,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -3395,7 +3244,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - } #[test] @@ -3485,15 +3333,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -3504,7 +3346,6 @@ mod test { ], ); assert_eq!(res.limit, None); - // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -3542,7 +3383,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -3580,7 +3420,6 @@ mod test { .unwrap(); 
compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - // Limit result set to 100 items let mut tree = make_6_node_tree(); @@ -3626,7 +3465,6 @@ mod test { ], ); assert_eq!(res.limit, Some(96)); - // skip 1 element let mut tree = make_6_node_tree(); @@ -3655,7 +3493,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - // skip 2 elements let mut tree = make_6_node_tree(); @@ -3684,7 +3521,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); - // skip all elements let mut tree = make_6_node_tree(); @@ -3732,15 +3568,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -3782,7 +3612,6 @@ mod test { vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])], ); assert_eq!(res.limit, Some(0)); - } #[test] @@ -3867,18 +3696,11 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, None); - // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -3916,7 +3738,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -3954,7 +3775,6 @@ mod test { .unwrap(); 
compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - // Limit result set to 100 items let mut tree = make_6_node_tree(); @@ -3992,7 +3812,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(98)); - // skip 1 element let mut tree = make_6_node_tree(); @@ -4021,7 +3840,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - // skip 2 elements let mut tree = make_6_node_tree(); @@ -4050,7 +3868,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); - // skip all elements let mut tree = make_6_node_tree(); @@ -4098,15 +3915,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); let mut tree = make_6_node_tree(); @@ -4137,7 +3948,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(299)); - } #[test] @@ -4221,21 +4031,14 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])], ); assert_eq!(res.limit, None); - // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -4273,7 +4076,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, 
vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -4311,7 +4113,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - // Limit result set to 100 items let mut tree = make_6_node_tree(); @@ -4352,7 +4153,6 @@ mod test { vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])], ); assert_eq!(res.limit, Some(97)); - // skip 1 element let mut tree = make_6_node_tree(); @@ -4381,7 +4181,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - // skip 2 elements let mut tree = make_6_node_tree(); @@ -4410,7 +4209,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); - // skip all elements let mut tree = make_6_node_tree(); @@ -4568,15 +4366,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -4589,7 +4381,6 @@ mod test { ], ); assert_eq!(res.limit, None); - // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -4627,7 +4418,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); - // Limit result set to 2 items let mut tree = make_6_node_tree(); @@ -4665,7 +4455,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); - // Limit result set to 100 items let mut tree = make_6_node_tree(); @@ -4713,7 +4502,6 @@ mod test { ], ); assert_eq!(res.limit, Some(94)); - // skip 1 element let mut tree = 
make_6_node_tree(); @@ -4745,7 +4533,6 @@ mod test { vec![(vec![3], vec![3]), (vec![4], vec![4]), (vec![5], vec![5])], ); assert_eq!(res.limit, Some(0)); - // skip 2 elements let mut tree = make_6_node_tree(); @@ -4774,7 +4561,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - // skip all elements let mut tree = make_6_node_tree(); @@ -4822,15 +4608,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -4871,7 +4651,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - } #[test] @@ -4951,7 +4730,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); - } #[test] @@ -5034,7 +4812,6 @@ mod test { .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - } #[test] @@ -5122,19 +4899,13 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); + let mut query = Query::new_with_direction(false); for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -5236,15 +5007,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - 
.unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -5343,15 +5108,9 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs index bb3283f0..452d568c 100644 --- a/merk/src/proofs/query/verify.rs +++ b/merk/src/proofs/query/verify.rs @@ -202,11 +202,14 @@ pub fn execute_proof( } } } - println!("pushing {:?}", ProvedKeyValue { - key: key.clone(), - value: val.clone(), - proof: value_hash, - }); + println!( + "pushing {:?}", + ProvedKeyValue { + key: key.clone(), + value: val.clone(), + proof: value_hash, + } + ); // add data to output output.push(ProvedKeyValue { key: key.clone(), diff --git a/merk/src/proofs/tree.rs b/merk/src/proofs/tree.rs index b3bf9cf1..16655a6d 100644 --- a/merk/src/proofs/tree.rs +++ b/merk/src/proofs/tree.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Tree proofs #[cfg(feature = "full")] diff --git a/merk/src/tree/commit.rs b/merk/src/tree/commit.rs index 24c1d996..31b0df5c 100644 --- a/merk/src/tree/commit.rs +++ b/merk/src/tree/commit.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree commit #[cfg(feature = "full")] diff --git a/merk/src/tree/encoding.rs b/merk/src/tree/encoding.rs index 29307246..3a97c895 100644 --- a/merk/src/tree/encoding.rs +++ b/merk/src/tree/encoding.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree encoding #[cfg(feature = "full")] diff --git a/merk/src/tree/fuzz_tests.rs b/merk/src/tree/fuzz_tests.rs index 631918ff..2f3067d1 100644 --- a/merk/src/tree/fuzz_tests.rs +++ b/merk/src/tree/fuzz_tests.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Fuzz tests #![cfg(tests)] diff --git a/merk/src/tree/hash.rs b/merk/src/tree/hash.rs index d6d45c9f..e23566a9 100644 --- a/merk/src/tree/hash.rs +++ b/merk/src/tree/hash.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree hash #[cfg(any(feature = "full", feature = "verify"))] diff --git a/merk/src/tree/iter.rs b/merk/src/tree/iter.rs index 6ca58df7..03cca6ea 100644 --- a/merk/src/tree/iter.rs +++ b/merk/src/tree/iter.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree iterator #[cfg(feature = "full")] diff --git a/merk/src/tree/kv.rs b/merk/src/tree/kv.rs index ff020abc..b10733fc 100644 --- a/merk/src/tree/kv.rs +++ b/merk/src/tree/kv.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree key-values #[cfg(feature = "full")] diff --git a/merk/src/tree/link.rs b/merk/src/tree/link.rs index fa0d1563..f445dd11 100644 --- a/merk/src/tree/link.rs +++ b/merk/src/tree/link.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree link #[cfg(feature = "full")] diff --git a/merk/src/tree/mod.rs b/merk/src/tree/mod.rs index 401b8722..9a29dc8e 100644 --- a/merk/src/tree/mod.rs +++ b/merk/src/tree/mod.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk trees #[cfg(feature = "full")] diff --git a/merk/src/tree/ops.rs b/merk/src/tree/ops.rs index da481d0a..738a89df 100644 --- a/merk/src/tree/ops.rs +++ b/merk/src/tree/ops.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree ops #[cfg(feature = "full")] diff --git a/merk/src/tree/tree_feature_type.rs b/merk/src/tree/tree_feature_type.rs index c1fceed3..c47fb0d6 100644 --- a/merk/src/tree/tree_feature_type.rs +++ b/merk/src/tree/tree_feature_type.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree feature type #[cfg(any(feature = "full", feature = "verify"))] diff --git a/merk/src/tree/walk/fetch.rs b/merk/src/tree/walk/fetch.rs index 08b66d99..e99df5bd 100644 --- a/merk/src/tree/walk/fetch.rs +++ b/merk/src/tree/walk/fetch.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Walk #[cfg(feature = "full")] diff --git a/merk/src/tree/walk/mod.rs b/merk/src/tree/walk/mod.rs index e5401814..adf2a07d 100644 --- a/merk/src/tree/walk/mod.rs +++ b/merk/src/tree/walk/mod.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree walk #[cfg(feature = "full")] diff --git a/merk/src/tree/walk/ref_walker.rs b/merk/src/tree/walk/ref_walker.rs index d9fb1bcd..17f4e6c4 100644 --- a/merk/src/tree/walk/ref_walker.rs +++ b/merk/src/tree/walk/ref_walker.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk reference walker #[cfg(feature = "full")] From 04ce2b373d8513b95a1591ee443f477d027a2ec2 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Thu, 4 Jul 2024 07:39:35 +0700 Subject: [PATCH 05/34] more work on new proof system --- grovedb/src/element/mod.rs | 67 +++++++++++++ grovedb/src/operations/proof/generate.rs | 117 ++++++++++++++++++++++- grovedb/src/operations/proof/verify.rs | 57 +++++++++-- grovedb/src/query_result_type.rs | 47 +++++++++ grovedb/src/reference_path.rs | 51 ++++++++++ grovedb/src/tests/mod.rs | 43 ++++----- merk/src/proofs/encoding.rs | 28 ------ merk/src/proofs/query/verify.rs | 25 ++++- 8 files changed, 368 insertions(+), 67 deletions(-) diff --git a/grovedb/src/element/mod.rs b/grovedb/src/element/mod.rs index 4c29c400..a88429d8 100644 --- a/grovedb/src/element/mod.rs +++ b/grovedb/src/element/mod.rs @@ -111,6 +111,73 @@ pub enum Element { SumTree(Option>, SumValue, Option), } +impl fmt::Display for Element { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Element::Item(data, flags) => { + write!( + f, + "Item({}{})", + hex_to_ascii(data), + flags + .as_ref() + .map_or(String::new(), |f| format!(", flags: {:?}", f)) + ) + } + Element::Reference(path, max_hop, flags) => { + write!( + f, + "Reference({}, max_hop: {}{})", + path, + max_hop.map_or("None".to_string(), |h| h.to_string()), + flags + .as_ref() + .map_or(String::new(), |f| format!(", flags: {:?}", f)) + ) + } + Element::Tree(root_key, flags) => { + write!( + f, + "Tree({}{})", + root_key + .as_ref() + .map_or("None".to_string(), |k| hex::encode(k)), + flags + .as_ref() + .map_or(String::new(), |f| format!(", flags: {:?}", f)) + ) + } + Element::SumItem(sum_value, flags) => { + write!( + f, + "SumItem({}{}", + sum_value, + flags + .as_ref() + .map_or(String::new(), |f| format!(", flags: {:?}", f)) + ) + } + Element::SumTree(root_key, sum_value, flags) => { + write!( + f, + "SumTree({}, {}{}", + root_key + .as_ref() + .map_or("None".to_string(), 
|k| hex::encode(k)), + sum_value, + flags + .as_ref() + .map_or(String::new(), |f| format!(", flags: {:?}", f)) + ) + } + } + } +} + +fn hex_to_ascii(hex_value: &[u8]) -> String { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) +} + impl Element { pub fn type_str(&self) -> &str { match self { diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index 31a4840e..06da9254 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -1,6 +1,6 @@ //! Generate proof operations -use std::collections::BTreeMap; +use std::{collections::BTreeMap, fmt}; use bincode::{Decode, Encode}; use derive_more::From; @@ -12,7 +12,7 @@ use grovedb_merk::{ proofs::{ encode_into, query::{Key, QueryItem}, - Node, Op, + Decoder, Node, Op, Tree, }, tree::value_hash, Merk, ProofWithoutEncodingResult, @@ -57,6 +57,116 @@ pub struct GroveDBProofV0 { pub root_layer: LayerProof, } +impl fmt::Display for LayerProof { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "LayerProof {{")?; + writeln!(f, " merk_proof: {}", decode_merk_proof(&self.merk_proof))?; + if !self.lower_layers.is_empty() { + writeln!(f, " lower_layers: {{")?; + for (key, layer_proof) in &self.lower_layers { + writeln!(f, " {} => {{", hex_to_ascii(key))?; + for line in format!("{}", layer_proof).lines() { + writeln!(f, " {}", line)?; + } + writeln!(f, " }}")?; + } + writeln!(f, " }}")?; + } + write!(f, "}}") + } +} + +impl fmt::Display for GroveDBProof { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + GroveDBProof::V0(proof) => write!(f, "{}", proof), + } + } +} + +impl fmt::Display for GroveDBProofV0 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "GroveDBProofV0 {{")?; + for line in format!("{}", self.root_layer).lines() { + writeln!(f, " {}", line)?; + } + write!(f, "}}") + } +} + +fn decode_merk_proof(proof: &[u8]) -> String 
{ + let mut result = String::new(); + let ops = Decoder::new(proof); + + for (i, op) in ops.enumerate() { + match op { + Ok(op) => { + result.push_str(&format!("\n {}: {}", i, op_to_string(&op))); + } + Err(e) => { + result.push_str(&format!("\n {}: Error decoding op: {}", i, e)); + } + } + } + + result +} + +fn op_to_string(op: &Op) -> String { + match op { + Op::Push(node) => format!("Push({})", node_to_string(node)), + Op::PushInverted(node) => format!("PushInverted({})", node_to_string(node)), + Op::Parent => "Parent".to_string(), + Op::Child => "Child".to_string(), + Op::ParentInverted => "ParentInverted".to_string(), + Op::ChildInverted => "ChildInverted".to_string(), + } +} + +fn node_to_string(node: &Node) -> String { + match node { + Node::Hash(hash) => format!("Hash(HASH[{}])", hex::encode(hash)), + Node::KVHash(kv_hash) => format!("KVHash(HASH[{}])", hex::encode(kv_hash)), + Node::KV(key, value) => { + format!("KV({}, {})", hex::encode(key), element_hex_to_ascii(value)) + } + Node::KVValueHash(key, value, value_hash) => format!( + "KVValueHash({}, {}, HASH[{}])", + hex_to_ascii(key), + element_hex_to_ascii(value), + hex::encode(value_hash) + ), + Node::KVDigest(key, value_hash) => format!( + "KVDigest({}, HASH[{}])", + hex_to_ascii(key), + hex::encode(value_hash) + ), + Node::KVRefValueHash(key, value, value_hash) => format!( + "KVRefValueHash({}, {}, HASH[{}])", + hex_to_ascii(key), + element_hex_to_ascii(value), + hex::encode(value_hash) + ), + Node::KVValueHashFeatureType(key, value, value_hash, feature_type) => format!( + "KVValueHashFeatureType({}, {}, HASH[{}], {:?})", + hex_to_ascii(key), + element_hex_to_ascii(value), + hex::encode(value_hash), + feature_type + ), + } +} + +fn element_hex_to_ascii(hex_value: &[u8]) -> String { + Element::deserialize(hex_value) + .map(|e| e.to_string()) + .unwrap_or_else(|_| hex::encode(hex_value)) +} + +fn hex_to_ascii(hex_value: &[u8]) -> String { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| 
hex::encode(hex_value)) +} + impl GroveDb { /// Prove one or more path queries. /// If we have more than one path query, we merge into a single path query @@ -95,6 +205,7 @@ impl GroveDb { let mut cost = OperationCost::default(); let proof = cost_return_on_error!(&mut cost, self.prove_internal(path_query, prove_options)); + println!("constructed proof is {}", proof); let config = bincode::config::standard() .with_big_endian() .with_no_limit(); @@ -138,7 +249,7 @@ impl GroveDb { .0 .to_btree_map_level_results(); - println!("precomputed results are {:?}", precomputed_result_map); + println!("precomputed results are {}", precomputed_result_map); let root_layer = cost_return_on_error!( &mut cost, diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index a108a05d..2aa77675 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -1,6 +1,10 @@ use std::collections::BTreeSet; -use grovedb_merk::{execute_proof, proofs::Query, tree::value_hash}; +use grovedb_merk::{ + execute_proof, + proofs::Query, + tree::{combine_hash, value_hash}, +}; use crate::{ operations::proof::{ @@ -85,8 +89,15 @@ impl GroveDb { is_subset: bool, ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { let mut result = Vec::new(); - let root_hash = - Self::verify_layer_proof_raw(&proof.root_layer, query, &[], &mut result, is_subset)?; + let mut limit = query.query.limit; + let root_hash = Self::verify_layer_proof_raw( + &proof.root_layer, + query, + &mut limit, + &[], + &mut result, + is_subset, + )?; Ok((root_hash, result)) } @@ -157,6 +168,7 @@ impl GroveDb { fn verify_layer_proof_raw( layer_proof: &LayerProof, query: &PathQuery, + limit_left: &mut Option, current_path: &[&[u8]], result: &mut ProvedPathKeyValues, is_subset: bool, @@ -175,7 +187,7 @@ impl GroveDb { let (root_hash, merk_result) = execute_proof( &layer_proof.merk_proof, &level_query, - Some(layer_proof.lower_layers.len() as u16), + *limit_left, 
left_to_right, ) .unwrap() @@ -190,15 +202,44 @@ impl GroveDb { let mut path = current_path.to_vec(); let key = &proved_key_value.key; let value = &proved_key_value.value; + let hash = &proved_key_value.proof; path.push(key); verified_keys.insert(key.clone()); if let Some(lower_layer) = layer_proof.lower_layers.get(key) { - let lower_hash = - Self::verify_layer_proof_raw(lower_layer, query, &path, result, is_subset)?; - if lower_hash != value_hash(value).value { - return Err(Error::InvalidProof("Mismatch in lower layer hash".into())); + let element = Element::deserialize(value)?; + match element { + Element::Tree(Some(v), _) | Element::SumTree(Some(v), ..) => { + let lower_hash = Self::verify_layer_proof_raw( + lower_layer, + query, + limit_left, + &path, + result, + is_subset, + )?; + let combined_root_hash = + combine_hash(value_hash(value).value(), &lower_hash) + .value() + .to_owned(); + if hash != &combined_root_hash { + return Err(Error::InvalidProof(format!( + "Mismatch in lower layer hash, expected {}, got {}", + hex::encode(hash), + hex::encode(combined_root_hash) + ))); + } + } + Element::Tree(None, _) + | Element::SumTree(None, ..) + | Element::SumItem(..) + | Element::Item(..) + | Element::Reference(..) 
=> { + return Err(Error::InvalidProof( + "Proof has lower layer for a non Tree".into(), + )); + } } } else { let path_key_value = ProvedPathKeyValue::from_proved_key_value( diff --git a/grovedb/src/query_result_type.rs b/grovedb/src/query_result_type.rs index 52cf59d7..2189a7f9 100644 --- a/grovedb/src/query_result_type.rs +++ b/grovedb/src/query_result_type.rs @@ -30,6 +30,7 @@ use std::{ collections::{BTreeMap, HashMap}, + fmt, vec::IntoIter, }; @@ -67,6 +68,52 @@ pub struct BTreeMapLevelResult { pub key_values: BTreeMap, } +impl fmt::Display for BTreeMapLevelResultOrItem { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + BTreeMapLevelResultOrItem::BTreeMapLevelResult(result) => { + write!(f, "{}", result) + } + BTreeMapLevelResultOrItem::ResultItem(element) => { + write!(f, "{}", element) + } + } + } +} + +impl fmt::Display for BTreeMapLevelResult { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "BTreeMapLevelResult {{")?; + self.fmt_inner(f, 1)?; + write!(f, "}}") + } +} + +impl BTreeMapLevelResult { + fn fmt_inner(&self, f: &mut fmt::Formatter<'_>, indent: usize) -> fmt::Result { + for (key, value) in &self.key_values { + write!(f, "{:indent$}", "", indent = indent * 2)?; + write!(f, "{}: ", hex_to_ascii(key))?; + match value { + BTreeMapLevelResultOrItem::BTreeMapLevelResult(result) => { + writeln!(f, "BTreeMapLevelResult {{")?; + result.fmt_inner(f, indent + 1)?; + write!(f, "{:indent$}}}", "", indent = indent * 2)?; + } + BTreeMapLevelResultOrItem::ResultItem(element) => { + write!(f, "{}", element)?; + } + } + writeln!(f)?; + } + Ok(()) + } +} + +fn hex_to_ascii(hex_value: &[u8]) -> String { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) +} + impl BTreeMapLevelResult { pub fn len_of_values_at_path(&self, path: &[&[u8]]) -> u16 { let mut current = self; diff --git a/grovedb/src/reference_path.rs b/grovedb/src/reference_path.rs index d1fd4baa..8ab4cf58 100644 --- 
a/grovedb/src/reference_path.rs +++ b/grovedb/src/reference_path.rs @@ -86,6 +86,57 @@ pub enum ReferencePathType { SiblingReference(Vec), } +// Helper function to display paths +fn display_path(path: &Vec>) -> String { + path.iter() + .map(|segment| hex::encode(segment)) + .collect::>() + .join("/") +} + +impl fmt::Display for ReferencePathType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ReferencePathType::AbsolutePathReference(path) => { + write!(f, "AbsolutePathReference({})", display_path(path)) + } + ReferencePathType::UpstreamRootHeightReference(height, path) => { + write!( + f, + "UpstreamRootHeightReference({}, {})", + height, + display_path(path) + ) + } + ReferencePathType::UpstreamRootHeightWithParentPathAdditionReference(height, path) => { + write!( + f, + "UpstreamRootHeightWithParentPathAdditionReference({}, {})", + height, + display_path(path) + ) + } + ReferencePathType::UpstreamFromElementHeightReference(height, path) => { + write!( + f, + "UpstreamFromElementHeightReference({}, {})", + height, + display_path(path) + ) + } + ReferencePathType::CousinReference(key) => { + write!(f, "CousinReference({})", hex::encode(key)) + } + ReferencePathType::RemovedCousinReference(path) => { + write!(f, "RemovedCousinReference({})", display_path(path)) + } + ReferencePathType::SiblingReference(key) => { + write!(f, "SiblingReference({})", hex::encode(key)) + } + } + } +} + #[cfg(any(feature = "full", feature = "verify"))] impl ReferencePathType { /// Given the reference path type and the current qualified path (path+key), diff --git a/grovedb/src/tests/mod.rs b/grovedb/src/tests/mod.rs index f85311ca..f0f085c6 100644 --- a/grovedb/src/tests/mod.rs +++ b/grovedb/src/tests/mod.rs @@ -1510,19 +1510,17 @@ mod tests { let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); assert_eq!( - hex::encode(&proof), - "010285010198ebd6dc7e1c82951c41fcfa6487711cac6a399ebb01bb979cb\ - 
e4a51e0b2f08d06046b6579340009000676616c75653100bf2f052b01c2b\ - b83ff3a40504d42b5b9141c582a3e0c98679189b33a24478a6f1006046b6\ - 579350009000676616c75653400f084ffdbc429a89c9b6620e7224d73c2e\ - e505eb7e6fb5eb574e1a8dc8b0d0884110158040a696e6e6572747265653\ - 200080201046b657934008ba21f835b2ff60f16b7fccfbda107bec3da0c4\ - 709357d40de223d769547ec21013a090155ea7d14038c7062d94930798f8\ - 85a19d6ebff8a87489a1debf665604711015e02cfb7d035b8f4a3631be46\ - c597510a16770c15c74331b3dc8dcb577a206e49675040a746573745f6c6\ - 5616632000e02010a696e6e657274726565320049870f2813c0c3c5c105a\ - 988c0ef1372178245152fa9a43b209a6b6d95589bdc11" - ); + hex::encode(&proof), + "005e02cfb7d035b8f4a3631be46c597510a16770c15c74331b3dc8dcb577a206e49675040a746\ + 573745f6c65616632000e02010a696e6e657274726565320049870f2813c0c3c5c105a988c0ef1\ + 372178245152fa9a43b209a6b6d95589bdc11010a746573745f6c6561663258040a696e6e65727\ + 47265653200080201046b657934008ba21f835b2ff60f16b7fccfbda107bec3da0c4709357d40d\ + e223d769547ec21013a090155ea7d14038c7062d94930798f885a19d6ebff8a87489a1debf6656\ + 04711010a696e6e65727472656532850198ebd6dc7e1c82951c41fcfa6487711cac6a399ebb01b\ + b979cbe4a51e0b2f08d06046b6579340009000676616c75653100bf2f052b01c2bb83ff3a40504\ + d42b5b9141c582a3e0c98679189b33a24478a6f1006046b6579350009000676616c75653400f08\ + 4ffdbc429a89c9b6620e7224d73c2ee505eb7e6fb5eb574e1a8dc8b0d08841100" + ); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1645,16 +1643,15 @@ mod tests { let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); assert_eq!( - hex::encode(proof.as_slice()), - "01025503046b6579310009000676616c7565310002018655e18e4555b0b65\ - bbcec64c749db6b9ad84231969fb4fbe769a3093d10f2100198ebd6dc7e1\ - c82951c41fcfa6487711cac6a399ebb01bb979cbe4a51e0b2f08d1101350\ - 409696e6e65727472656500080201046b657932004910536da659a3dbdbc\ - f68c4a6630e72de4ba20cfc60b08b3dd45b4225a599b6015c04097465737\ - 
45f6c656166000d020109696e6e65727472656500fafa16d06e8d8696dae\ - 443731ae2a4eae521e4a9a79c331c8a7e22e34c0f1a6e01b55f830550604\ - 719833d54ce2bf139aff4bb699fa4111b9741633554318792c511" - ); + hex::encode(proof.as_slice()), + "005c0409746573745f6c656166000d020109696e6e65727472656500fafa16d06e8d8696dae443731\ + ae2a4eae521e4a9a79c331c8a7e22e34c0f1a6e01b55f830550604719833d54ce2bf139aff4bb699fa\ + 4111b9741633554318792c5110109746573745f6c656166350409696e6e65727472656500080201046\ + b657932004910536da659a3dbdbcf68c4a6630e72de4ba20cfc60b08b3dd45b4225a599b60109696e6\ + e6572747265655503046b6579310009000676616c7565310002018655e18e4555b0b65bbcec64c749d\ + b6b9ad84231969fb4fbe769a3093d10f2100198ebd6dc7e1c82951c41fcfa6487711cac6a399ebb01b\ + b979cbe4a51e0b2f08d1100" + ); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); diff --git a/merk/src/proofs/encoding.rs b/merk/src/proofs/encoding.rs index d0395fe7..eb1c055b 100644 --- a/merk/src/proofs/encoding.rs +++ b/merk/src/proofs/encoding.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Proofs encoding #[cfg(any(feature = "full", feature = "verify"))] diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs index 452d568c..74186069 100644 --- a/merk/src/proofs/query/verify.rs +++ b/merk/src/proofs/query/verify.rs @@ -1,4 +1,4 @@ -use std::collections::LinkedList; +use std::{collections::LinkedList, fmt}; use grovedb_costs::{cost_return_on_error, CostResult, CostsExt, OperationCost}; @@ -60,6 +60,7 @@ pub fn execute_proof( let mut last_push = None; let mut query = query.directional_iter(left_to_right).peekable(); let mut in_range = false; + let original_limit = limit; let mut current_limit = limit; let ops = Decoder::new(bytes); @@ -192,9 +193,10 @@ pub fn execute_proof( if let Some(val) = value { if let Some(limit) = current_limit { if limit == 0 { - return Err(Error::InvalidProofError( - "Proof returns more data than limit".to_string(), - )); + return Err(Error::InvalidProofError(format!( + "Proof returns more data than limit {:?}", + original_limit + ))); } else { current_limit = Some(limit - 1); if current_limit == Some(0) { @@ -203,7 +205,7 @@ pub fn execute_proof( } } println!( - "pushing {:?}", + "pushing {}", ProvedKeyValue { key: key.clone(), value: val.clone(), @@ -304,6 +306,19 @@ pub struct ProvedKeyValue { pub proof: CryptoHash, } +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for ProvedKeyValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "ProvedKeyValue {{ key: {}, value: {}, proof: {} }}", + String::from_utf8(self.key.clone()).unwrap_or_else(|_| hex::encode(&self.key)), + hex::encode(&self.value), + hex::encode(self.proof) + ) + } +} + #[cfg(any(feature = "full", feature = 
"verify"))] #[derive(PartialEq, Eq, Debug)] /// Proof verification result From 2317e8115094d1cda3d1cfce11a9612353305f33 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Thu, 4 Jul 2024 16:58:52 +0700 Subject: [PATCH 06/34] more work on proofs --- grovedb/src/error.rs | 4 ++ grovedb/src/operations/proof/generate.rs | 91 +++++++++++++++++------- 2 files changed, 68 insertions(+), 27 deletions(-) diff --git a/grovedb/src/error.rs b/grovedb/src/error.rs index 956b5343..c3419f00 100644 --- a/grovedb/src/error.rs +++ b/grovedb/src/error.rs @@ -85,6 +85,10 @@ pub enum Error { /// Corrupted data CorruptedData(String), + #[error("data storage error: {0}")] + /// Corrupted storage + CorruptedStorage(String), + #[error("invalid code execution error: {0}")] /// Invalid code execution InvalidCodeExecution(&'static str), diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index 06da9254..a93c0368 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -1,6 +1,9 @@ //! 
Generate proof operations -use std::{collections::BTreeMap, fmt}; +use std::{ + collections::{BTreeMap, BTreeSet}, + fmt, +}; use bincode::{Decode, Encode}; use derive_more::From; @@ -253,7 +256,7 @@ impl GroveDb { let root_layer = cost_return_on_error!( &mut cost, - self.prove_subqueries(vec![], path_query, precomputed_result_map,) + self.prove_subqueries(vec![], path_query, Some(precomputed_result_map)) ); Ok(GroveDBProofV0 { root_layer }.into()).wrap_with_cost(cost) @@ -265,7 +268,7 @@ impl GroveDb { &self, path: Vec<&[u8]>, path_query: &PathQuery, - layer_precomputed_results: BTreeMapLevelResult, + mut layer_precomputed_results: Option, ) -> CostResult { let mut cost = OperationCost::default(); @@ -281,39 +284,69 @@ impl GroveDb { self.open_non_transactional_merk_at_path(path.as_slice().into(), None) ); - let limit = layer_precomputed_results.key_values.len(); + let mut items_to_prove: BTreeSet> = layer_precomputed_results + .as_ref() + .map_or(BTreeSet::new(), |map| { + map.key_values.keys().cloned().collect() + }); + + for query_item in query_at_path.as_slice() { + match query_item { + QueryItem::Key(key) => { + items_to_prove.insert(key.clone()); + } + _ => {} + } + } - let merk_proof = cost_return_on_error!( + let (merk_proof, sub_level_keys) = cost_return_on_error!( &mut cost, self.generate_merk_proof( &path.as_slice().into(), &subtree, &query_at_path, left_to_right, - Some(limit as u16), + Some(items_to_prove.len() as u16), ) ); let lower_layers = cost_return_on_error_no_add!( &cost, - layer_precomputed_results - .key_values + sub_level_keys .into_iter() - .filter_map(|(key, value)| { - match value { - BTreeMapLevelResultOrItem::BTreeMapLevelResult(layer) => { - let mut lower_path = path.clone(); - lower_path.push(key.as_slice()); - match self - .prove_subqueries(lower_path, path_query, layer) - .unwrap_add_cost(&mut cost) - { - Ok(layer_proof) => Some(Ok((key, layer_proof))), - Err(e) => Some(Err(e)), - } - } - BTreeMapLevelResultOrItem::ResultItem(_) => 
None, + .filter_map(|(key)| { + let mut lower_path = path.clone(); + lower_path.push(key.as_slice()); + let mut early_exit = false; + let lower_known_layer: Option = + match layer_precomputed_results + .as_mut() + .and_then(|mut layer_precomputed_results| { + layer_precomputed_results.key_values.remove(&key).and_then( + |result_or_item| match result_or_item { + BTreeMapLevelResultOrItem::BTreeMapLevelResult(value) => { + Some(Ok(value)) + } + _ => { + early_exit = true; + None + } + }, + ) + }) + .transpose() + { + Ok(lower_known_layer) => lower_known_layer, + Err(e) => return Some(Err(e)), + }; + if early_exit { + return None; } + Some( + self.prove_subqueries(lower_path, path_query, lower_known_layer) + .unwrap_add_cost(&mut cost) + .map(|layer_proof| (key, layer_proof)), + ) }) .collect::, Error>>() ); @@ -334,7 +367,7 @@ impl GroveDb { query_items: &Vec, left_to_right: bool, limit: Option, - ) -> CostResult, Error> + ) -> CostResult<(Vec, Vec>), Error> where S: StorageContext<'a> + 'a, B: AsRef<[u8]>, @@ -350,7 +383,7 @@ impl GroveDb { .map_err(|_e| Error::InternalError("failed to generate proof")) ); - cost_return_on_error!( + let tree_keys = cost_return_on_error!( &mut cost, self.post_process_merk_proof(path, &mut proof_result) ); @@ -358,7 +391,7 @@ impl GroveDb { let mut proof_bytes = Vec::with_capacity(128); encode_into(proof_result.proof.iter(), &mut proof_bytes); - Ok(proof_bytes).wrap_with_cost(cost) + Ok((proof_bytes, tree_keys)).wrap_with_cost(cost) } /// Converts Items to Node::KV from Node::KVValueHash @@ -368,9 +401,10 @@ impl GroveDb { &self, path: &SubtreePath, proof_result: &mut ProofWithoutEncodingResult, - ) -> CostResult<(), Error> { + ) -> CostResult, Error> { let mut cost = OperationCost::default(); + let mut sub_level_keys = vec![]; for op in proof_result.proof.iter_mut() { match op { Op::Push(node) | Op::PushInverted(node) => match node { @@ -414,6 +448,9 @@ impl GroveDb { Ok(Element::Item(..)) => { *node = Node::KV(key.to_owned(), 
value.to_owned()) } + Ok(Element::Tree(..)) | Ok(Element::SumTree(..)) => { + sub_level_keys.push(key.clone()) + } _ => continue, } } @@ -422,7 +459,7 @@ impl GroveDb { _ => continue, } } - Ok(()).wrap_with_cost(cost) + Ok(sub_level_keys).wrap_with_cost(cost) } } // #[cfg(test)] From 0482eefde9f6842644120e181e947cc07c8bee6c Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Fri, 5 Jul 2024 06:02:13 +0700 Subject: [PATCH 07/34] much more work on proofs --- grovedb/src/batch/mod.rs | 4 +- grovedb/src/element/query.rs | 156 ++++++++++++++++- grovedb/src/error.rs | 4 +- grovedb/src/lib.rs | 6 +- grovedb/src/operations/get/mod.rs | 2 +- grovedb/src/operations/proof/generate.rs | 203 ++++++++++++++++++----- grovedb/src/operations/proof/verify.rs | 35 +++- grovedb/src/query/mod.rs | 114 ++++++++----- grovedb/src/query_result_type.rs | 82 +++++---- grovedb/src/reference_path.rs | 2 +- grovedb/src/replication.rs | 40 +++-- grovedb/src/tests/common.rs | 2 +- grovedb/src/tests/mod.rs | 35 ++-- grovedb/src/tests/query_tests.rs | 11 +- merk/src/proofs/mod.rs | 45 +++++ merk/src/proofs/query/mod.rs | 49 +++++- merk/src/proofs/query/query_item/mod.rs | 61 ++++++- merk/src/proofs/query/verify.rs | 44 +++-- 18 files changed, 703 insertions(+), 192 deletions(-) diff --git a/grovedb/src/batch/mod.rs b/grovedb/src/batch/mod.rs index 3e4d4275..474a304e 100644 --- a/grovedb/src/batch/mod.rs +++ b/grovedb/src/batch/mod.rs @@ -1000,7 +1000,7 @@ where let mut merk = cost_return_on_error!(&mut cost, merk_wrapped); merk.set_base_root_key(root_key) .add_cost(cost) - .map_err(|_| Error::InternalError("unable to set base root key")) + .map_err(|_| Error::InternalError("unable to set base root key".to_string())) } fn execute_ops_on_path( @@ -1804,7 +1804,7 @@ impl GroveDb { .add_cost(cost) } else { Err(Error::CorruptedPath( - "cannot open a subtree as parent exists but is not a tree", + "cannot open a subtree as parent exists but is not a tree".to_string(), )) 
.wrap_with_cost(OperationCost::default()) } diff --git a/grovedb/src/element/query.rs b/grovedb/src/element/query.rs index c992ba26..e1f0409d 100644 --- a/grovedb/src/element/query.rs +++ b/grovedb/src/element/query.rs @@ -29,6 +29,8 @@ //! Query //! Implements functions in Element for querying +use std::fmt; + #[cfg(feature = "full")] use grovedb_costs::{ cost_return_on_error, cost_return_on_error_no_add, CostContext, CostResult, CostsExt, @@ -36,6 +38,7 @@ use grovedb_costs::{ }; #[cfg(feature = "full")] use grovedb_merk::proofs::query::query_item::QueryItem; +use grovedb_merk::proofs::query::SubqueryBranch; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_merk::proofs::Query; use grovedb_path::SubtreePath; @@ -74,6 +77,26 @@ pub struct QueryOptions { pub error_if_intermediate_path_tree_not_present: bool, } +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for QueryOptions { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "QueryOptions {{")?; + writeln!(f, " allow_get_raw: {}", self.allow_get_raw)?; + writeln!(f, " allow_cache: {}", self.allow_cache)?; + writeln!( + f, + " decrease_limit_on_range_with_no_sub_elements: {}", + self.decrease_limit_on_range_with_no_sub_elements + )?; + writeln!( + f, + " error_if_intermediate_path_tree_not_present: {}", + self.error_if_intermediate_path_tree_not_present + )?; + write!(f, "}}") + } +} + #[cfg(any(feature = "full", feature = "verify"))] impl Default for QueryOptions { fn default() -> Self { @@ -107,6 +130,126 @@ where pub offset: &'a mut Option, } +fn format_query(query: &Query, indent: usize) -> String { + let indent_str = " ".repeat(indent); + let mut output = format!("{}Query {{\n", indent_str); + + output += &format!("{} items: [\n", indent_str); + for item in &query.items { + output += &format!("{} {},\n", indent_str, item); + } + output += &format!("{} ],\n", indent_str); + + output += &format!( + "{} default_subquery_branch: {}\n", + indent_str, + 
format_subquery_branch(&query.default_subquery_branch, indent + 2) + ); + + if let Some(ref branches) = query.conditional_subquery_branches { + output += &format!("{} conditional_subquery_branches: {{\n", indent_str); + for (item, branch) in branches { + output += &format!( + "{} {}: {},\n", + indent_str, + item, + format_subquery_branch(branch, indent + 4) + ); + } + output += &format!("{} }},\n", indent_str); + } + + output += &format!("{} left_to_right: {}\n", indent_str, query.left_to_right); + output += &format!("{}}}", indent_str); + + output +} + +fn format_subquery_branch(branch: &SubqueryBranch, indent: usize) -> String { + let indent_str = " ".repeat(indent); + let mut output = format!("SubqueryBranch {{\n"); + + if let Some(ref path) = branch.subquery_path { + output += &format!("{} subquery_path: {:?},\n", indent_str, path); + } + + if let Some(ref subquery) = branch.subquery { + output += &format!( + "{} subquery: {},\n", + indent_str, + format_query(subquery, indent + 2) + ); + } + + output += &format!("{}}}", " ".repeat(indent)); + + output +} + +#[cfg(feature = "full")] +impl<'db, 'ctx, 'a> fmt::Display for PathQueryPushArgs<'db, 'ctx, 'a> +where + 'db: 'ctx, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "PathQueryPushArgs {{")?; + writeln!( + f, + " key: {}", + self.key.map_or("None".to_string(), |k| hex_to_ascii(k)) + )?; + writeln!(f, " element: {}", self.element)?; + writeln!( + f, + " path: [{}]", + self.path + .iter() + .map(|p| hex_to_ascii(*p)) + .collect::>() + .join(", ") + )?; + writeln!( + f, + " subquery_path: {}", + self.subquery_path + .as_ref() + .map_or("None".to_string(), |p| format!( + "[{}]", + p.iter() + .map(|e| hex_to_ascii(e.as_slice())) + .collect::>() + .join(", ") + )) + )?; + writeln!( + f, + " subquery: {}", + self.subquery + .as_ref() + .map_or("None".to_string(), |q| format!("\n{}", format_query(q, 4))) + )?; + writeln!(f, " left_to_right: {}", self.left_to_right)?; + writeln!(f, " 
query_options: {}", self.query_options)?; + writeln!(f, " result_type: {}", self.result_type)?; + writeln!( + f, + " results: [{}]", + self.results + .iter() + .map(|r| format!("{}", r)) + .collect::>() + .join(", ") + )?; + writeln!(f, " limit: {:?}", self.limit)?; + writeln!(f, " offset: {:?}", self.offset)?; + write!(f, "}}") + } +} + +fn hex_to_ascii(hex_value: &[u8]) -> String { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) +} + impl Element { #[cfg(feature = "full")] /// Returns a vector of result elements based on given query @@ -285,6 +428,8 @@ impl Element { #[cfg(feature = "full")] /// Push arguments to path query fn path_query_push(args: PathQueryPushArgs) -> CostResult<(), Error> { + // println!("path_query_push {} \n", args); + let mut cost = OperationCost::default(); let PathQueryPushArgs { @@ -623,7 +768,7 @@ impl Element { } } else { Err(Error::InternalError( - "QueryItem must be a Key if not a range", + "QueryItem must be a Key if not a range".to_string(), )) } } else { @@ -698,6 +843,7 @@ impl Element { #[cfg(feature = "full")] fn basic_push(args: PathQueryPushArgs) -> Result<(), Error> { + // println!("basic_push {}", args); let PathQueryPushArgs { path, key, @@ -717,14 +863,18 @@ impl Element { results.push(QueryResultElement::ElementResultItem(element)); } QueryResultType::QueryKeyElementPairResultType => { - let key = key.ok_or(Error::CorruptedPath("basic push must have a key"))?; + let key = key.ok_or(Error::CorruptedPath( + "basic push must have a key".to_string(), + ))?; results.push(QueryResultElement::KeyElementPairResultItem(( Vec::from(key), element, ))); } QueryResultType::QueryPathKeyElementTrioResultType => { - let key = key.ok_or(Error::CorruptedPath("basic push must have a key"))?; + let key = key.ok_or(Error::CorruptedPath( + "basic push must have a key".to_string(), + ))?; let path = path.iter().map(|a| a.to_vec()).collect(); results.push(QueryResultElement::PathKeyElementTrioResultItem(( 
path, diff --git a/grovedb/src/error.rs b/grovedb/src/error.rs index c3419f00..71a7e01e 100644 --- a/grovedb/src/error.rs +++ b/grovedb/src/error.rs @@ -16,7 +16,7 @@ pub enum Error { MissingReference(String), #[error("internal error: {0}")] /// Internal error - InternalError(&'static str), + InternalError(String), #[error("invalid proof: {0}")] /// Invalid proof InvalidProof(String), @@ -62,7 +62,7 @@ pub enum Error { /// The corrupted path represents a consistency error in internal groveDB /// logic #[error("corrupted path: {0}")] - CorruptedPath(&'static str), + CorruptedPath(String), // Query errors #[error("invalid query: {0}")] diff --git a/grovedb/src/lib.rs b/grovedb/src/lib.rs index 9a0068eb..c7550c5a 100644 --- a/grovedb/src/lib.rs +++ b/grovedb/src/lib.rs @@ -318,7 +318,7 @@ impl GroveDb { .add_cost(cost) } else { Err(Error::CorruptedPath( - "cannot open a subtree as parent exists but is not a tree", + "cannot open a subtree as parent exists but is not a tree".to_string(), )) .wrap_with_cost(cost) } @@ -378,7 +378,7 @@ impl GroveDb { .unwrap() } else { Err(Error::CorruptedPath( - "cannot open a subtree as parent exists but is not a tree", + "cannot open a subtree as parent exists but is not a tree".to_string(), )) } } else { @@ -438,7 +438,7 @@ impl GroveDb { .add_cost(cost) } else { Err(Error::CorruptedPath( - "cannot open a subtree as parent exists but is not a tree", + "cannot open a subtree as parent exists but is not a tree".to_string(), )) .wrap_with_cost(cost) } diff --git a/grovedb/src/operations/get/mod.rs b/grovedb/src/operations/get/mod.rs index 12700106..4cc9f949 100644 --- a/grovedb/src/operations/get/mod.rs +++ b/grovedb/src/operations/get/mod.rs @@ -143,7 +143,7 @@ impl GroveDb { }) ) } else { - return Err(Error::CorruptedPath("empty path")).wrap_with_cost(cost); + return Err(Error::CorruptedPath("empty path".to_string())).wrap_with_cost(cost); } visited.insert(current_path.clone()); match current_element { diff --git 
a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index a93c0368..fc04cd37 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -31,15 +31,13 @@ use crate::{ #[derive(Debug, Clone, Copy)] pub struct ProveOptions { - pub is_verbose: bool, - pub multilevel_results: bool, + pub decrease_limit_on_empty_sub_query_result: bool, } impl Default for ProveOptions { fn default() -> Self { ProveOptions { - is_verbose: false, - multilevel_results: false, + decrease_limit_on_empty_sub_query_result: true, } } } @@ -131,7 +129,7 @@ fn node_to_string(node: &Node) -> String { Node::Hash(hash) => format!("Hash(HASH[{}])", hex::encode(hash)), Node::KVHash(kv_hash) => format!("KVHash(HASH[{}])", hex::encode(kv_hash)), Node::KV(key, value) => { - format!("KV({}, {})", hex::encode(key), element_hex_to_ascii(value)) + format!("KV({}, {})", hex_to_ascii(key), element_hex_to_ascii(value)) } Node::KVValueHash(key, value, value_hash) => format!( "KVValueHash({}, {}, HASH[{}])", @@ -228,6 +226,8 @@ impl GroveDb { ) -> CostResult { let mut cost = OperationCost::default(); + let prove_options = prove_options.unwrap_or_default(); + if path_query.query.offset.is_some() && path_query.query.offset != Some(0) { return Err(Error::InvalidQuery( "proved path queries can not have offsets", @@ -238,6 +238,21 @@ impl GroveDb { // we want to query raw because we want the references to not be resolved at // this point + let values = cost_return_on_error!( + &mut cost, + self.query_raw( + path_query, + false, + true, + false, + QueryResultType::QueryPathKeyElementTrioResultType, + None + ) + ) + .0; + + println!("values are {}", values); + let precomputed_result_map = cost_return_on_error!( &mut cost, self.query_raw( @@ -254,9 +269,17 @@ impl GroveDb { println!("precomputed results are {}", precomputed_result_map); + let mut limit = path_query.query.limit; + let root_layer = cost_return_on_error!( &mut cost, - 
self.prove_subqueries(vec![], path_query, Some(precomputed_result_map)) + self.prove_subqueries( + vec![], + path_query, + &mut limit, + Some(precomputed_result_map), + &prove_options + ) ); Ok(GroveDBProofV0 { root_layer }.into()).wrap_with_cost(cost) @@ -268,15 +291,21 @@ impl GroveDb { &self, path: Vec<&[u8]>, path_query: &PathQuery, + overall_limit: &mut Option, mut layer_precomputed_results: Option, + prove_options: &ProveOptions, ) -> CostResult { let mut cost = OperationCost::default(); - let (query_at_path, left_to_right) = cost_return_on_error_no_add!( + let (query_at_path, left_to_right, has_subqueries) = cost_return_on_error_no_add!( &cost, path_query .query_items_at_path(path.as_slice()) - .ok_or(Error::CorruptedPath("path should be part of path_query")) + .ok_or(Error::CorruptedPath(format!( + "prove subqueries: path {} should be part of path_query {}", + path.iter().map(hex::encode).collect::>().join("/"), + path_query + ))) ); let subtree = cost_return_on_error!( @@ -284,37 +313,81 @@ impl GroveDb { self.open_non_transactional_merk_at_path(path.as_slice().into(), None) ); - let mut items_to_prove: BTreeSet> = layer_precomputed_results - .as_ref() - .map_or(BTreeSet::new(), |map| { - map.key_values.keys().cloned().collect() - }); - - for query_item in query_at_path.as_slice() { - match query_item { - QueryItem::Key(key) => { - items_to_prove.insert(key.clone()); - } - _ => {} - } - } + // let mut items_to_prove: BTreeSet> = layer_precomputed_results + // .as_ref() + // .map_or(BTreeSet::new(), |map| { + // map.key_values.keys().cloned().collect() + // }); + // + // for query_item in query_at_path.as_slice() { + // match query_item { + // QueryItem::Key(key) => { + // items_to_prove.insert(key.clone()); + // } + // _ => {} + // } + // } + + let limit = if path.len() < path_query.path.len() { + // There is no need for a limit because we are only asking for a single item + None + } else { + *overall_limit + }; - let (merk_proof, sub_level_keys) = 
cost_return_on_error!( + let (merk_proof, sub_level_keys, results_found) = cost_return_on_error!( &mut cost, self.generate_merk_proof( &path.as_slice().into(), &subtree, &query_at_path, left_to_right, - Some(items_to_prove.len() as u16), + has_subqueries, + limit, ) ); + if prove_options.decrease_limit_on_empty_sub_query_result + && sub_level_keys.is_empty() + && results_found == 0 + { + // In this case we should reduce by 1 to prevent attacks on proofs + overall_limit.as_mut().map(|limit| *limit -= 1); + } else if results_found > 0 { + overall_limit.as_mut().map(|limit| *limit -= results_found); + }; + + println!( + "generated merk proof at level path level [{}] sub level keys found are [{}], limit \ + is {:?}, results found were {}, has subqueries {}, {}", + path.iter() + .map(|a| hex_to_ascii(*a)) + .collect::>() + .join("/"), + sub_level_keys + .iter() + .map(|a| hex_to_ascii(a)) + .collect::>() + .join(", "), + overall_limit, + results_found, + has_subqueries, + if left_to_right { + "left to right" + } else { + "right to left" + } + ); + let lower_layers = cost_return_on_error_no_add!( &cost, sub_level_keys .into_iter() - .filter_map(|(key)| { + .map_while(|(key)| { + // Check if we should stop after processing this key + if *overall_limit == Some(0) { + return None; + } let mut lower_path = path.clone(); lower_path.push(key.as_slice()); let mut early_exit = false; @@ -337,17 +410,25 @@ impl GroveDb { .transpose() { Ok(lower_known_layer) => lower_known_layer, - Err(e) => return Some(Err(e)), + Err(e) => return Some(Some(Err(e))), }; if early_exit { - return None; + return Some(None); } - Some( - self.prove_subqueries(lower_path, path_query, lower_known_layer) - .unwrap_add_cost(&mut cost) - .map(|layer_proof| (key, layer_proof)), - ) + let result = self + .prove_subqueries( + lower_path, + path_query, + overall_limit, + lower_known_layer, + prove_options, + ) + .unwrap_add_cost(&mut cost) + .map(|layer_proof| (key, layer_proof)); + + Some(Some(result)) }) + 
.filter_map(|a| a) .collect::, Error>>() ); @@ -366,8 +447,9 @@ impl GroveDb { subtree: &'a Merk, query_items: &Vec, left_to_right: bool, + has_any_subquery: bool, limit: Option, - ) -> CostResult<(Vec, Vec>), Error> + ) -> CostResult<(Vec, Vec>, u16), Error> where S: StorageContext<'a> + 'a, B: AsRef<[u8]>, @@ -380,18 +462,26 @@ impl GroveDb { .prove_unchecked_query_items(query_items, limit, left_to_right) .map_ok(|(proof, limit)| ProofWithoutEncodingResult::new(proof, limit)) .unwrap() - .map_err(|_e| Error::InternalError("failed to generate proof")) + .map_err(|e| Error::InternalError(format!( + "failed to generate proof for query_items [{}] error is : {}", + query_items + .iter() + .map(|e| e.to_string()) + .collect::>() + .join(", "), + e + ))) ); - let tree_keys = cost_return_on_error!( + let (tree_keys, results_found) = cost_return_on_error!( &mut cost, - self.post_process_merk_proof(path, &mut proof_result) + self.post_process_merk_proof(path, has_any_subquery, &mut proof_result) ); let mut proof_bytes = Vec::with_capacity(128); encode_into(proof_result.proof.iter(), &mut proof_bytes); - Ok((proof_bytes, tree_keys)).wrap_with_cost(cost) + Ok((proof_bytes, tree_keys, results_found)).wrap_with_cost(cost) } /// Converts Items to Node::KV from Node::KVValueHash @@ -400,11 +490,14 @@ impl GroveDb { fn post_process_merk_proof>( &self, path: &SubtreePath, + has_any_subquery: bool, proof_result: &mut ProofWithoutEncodingResult, - ) -> CostResult, Error> { + ) -> CostResult<(Vec, u16), Error> { let mut cost = OperationCost::default(); + let mut results_found = 0; let mut sub_level_keys = vec![]; + for op in proof_result.proof.iter_mut() { match op { Op::Push(node) | Op::PushInverted(node) => match node { @@ -443,13 +536,36 @@ impl GroveDb { key.to_owned(), serialized_referenced_elem.expect("confirmed ok above"), value_hash(value).unwrap_add_cost(&mut cost), - ) + ); + results_found += 1; } Ok(Element::Item(..)) => { - *node = Node::KV(key.to_owned(), 
value.to_owned()) + println!("found {}", hex_to_ascii(key)); + *node = Node::KV(key.to_owned(), value.to_owned()); + results_found += 1; } - Ok(Element::Tree(..)) | Ok(Element::SumTree(..)) => { - sub_level_keys.push(key.clone()) + Ok(Element::Tree(Some(_), _)) => { + println!("found tree {}", hex_to_ascii(key)); + // We only want to check in sub nodes for the proof if the tree has + // elements + sub_level_keys.push(key.clone()); + } + Ok(Element::SumTree(Some(_), ..)) => { + // We only want to check in sub nodes for the proof if the tree has + // elements + sub_level_keys.push(key.clone()); + if !has_any_subquery { + results_found += 1; // if there is no + // subquery we return + // Empty trees + } + } + Ok(Element::Tree(None, _)) | Ok(Element::SumTree(None, ..)) => { + if !has_any_subquery { + results_found += 1; // if there is no + // subquery we return + // Empty trees + } } _ => continue, } @@ -459,7 +575,8 @@ impl GroveDb { _ => continue, } } - Ok(sub_level_keys).wrap_with_cost(cost) + + Ok((sub_level_keys, results_found)).wrap_with_cost(cost) } } // #[cfg(test)] diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 2aa77675..1163a424 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -108,9 +108,18 @@ impl GroveDb { result: &mut Vec, is_subset: bool, ) -> Result<[u8; 32], Error> { - let (query_items, left_to_right) = query - .query_items_at_path(current_path) - .ok_or(Error::CorruptedPath("path should be part of path_query"))?; + let (query_items, left_to_right, _) = + query + .query_items_at_path(current_path) + .ok_or(Error::CorruptedPath(format!( + "verify: path {} should be part of path_query {}", + current_path + .iter() + .map(hex::encode) + .collect::>() + .join("/"), + query + )))?; let level_query = Query { items: query_items.to_vec(), @@ -173,9 +182,19 @@ impl GroveDb { result: &mut ProvedPathKeyValues, is_subset: bool, ) -> Result<[u8; 32], Error> { - 
let (query_items, left_to_right) = query - .query_items_at_path(current_path) - .ok_or(Error::CorruptedPath("path should be part of path_query"))?; + let in_path_proving = current_path.len() < query.path.len(); + let (query_items, left_to_right, _) = + query + .query_items_at_path(current_path) + .ok_or(Error::CorruptedPath(format!( + "verify raw: path {} should be part of path_query {}", + current_path + .iter() + .map(hex::encode) + .collect::>() + .join("/"), + query + )))?; let level_query = Query { items: query_items.to_vec(), @@ -202,13 +221,13 @@ impl GroveDb { let mut path = current_path.to_vec(); let key = &proved_key_value.key; let value = &proved_key_value.value; + let element = Element::deserialize(value)?; let hash = &proved_key_value.proof; path.push(key); verified_keys.insert(key.clone()); if let Some(lower_layer) = layer_proof.lower_layers.get(key) { - let element = Element::deserialize(value)?; match element { Element::Tree(Some(v), _) | Element::SumTree(Some(v), ..) => { let lower_hash = Self::verify_layer_proof_raw( @@ -241,7 +260,7 @@ impl GroveDb { )); } } - } else { + } else if !in_path_proving { let path_key_value = ProvedPathKeyValue::from_proved_key_value( path.iter().map(|p| p.to_vec()).collect(), proved_key_value, diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index cb450e6e..2a8ad6d4 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -1,34 +1,6 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright 
notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Queries -use std::{borrow::Cow, cmp::Ordering}; +use std::{borrow::Cow, cmp::Ordering, fmt}; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_merk::proofs::query::query_item::QueryItem; @@ -55,6 +27,30 @@ pub struct PathQuery { pub query: SizedQuery, } +/// Do we go from left to right +pub type LeftToRight = bool; + +/// Do we have subqueries +pub type HasSubqueries = bool; + +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for PathQuery { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "PathQuery {{ path: [")?; + for (i, path_element) in self.path.iter().enumerate() { + if i > 0 { + write!(f, ", ")?; + } + write!(f, "{}", hex_to_ascii(path_element))?; + } + write!(f, "], query: {} }}", self.query) + } +} + +fn hex_to_ascii(hex_value: &[u8]) -> String { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) +} + #[cfg(any(feature = "full", feature = "verify"))] #[derive(Debug, Clone)] /// Holds a query to apply to a tree and an optional limit/offset value. 
@@ -68,6 +64,20 @@ pub struct SizedQuery { pub offset: Option, } +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for SizedQuery { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "SizedQuery {{ query: {}", self.query)?; + if let Some(limit) = self.limit { + write!(f, ", limit: {}", limit)?; + } + if let Some(offset) = self.offset { + write!(f, ", offset: {}", offset)?; + } + write!(f, " }}") + } +} + #[cfg(any(feature = "full", feature = "verify"))] impl SizedQuery { /// New sized query @@ -273,32 +283,38 @@ impl PathQuery { pub fn query_items_at_path<'a>( &'a self, path: &[&[u8]], - ) -> Option<(Cow<'a, Vec>, bool)> { + ) -> Option<(Cow<'a, Vec>, LeftToRight, HasSubqueries)> { fn recursive_query_items<'b>( query: &'b Query, path: &[&[u8]], - ) -> Option<(Cow<'b, Vec>, bool)> { + ) -> Option<(Cow<'b, Vec>, LeftToRight, HasSubqueries)> { if path.is_empty() { - return Some((Cow::Borrowed(&query.items), query.left_to_right)); + return Some(( + Cow::Borrowed(&query.items), + query.left_to_right, + query.has_subquery(), + )); } let key = path[0]; + let path_after_top_removed = &path[1..]; if let Some(conditional_branches) = &query.conditional_subquery_branches { for (query_item, subquery_branch) in conditional_branches { if query_item.contains(key) { if let Some(subquery_path) = &subquery_branch.subquery_path { - if path.len() <= subquery_path.len() { - if path + if path_after_top_removed.len() <= subquery_path.len() { + if path_after_top_removed .iter() .zip(subquery_path) .all(|(a, b)| *a == b.as_slice()) { - return if path.len() == subquery_path.len() { + return if path_after_top_removed.len() == subquery_path.len() { if let Some(subquery) = &subquery_branch.subquery { Some(( Cow::Borrowed(&subquery.items), subquery.left_to_right, + subquery.has_subquery(), )) } else { None @@ -306,9 +322,10 @@ impl PathQuery { } else { Some(( Cow::Owned(vec![QueryItem::Key( - subquery_path[path.len()].clone(), + 
subquery_path[path_after_top_removed.len()].clone(), )]), true, + false, )) }; } @@ -318,33 +335,40 @@ impl PathQuery { return if let Some(subquery) = &subquery_branch.subquery { recursive_query_items(subquery, &path[1..]) } else { - Some((Cow::Owned(vec![QueryItem::Key(key.to_vec())]), true)) + Some((Cow::Owned(vec![QueryItem::Key(key.to_vec())]), true, false)) }; } } } if let Some(subquery_path) = &query.default_subquery_branch.subquery_path { - if path.len() <= subquery_path.len() { - if path + if path_after_top_removed.len() <= subquery_path.len() { + if path_after_top_removed .iter() .zip(subquery_path) .all(|(a, b)| *a == b.as_slice()) { - return if path.len() == subquery_path.len() { + return if path_after_top_removed.len() == subquery_path.len() { if let Some(subquery) = &query.default_subquery_branch.subquery { - Some((Cow::Borrowed(&subquery.items), subquery.left_to_right)) + Some(( + Cow::Borrowed(&subquery.items), + subquery.left_to_right, + subquery.has_subquery(), + )) } else { None } } else { Some(( - Cow::Owned(vec![QueryItem::Key(subquery_path[path.len()].clone())]), + Cow::Owned(vec![QueryItem::Key( + subquery_path[path_after_top_removed.len()].clone(), + )]), true, + false, )) }; } - } else if path + } else if path_after_top_removed .iter() .take(subquery_path.len()) .zip(subquery_path) @@ -355,7 +379,7 @@ impl PathQuery { } } } else if let Some(subquery) = &query.default_subquery_branch.subquery { - return recursive_query_items(subquery, &path[1..]); + return recursive_query_items(subquery, path_after_top_removed); } None @@ -370,6 +394,7 @@ impl PathQuery { Some(( Cow::Owned(vec![QueryItem::Key(self.path[given_path_len].clone())]), true, + false, )) } else { None @@ -380,6 +405,7 @@ impl PathQuery { Some(( Cow::Borrowed(&self.query.query.items), self.query.query.left_to_right, + self.query.query.has_subquery(), )) } else { None diff --git a/grovedb/src/query_result_type.rs b/grovedb/src/query_result_type.rs index 2189a7f9..5f89fdda 100644 --- 
a/grovedb/src/query_result_type.rs +++ b/grovedb/src/query_result_type.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Determines the query result form use std::{ @@ -49,6 +21,20 @@ pub enum QueryResultType { QueryPathKeyElementTrioResultType, } +impl fmt::Display for QueryResultType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + QueryResultType::QueryElementResultType => write!(f, "QueryElementResultType"), + QueryResultType::QueryKeyElementPairResultType => { + write!(f, "QueryKeyElementPairResultType") + } + QueryResultType::QueryPathKeyElementTrioResultType => { + write!(f, "QueryPathKeyElementTrioResultType") + } + } + } +} + /// Query result elements #[derive(Debug, Clone)] pub struct QueryResultElements { @@ -56,6 +42,16 @@ pub struct QueryResultElements { pub elements: Vec, } +impl fmt::Display for QueryResultElements { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "QueryResultElements {{")?; + for (index, element) in self.elements.iter().enumerate() { + writeln!(f, " {}: {}", index, element)?; + } + write!(f, "}}") + } +} + #[derive(Debug, Clone)] pub enum BTreeMapLevelResultOrItem { BTreeMapLevelResult(BTreeMapLevelResult), @@ -441,6 +437,36 @@ pub enum QueryResultElement { PathKeyElementTrioResultItem(PathKeyElementTrio), } +impl fmt::Display for QueryResultElement { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + QueryResultElement::ElementResultItem(element) => { + write!(f, "ElementResultItem({})", element) + } + QueryResultElement::KeyElementPairResultItem((key, element)) => { + write!( + f, + "KeyElementPairResultItem(key: {}, element: {})", + hex_to_ascii(key), + element + ) + } + QueryResultElement::PathKeyElementTrioResultItem((path, key, element)) => { + write!( + f, + "PathKeyElementTrioResultItem(path: {}, key: {}, element: {})", + path.iter() + .map(|p| hex_to_ascii(p)) + .collect::>() + .join("/"), + hex_to_ascii(key), + element + ) + } + } + } +} + #[cfg(feature = "full")] impl QueryResultElement { /// Map element diff --git a/grovedb/src/reference_path.rs 
b/grovedb/src/reference_path.rs index 8ab4cf58..e05c7507 100644 --- a/grovedb/src/reference_path.rs +++ b/grovedb/src/reference_path.rs @@ -180,7 +180,7 @@ pub fn path_from_reference_qualified_path_type>( ) -> Result>, Error> { match current_qualified_path.split_last() { None => Err(Error::CorruptedPath( - "qualified path should always have an element", + "qualified path should always have an element".to_string(), )), Some((key, path)) => { path_from_reference_path_type(reference_path_type, path, Some(key.as_ref())) diff --git a/grovedb/src/replication.rs b/grovedb/src/replication.rs index 5f7db1f3..b6533868 100644 --- a/grovedb/src/replication.rs +++ b/grovedb/src/replication.rs @@ -364,7 +364,7 @@ impl GroveDb { || !state_sync_info.processed_prefixes.is_empty() { return Err(Error::InternalError( - "GroveDB has already started a snapshot syncing", + "GroveDB has already started a snapshot syncing".to_string(), )); } @@ -384,7 +384,9 @@ impl GroveDb { .insert(root_prefix, root_prefix_state_sync_info); state_sync_info.app_hash = app_hash; } else { - return Err(Error::InternalError("Unable to open merk for replication")); + return Err(Error::InternalError( + "Unable to open merk for replication".to_string(), + )); } Ok(state_sync_info) @@ -424,7 +426,9 @@ impl GroveDb { replication::util_split_global_chunk_id(global_chunk_id, &state_sync_info.app_hash)?; if state_sync_info.current_prefixes.is_empty() { - return Err(Error::InternalError("GroveDB is not in syncing mode")); + return Err(Error::InternalError( + "GroveDB is not in syncing mode".to_string(), + )); } if let Some(subtree_state_sync) = state_sync_info.current_prefixes.remove(&chunk_prefix) { if let Ok((res, mut new_subtree_state_sync)) = @@ -453,12 +457,16 @@ impl GroveDb { // Subtree is finished. We can save it. 
match new_subtree_state_sync.restorer.take() { - None => Err(Error::InternalError("Unable to finalize subtree")), + None => Err(Error::InternalError( + "Unable to finalize subtree".to_string(), + )), Some(restorer) => { if (new_subtree_state_sync.num_processed_chunks > 0) && (restorer.finalize().is_err()) { - return Err(Error::InternalError("Unable to finalize Merk")); + return Err(Error::InternalError( + "Unable to finalize Merk".to_string(), + )); } state_sync_info.processed_prefixes.insert(chunk_prefix); @@ -479,16 +487,20 @@ impl GroveDb { next_chunk_ids.extend(res); Ok((next_chunk_ids, new_state_sync_info)) } else { - Err(Error::InternalError("Unable to discover Subtrees")) + Err(Error::InternalError( + "Unable to discover Subtrees".to_string(), + )) } } } } } else { - Err(Error::InternalError("Unable to process incoming chunk")) + Err(Error::InternalError( + "Unable to process incoming chunk".to_string(), + )) } } else { - Err(Error::InternalError("Invalid incoming prefix")) + Err(Error::InternalError("Invalid incoming prefix".to_string())) } } @@ -510,7 +522,7 @@ impl GroveDb { Some(restorer) => { if !state_sync_info.pending_chunks.contains(chunk_id) { return Err(Error::InternalError( - "Incoming global_chunk_id not expected", + "Incoming global_chunk_id not expected".to_string(), )); } state_sync_info.pending_chunks.remove(chunk_id); @@ -529,7 +541,7 @@ impl GroveDb { } _ => { return Err(Error::InternalError( - "Unable to process incoming chunk", + "Unable to process incoming chunk".to_string(), )); } }; @@ -543,7 +555,9 @@ impl GroveDb { } } _ => { - return Err(Error::InternalError("Invalid internal state (restorer")); + return Err(Error::InternalError( + "Invalid internal state (restorer".to_string(), + )); } } @@ -593,7 +607,9 @@ impl GroveDb { let root_chunk_prefix = prefix.to_vec(); res.push(root_chunk_prefix.to_vec()); } else { - return Err(Error::InternalError("Unable to open Merk for replication")); + return Err(Error::InternalError( + "Unable to 
open Merk for replication".to_string(), + )); } } } diff --git a/grovedb/src/tests/common.rs b/grovedb/src/tests/common.rs index 10f05b80..7cd43bd4 100644 --- a/grovedb/src/tests/common.rs +++ b/grovedb/src/tests/common.rs @@ -48,7 +48,7 @@ fn deserialize_and_extract_item_bytes(raw_bytes: &[u8]) -> Result, Error let elem = Element::deserialize(raw_bytes)?; match elem { Element::Item(item, _) => Ok(item), - _ => Err(Error::CorruptedPath("expected only item type")), + _ => Err(Error::CorruptedPath("expected only item type".to_string())), } } diff --git a/grovedb/src/tests/mod.rs b/grovedb/src/tests/mod.rs index f0f085c6..b1cf1daf 100644 --- a/grovedb/src/tests/mod.rs +++ b/grovedb/src/tests/mod.rs @@ -2230,16 +2230,7 @@ mod tests { ); // Generate proof - let proof = db - .prove_query( - &path_query, - Some(ProveOptions { - is_verbose: false, - multilevel_results: true, - }), - ) - .unwrap() - .unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); // Verify proof let (hash, result_set) = @@ -2275,6 +2266,30 @@ mod tests { fn test_path_query_proofs_with_direction() { let temp_db = make_deep_tree(); + // root + // deep_leaf + // deep_node_1 + // deeper_1 + // k1,v1 + // k2,v2 + // k3,v3 + // deeper_2 + // k4,v4 + // k5,v5 + // k6,v6 + // deep_node_2 + // deeper_3 + // k7,v7 + // k8,v8 + // k9,v9 + // deeper_4 + // k10,v10 + // k11,v11 + // deeper_5 + // k12,v12 + // k13,v13 + // k14,v14 + let mut query = Query::new_with_direction(false); query.insert_all(); diff --git a/grovedb/src/tests/query_tests.rs b/grovedb/src/tests/query_tests.rs index 254a4d06..04998bfa 100644 --- a/grovedb/src/tests/query_tests.rs +++ b/grovedb/src/tests/query_tests.rs @@ -2379,16 +2379,7 @@ fn test_subset_proof_verification() { ); // prove verbose - let verbose_proof = db - .prove_query( - &path_query, - Some(ProveOptions { - is_verbose: true, - multilevel_results: false, - }), - ) - .unwrap() - .unwrap(); + let verbose_proof = db.prove_query(&path_query, 
None).unwrap().unwrap(); assert!(verbose_proof.len() > proof.len()); // subset path query diff --git a/merk/src/proofs/mod.rs b/merk/src/proofs/mod.rs index bf3718f4..5863eaf1 100644 --- a/merk/src/proofs/mod.rs +++ b/merk/src/proofs/mod.rs @@ -76,9 +76,54 @@ pub enum Node { KVValueHash(Vec, Vec, CryptoHash), /// Represents, the key, value, value_hash and feature_type of a tree node + /// Used by Sum trees KVValueHashFeatureType(Vec, Vec, CryptoHash, TreeFeatureType), /// Represents the key, value of some referenced node and value_hash of /// current tree node KVRefValueHash(Vec, Vec, CryptoHash), } + +use std::fmt; + +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for Node { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let node_string = match self { + Node::Hash(hash) => format!("Hash(HASH[{}])", hex::encode(hash)), + Node::KVHash(kv_hash) => format!("KVHash(HASH[{}])", hex::encode(kv_hash)), + Node::KV(key, value) => { + format!("KV({}, {})", hex_to_ascii(key), hex_to_ascii(value)) + } + Node::KVValueHash(key, value, value_hash) => format!( + "KVValueHash({}, {}, HASH[{}])", + hex_to_ascii(key), + hex_to_ascii(value), + hex::encode(value_hash) + ), + Node::KVDigest(key, value_hash) => format!( + "KVDigest({}, HASH[{}])", + hex_to_ascii(key), + hex::encode(value_hash) + ), + Node::KVRefValueHash(key, value, value_hash) => format!( + "KVRefValueHash({}, {}, HASH[{}])", + hex_to_ascii(key), + hex_to_ascii(value), + hex::encode(value_hash) + ), + Node::KVValueHashFeatureType(key, value, value_hash, feature_type) => format!( + "KVValueHashFeatureType({}, {}, HASH[{}], {:?})", + hex_to_ascii(key), + hex_to_ascii(value), + hex::encode(value_hash), + feature_type + ), + }; + write!(f, "{}", node_string) + } +} + +fn hex_to_ascii(hex_value: &[u8]) -> String { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) +} diff --git a/merk/src/proofs/query/mod.rs b/merk/src/proofs/query/mod.rs index 
8ab86ea1..ff271ce1 100644 --- a/merk/src/proofs/query/mod.rs +++ b/merk/src/proofs/query/mod.rs @@ -44,7 +44,7 @@ mod verify; #[cfg(any(feature = "full", feature = "verify"))] use std::cmp::Ordering; -use std::{collections::HashSet, ops::RangeFull}; +use std::{collections::HashSet, fmt, ops::RangeFull}; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_costs::{cost_return_on_error, CostContext, CostResult, CostsExt, OperationCost}; @@ -109,6 +109,53 @@ pub struct Query { pub left_to_right: bool, } +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for SubqueryBranch { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "SubqueryBranch {{ ")?; + if let Some(path) = &self.subquery_path { + write!(f, "subquery_path: [")?; + for (i, element) in path.iter().enumerate() { + if i > 0 { + write!(f, ", ")? + } + write!(f, "{}", hex::encode(element))?; + } + write!(f, "], ")?; + } + if let Some(subquery) = &self.subquery { + write!(f, "subquery: {} ", subquery)?; + } + write!(f, "}}") + } +} + +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for Query { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "Query {{")?; + writeln!(f, " items: [")?; + for item in &self.items { + writeln!(f, " {},", item)?; + } + writeln!(f, " ],")?; + writeln!( + f, + " default_subquery_branch: {},", + self.default_subquery_branch + )?; + if let Some(conditional_branches) = &self.conditional_subquery_branches { + writeln!(f, " conditional_subquery_branches: {{")?; + for (item, branch) in conditional_branches { + writeln!(f, " {}: {},", item, branch)?; + } + writeln!(f, " }},")?; + } + writeln!(f, " left_to_right: {},", self.left_to_right)?; + write!(f, "}}") + } +} + #[cfg(any(feature = "full", feature = "verify"))] impl Query { /// Creates a new query which contains no items. 
diff --git a/merk/src/proofs/query/query_item/mod.rs b/merk/src/proofs/query/query_item/mod.rs index e950df81..a637c6b6 100644 --- a/merk/src/proofs/query/query_item/mod.rs +++ b/merk/src/proofs/query/query_item/mod.rs @@ -5,6 +5,7 @@ mod merge; use std::{ cmp, cmp::Ordering, + fmt, hash::Hash, ops::{Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive}, }; @@ -33,6 +34,54 @@ pub enum QueryItem { RangeAfterToInclusive(RangeInclusive>), } +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for QueryItem { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + QueryItem::Key(key) => write!(f, "Key({})", hex_to_ascii(key)), + QueryItem::Range(range) => write!( + f, + "Range({} .. {})", + hex_to_ascii(&range.start), + hex_to_ascii(&range.end) + ), + QueryItem::RangeInclusive(range) => write!( + f, + "RangeInclusive({} ..= {})", + hex_to_ascii(range.start()), + hex_to_ascii(range.end()) + ), + QueryItem::RangeFull(_) => write!(f, "RangeFull"), + QueryItem::RangeFrom(range) => { + write!(f, "RangeFrom({} ..)", hex_to_ascii(&range.start)) + } + QueryItem::RangeTo(range) => write!(f, "RangeTo(.. {})", hex_to_ascii(&range.end)), + QueryItem::RangeToInclusive(range) => { + write!(f, "RangeToInclusive(..= {})", hex_to_ascii(&range.end)) + } + QueryItem::RangeAfter(range) => { + write!(f, "RangeAfter({} <..)", hex_to_ascii(&range.start)) + } + QueryItem::RangeAfterTo(range) => write!( + f, + "RangeAfterTo({} <.. 
{})", + hex_to_ascii(&range.start), + hex_to_ascii(&range.end) + ), + QueryItem::RangeAfterToInclusive(range) => write!( + f, + "RangeAfterToInclusive({} <..= {})", + hex_to_ascii(range.start()), + hex_to_ascii(range.end()) + ), + } + } +} + +fn hex_to_ascii(hex_value: &[u8]) -> String { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) +} + #[cfg(any(feature = "full", feature = "verify"))] impl Hash for QueryItem { fn hash(&self, state: &mut H) { @@ -295,11 +344,13 @@ impl QueryItem { iter.seek(end).flat_map(|_| iter.prev()) } } - QueryItem::RangeInclusive(range_inclusive) => iter.seek(if left_to_right { - range_inclusive.start() - } else { - range_inclusive.end() - }), + QueryItem::RangeInclusive(range_inclusive) => { + if left_to_right { + iter.seek(range_inclusive.start()) + } else { + iter.seek_for_prev(range_inclusive.end()) + } + } QueryItem::RangeFull(..) => { if left_to_right { iter.seek_to_first() diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs index 74186069..1abf8d16 100644 --- a/merk/src/proofs/query/verify.rs +++ b/merk/src/proofs/query/verify.rs @@ -233,24 +233,32 @@ pub fn execute_proof( Ok(()) }; - if let Node::KV(key, value) = node { - println!("going into kv"); - execute_node(key, Some(value), value_hash(value).unwrap())?; - } else if let Node::KVValueHash(key, value, value_hash) = node { - println!("going into kv hash"); - execute_node(key, Some(value), *value_hash)?; - } else if let Node::KVDigest(key, value_hash) = node { - println!("going into kv digest"); - execute_node(key, None, *value_hash)?; - } else if let Node::KVRefValueHash(key, value, value_hash) = node { - println!("going into kv ref value hash"); - execute_node(key, Some(value), *value_hash)?; - } else if in_range { - // we encountered a queried range but the proof was abridged (saw a - // non-KV push), we are missing some part of the range - return Err(Error::InvalidProofError( - "Proof is missing data for query for 
range".to_string(), - )); + match node { + Node::KV(key, value) => { + println!("Processing KV node"); + execute_node(key, Some(value), value_hash(value).unwrap())?; + } + Node::KVValueHash(key, value, value_hash) => { + println!("Processing KVValueHash node"); + execute_node(key, Some(value), *value_hash)?; + } + Node::KVDigest(key, value_hash) => { + println!("Processing KVDigest node"); + execute_node(key, None, *value_hash)?; + } + Node::KVRefValueHash(key, value, value_hash) => { + println!("Processing KVRefValueHash node"); + execute_node(key, Some(value), *value_hash)?; + } + Node::Hash(_) | Node::KVHash(_) | Node::KVValueHashFeatureType(..) => { + if in_range { + return Err(Error::InvalidProofError(format!( + "Proof is missing data for query range. Encountered unexpected node type: \ + {}", + node + ))); + } + } } last_push = Some(node.clone()); From b4428761754f37da3616fecea857357a5ee6d294 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Fri, 5 Jul 2024 08:48:19 +0700 Subject: [PATCH 08/34] more work on proofs --- grovedb/src/operations/proof/generate.rs | 285 ++++++++++++++--------- grovedb/src/operations/proof/util.rs | 32 +++ grovedb/src/operations/proof/verify.rs | 137 ++++++----- grovedb/src/tests/mod.rs | 10 +- merk/src/proofs/query/verify.rs | 14 ++ 5 files changed, 305 insertions(+), 173 deletions(-) diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index fc04cd37..8fa97efb 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -29,11 +29,21 @@ use crate::{ Element, Error, GroveDb, PathQuery, }; -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, Encode, Decode)] pub struct ProveOptions { pub decrease_limit_on_empty_sub_query_result: bool, } +impl fmt::Display for ProveOptions { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "ProveOptions {{ decrease_limit_on_empty_sub_query_result: {} }}", + 
self.decrease_limit_on_empty_sub_query_result + ) + } +} + impl Default for ProveOptions { fn default() -> Self { ProveOptions { @@ -56,6 +66,7 @@ pub enum GroveDBProof { #[derive(Encode, Decode)] pub struct GroveDBProofV0 { pub root_layer: LayerProof, + pub prove_options: ProveOptions, } impl fmt::Display for LayerProof { @@ -273,16 +284,15 @@ impl GroveDb { let root_layer = cost_return_on_error!( &mut cost, - self.prove_subqueries( - vec![], - path_query, - &mut limit, - Some(precomputed_result_map), - &prove_options - ) + self.prove_subqueries(vec![], path_query, &mut limit, &prove_options) ); - Ok(GroveDBProofV0 { root_layer }.into()).wrap_with_cost(cost) + Ok(GroveDBProofV0 { + root_layer, + prove_options, + } + .into()) + .wrap_with_cost(cost) } /// Perform a pre-order traversal of the tree based on the provided @@ -292,7 +302,6 @@ impl GroveDb { path: Vec<&[u8]>, path_query: &PathQuery, overall_limit: &mut Option, - mut layer_precomputed_results: Option, prove_options: &ProveOptions, ) -> CostResult { let mut cost = OperationCost::default(); @@ -335,42 +344,18 @@ impl GroveDb { *overall_limit }; - let (merk_proof, sub_level_keys, results_found) = cost_return_on_error!( + let mut merk_proof = cost_return_on_error!( &mut cost, - self.generate_merk_proof( - &path.as_slice().into(), - &subtree, - &query_at_path, - left_to_right, - has_subqueries, - limit, - ) + self.generate_merk_proof(&subtree, &query_at_path, left_to_right, limit,) ); - if prove_options.decrease_limit_on_empty_sub_query_result - && sub_level_keys.is_empty() - && results_found == 0 - { - // In this case we should reduce by 1 to prevent attacks on proofs - overall_limit.as_mut().map(|limit| *limit -= 1); - } else if results_found > 0 { - overall_limit.as_mut().map(|limit| *limit -= results_found); - }; - println!( - "generated merk proof at level path level [{}] sub level keys found are [{}], limit \ - is {:?}, results found were {}, has subqueries {}, {}", + "generated merk proof at level path 
level [{}], limit is {:?}, has subqueries {}, {}", path.iter() .map(|a| hex_to_ascii(*a)) .collect::>() .join("/"), - sub_level_keys - .iter() - .map(|a| hex_to_ascii(a)) - .collect::>() - .join(", "), overall_limit, - results_found, has_subqueries, if left_to_right { "left to right" @@ -379,61 +364,148 @@ impl GroveDb { } ); - let lower_layers = cost_return_on_error_no_add!( - &cost, - sub_level_keys - .into_iter() - .map_while(|(key)| { - // Check if we should stop after processing this key - if *overall_limit == Some(0) { - return None; - } - let mut lower_path = path.clone(); - lower_path.push(key.as_slice()); - let mut early_exit = false; - let lower_known_layer: Option = - match layer_precomputed_results - .as_mut() - .and_then(|mut layer_precomputed_results| { - layer_precomputed_results.key_values.remove(&key).and_then( - |result_or_item| match result_or_item { - BTreeMapLevelResultOrItem::BTreeMapLevelResult(value) => { - Some(Ok(value)) - } - _ => { - early_exit = true; - None - } - }, - ) - }) - .transpose() - { - Ok(lower_known_layer) => lower_known_layer, - Err(e) => return Some(Some(Err(e))), - }; - if early_exit { - return Some(None); + let mut lower_layers = BTreeMap::new(); + + let mut has_a_result_at_level = false; + let mut done_with_results = false; + + for op in merk_proof.proof.iter_mut() { + done_with_results |= overall_limit == &Some(0); + match op { + Op::Push(node) | Op::PushInverted(node) => match node { + Node::KV(key, value) | Node::KVValueHash(key, value, ..) 
+ if !done_with_results => + { + let elem = Element::deserialize(value); + match elem { + Ok(Element::Reference(reference_path, ..)) => { + let absolute_path = cost_return_on_error!( + &mut cost, + path_from_reference_path_type( + reference_path, + &path.to_vec(), + Some(key.as_slice()) + ) + .wrap_with_cost(OperationCost::default()) + ); + + let referenced_elem = cost_return_on_error!( + &mut cost, + self.follow_reference( + absolute_path.as_slice().into(), + true, + None + ) + ); + + let serialized_referenced_elem = referenced_elem.serialize(); + if serialized_referenced_elem.is_err() { + return Err(Error::CorruptedData(String::from( + "unable to serialize element", + ))) + .wrap_with_cost(cost); + } + + *node = Node::KVRefValueHash( + key.to_owned(), + serialized_referenced_elem.expect("confirmed ok above"), + value_hash(value).unwrap_add_cost(&mut cost), + ); + overall_limit.as_mut().map(|limit| *limit -= 1); + has_a_result_at_level |= true; + } + Ok(Element::Item(..)) if !done_with_results => { + println!("found {}", hex_to_ascii(key)); + *node = Node::KV(key.to_owned(), value.to_owned()); + overall_limit.as_mut().map(|limit| *limit -= 1); + has_a_result_at_level |= true; + } + Ok(Element::Tree(Some(_), _)) if !done_with_results => { + println!("found tree {}", hex_to_ascii(key)); + // We only want to check in sub nodes for the proof if the tree has + // elements + let mut lower_path = path.clone(); + lower_path.push(key.as_slice()); + + let previous_limit = *overall_limit; + + let layer_proof = cost_return_on_error!( + &mut cost, + self.prove_subqueries( + lower_path, + path_query, + overall_limit, + prove_options, + ) + ); + + if previous_limit != *overall_limit { + // a lower layer updated the limit, don't subtract 1 at this + // level + has_a_result_at_level |= true; + } + lower_layers.insert(key.clone(), layer_proof); + } + Ok(Element::SumTree(Some(_), ..)) if !done_with_results => { + // We only want to check in sub nodes for the proof if the tree has + 
// elements + let mut lower_path = path.clone(); + lower_path.push(key.as_slice()); + + let layer_proof = cost_return_on_error!( + &mut cost, + self.prove_subqueries( + lower_path, + path_query, + overall_limit, + prove_options, + ) + ); + lower_layers.insert(key.clone(), layer_proof); + if !has_subqueries { + overall_limit.as_mut().map(|limit| *limit -= 1); + has_a_result_at_level |= true; + } + } + Ok(Element::Tree(None, _)) | Ok(Element::SumTree(None, ..)) + if !done_with_results => + { + if !has_subqueries { + overall_limit.as_mut().map(|limit| *limit -= 1); + has_a_result_at_level |= true; + } + } + // todo: transform the unused trees into a Hash or KVHash to make proof + // smaller Ok(Element::Tree(..)) if + // done_with_results => { *node = + // Node::Hash() // we are done with the + // results, we can modify the proof to alter + // } + _ => continue, + } } - let result = self - .prove_subqueries( - lower_path, - path_query, - overall_limit, - lower_known_layer, - prove_options, - ) - .unwrap_add_cost(&mut cost) - .map(|layer_proof| (key, layer_proof)); - - Some(Some(result)) - }) - .filter_map(|a| a) - .collect::, Error>>() - ); + _ => continue, + }, + _ => continue, + } + } + + if !has_a_result_at_level && !done_with_results { + println!( + "no results at level {}", + path.iter() + .map(|a| hex_to_ascii(*a)) + .collect::>() + .join("/") + ); + overall_limit.as_mut().map(|limit| *limit -= 1); + } + + let mut serialized_merk_proof = Vec::with_capacity(1024); + encode_into(merk_proof.proof.iter(), &mut serialized_merk_proof); Ok(LayerProof { - merk_proof, + merk_proof: serialized_merk_proof, lower_layers, }) .wrap_with_cost(cost) @@ -441,28 +513,21 @@ impl GroveDb { /// Generates query proof given a subtree and appends the result to a proof /// list - fn generate_merk_proof<'a, S, B>( + fn generate_merk_proof<'a, S>( &self, - path: &SubtreePath, subtree: &'a Merk, query_items: &Vec, left_to_right: bool, - has_any_subquery: bool, limit: Option, - ) -> 
CostResult<(Vec, Vec>, u16), Error> + ) -> CostResult where S: StorageContext<'a> + 'a, - B: AsRef<[u8]>, { - let mut cost = OperationCost::default(); - - let mut proof_result = cost_return_on_error_no_add!( - &cost, - subtree - .prove_unchecked_query_items(query_items, limit, left_to_right) - .map_ok(|(proof, limit)| ProofWithoutEncodingResult::new(proof, limit)) - .unwrap() - .map_err(|e| Error::InternalError(format!( + subtree + .prove_unchecked_query_items(query_items, limit, left_to_right) + .map_ok(|(proof, limit)| ProofWithoutEncodingResult::new(proof, limit)) + .map_err(|e| { + Error::InternalError(format!( "failed to generate proof for query_items [{}] error is : {}", query_items .iter() @@ -470,18 +535,8 @@ impl GroveDb { .collect::>() .join(", "), e - ))) - ); - - let (tree_keys, results_found) = cost_return_on_error!( - &mut cost, - self.post_process_merk_proof(path, has_any_subquery, &mut proof_result) - ); - - let mut proof_bytes = Vec::with_capacity(128); - encode_into(proof_result.proof.iter(), &mut proof_bytes); - - Ok((proof_bytes, tree_keys, results_found)).wrap_with_cost(cost) + )) + }) } /// Converts Items to Node::KV from Node::KVValueHash diff --git a/grovedb/src/operations/proof/util.rs b/grovedb/src/operations/proof/util.rs index 6873ce05..c68e1afe 100644 --- a/grovedb/src/operations/proof/util.rs +++ b/grovedb/src/operations/proof/util.rs @@ -11,6 +11,8 @@ use grovedb_merk::{ #[cfg(any(feature = "full", feature = "verify"))] use integer_encoding::{VarInt, VarIntReader}; +use crate::Element; + #[cfg(any(feature = "full", feature = "verify"))] pub type ProvedKeyValues = Vec; @@ -31,6 +33,36 @@ pub struct ProvedPathKeyValue { pub proof: CryptoHash, } +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for ProvedPathKeyValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "ProvedPathKeyValue {{\n")?; + write!( + f, + " path: [{}],\n", + self.path + .iter() + .map(|p| hex_to_ascii(p)) + .collect::>() 
+ .join(", ") + )?; + write!(f, " key: {},\n", hex_to_ascii(&self.key))?; + write!(f, " value: {},\n", element_hex_to_ascii(&self.value))?; + write!(f, " proof: {}\n", hex::encode(self.proof))?; + write!(f, "}}") + } +} + +fn element_hex_to_ascii(hex_value: &[u8]) -> String { + Element::deserialize(hex_value) + .map(|e| e.to_string()) + .unwrap_or_else(|_| hex::encode(hex_value)) +} + +fn hex_to_ascii(hex_value: &[u8]) -> String { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) +} + impl ProvedPathKeyValue { // TODO: make path a reference /// Consumes the ProvedKeyValue and returns a ProvedPathKeyValue given a diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 1163a424..938766a5 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -10,6 +10,7 @@ use crate::{ operations::proof::{ generate::{GroveDBProof, GroveDBProofV0, LayerProof}, util::{ProvedPathKeyValue, ProvedPathKeyValues}, + ProveOptions, }, query_result_type::PathKeyOptionalElementTrio, Element, Error, GroveDb, PathQuery, @@ -66,8 +67,14 @@ impl GroveDb { is_subset: bool, ) -> Result<([u8; 32], Vec), Error> { let mut result = Vec::new(); - let root_hash = - Self::verify_layer_proof(&proof.root_layer, query, &[], &mut result, is_subset)?; + let root_hash = Self::verify_layer_proof( + &proof.root_layer, + &proof.prove_options, + query, + &[], + &mut result, + is_subset, + )?; Ok((root_hash, result)) } @@ -92,6 +99,7 @@ impl GroveDb { let mut limit = query.query.limit; let root_hash = Self::verify_layer_proof_raw( &proof.root_layer, + &proof.prove_options, query, &mut limit, &[], @@ -103,6 +111,7 @@ impl GroveDb { fn verify_layer_proof( layer_proof: &LayerProof, + prove_options: &ProveOptions, query: &PathQuery, current_path: &[&[u8]], result: &mut Vec, @@ -151,8 +160,14 @@ impl GroveDb { verified_keys.insert(key.clone()); if let Some(lower_layer) = 
layer_proof.lower_layers.get(&key) { - let lower_hash = - Self::verify_layer_proof(lower_layer, query, &path, result, is_subset)?; + let lower_hash = Self::verify_layer_proof( + lower_layer, + prove_options, + query, + &path, + result, + is_subset, + )?; if lower_hash != value_hash(&value).value { return Err(Error::InvalidProof("Mismatch in lower layer hash".into())); } @@ -176,6 +191,7 @@ impl GroveDb { fn verify_layer_proof_raw( layer_proof: &LayerProof, + prove_options: &ProveOptions, query: &PathQuery, limit_left: &mut Option, current_path: &[&[u8]], @@ -215,65 +231,78 @@ impl GroveDb { Error::InvalidProof(format!("invalid proof verification parameters: {}", e)) })?; - let mut verified_keys = BTreeSet::new(); - - for proved_key_value in merk_result.result_set { - let mut path = current_path.to_vec(); - let key = &proved_key_value.key; - let value = &proved_key_value.value; - let element = Element::deserialize(value)?; - let hash = &proved_key_value.proof; - path.push(key); + println!("merk result is {}", merk_result); - verified_keys.insert(key.clone()); + let mut verified_keys = BTreeSet::new(); - if let Some(lower_layer) = layer_proof.lower_layers.get(key) { - match element { - Element::Tree(Some(v), _) | Element::SumTree(Some(v), ..) 
=> { - let lower_hash = Self::verify_layer_proof_raw( - lower_layer, - query, - limit_left, - &path, - result, - is_subset, - )?; - let combined_root_hash = - combine_hash(value_hash(value).value(), &lower_hash) - .value() - .to_owned(); - if hash != &combined_root_hash { - return Err(Error::InvalidProof(format!( - "Mismatch in lower layer hash, expected {}, got {}", - hex::encode(hash), - hex::encode(combined_root_hash) - ))); + if merk_result.result_set.is_empty() { + limit_left.as_mut().map(|limit| *limit -= 1); + } else { + for proved_key_value in merk_result.result_set { + let mut path = current_path.to_vec(); + let key = &proved_key_value.key; + let value = &proved_key_value.value; + let element = Element::deserialize(value)?; + let hash = &proved_key_value.proof; + path.push(key); + + verified_keys.insert(key.clone()); + + if let Some(lower_layer) = layer_proof.lower_layers.get(key) { + match element { + Element::Tree(Some(v), _) | Element::SumTree(Some(v), ..) => { + let lower_hash = Self::verify_layer_proof_raw( + lower_layer, + prove_options, + query, + limit_left, + &path, + result, + is_subset, + )?; + let combined_root_hash = + combine_hash(value_hash(value).value(), &lower_hash) + .value() + .to_owned(); + if hash != &combined_root_hash { + return Err(Error::InvalidProof(format!( + "Mismatch in lower layer hash, expected {}, got {}", + hex::encode(hash), + hex::encode(combined_root_hash) + ))); + } + if limit_left == &Some(0) { + break; + } + } + Element::Tree(None, _) + | Element::SumTree(None, ..) + | Element::SumItem(..) + | Element::Item(..) + | Element::Reference(..) => { + return Err(Error::InvalidProof( + "Proof has lower layer for a non Tree".into(), + )); } } - Element::Tree(None, _) - | Element::SumTree(None, ..) - | Element::SumItem(..) - | Element::Item(..) - | Element::Reference(..) 
=> { - return Err(Error::InvalidProof( - "Proof has lower layer for a non Tree".into(), - )); + } else if !in_path_proving { + let path_key_value = ProvedPathKeyValue::from_proved_key_value( + path.iter().map(|p| p.to_vec()).collect(), + proved_key_value, + ); + limit_left.as_mut().map(|limit| *limit -= 1); + if limit_left == &Some(0) { + break; } + println!( + "pushing {} limit left after is {:?}", + &path_key_value, limit_left + ); + result.push(path_key_value); } - } else if !in_path_proving { - let path_key_value = ProvedPathKeyValue::from_proved_key_value( - path.iter().map(|p| p.to_vec()).collect(), - proved_key_value, - ); - result.push(path_key_value); } } - // if !is_subset { - // // Verify completeness only if not doing subset verification - // self.verify_completeness(&query_items, &merk_result.result_set, - // current_path)?; } - Ok(root_hash) } diff --git a/grovedb/src/tests/mod.rs b/grovedb/src/tests/mod.rs index b1cf1daf..9b58e692 100644 --- a/grovedb/src/tests/mod.rs +++ b/grovedb/src/tests/mod.rs @@ -1519,7 +1519,7 @@ mod tests { 04711010a696e6e65727472656532850198ebd6dc7e1c82951c41fcfa6487711cac6a399ebb01b\ b979cbe4a51e0b2f08d06046b6579340009000676616c75653100bf2f052b01c2bb83ff3a40504\ d42b5b9141c582a3e0c98679189b33a24478a6f1006046b6579350009000676616c75653400f08\ - 4ffdbc429a89c9b6620e7224d73c2ee505eb7e6fb5eb574e1a8dc8b0d08841100" + 4ffdbc429a89c9b6620e7224d73c2ee505eb7e6fb5eb574e1a8dc8b0d0884110001" ); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); @@ -1650,7 +1650,7 @@ mod tests { b657932004910536da659a3dbdbcf68c4a6630e72de4ba20cfc60b08b3dd45b4225a599b60109696e6\ e6572747265655503046b6579310009000676616c7565310002018655e18e4555b0b65bbcec64c749d\ b6b9ad84231969fb4fbe769a3093d10f2100198ebd6dc7e1c82951c41fcfa6487711cac6a399ebb01b\ - b979cbe4a51e0b2f08d1100" + b979cbe4a51e0b2f08d110001" ); let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), 
&path_query).expect("should execute proof"); @@ -2105,7 +2105,8 @@ mod tests { let path_query = PathQuery::new( vec![DEEP_LEAF.to_vec()], - SizedQuery::new(query, Some(3), None), + SizedQuery::new(query, Some(6), None), /* we need to add a bigger limit because of + * empty proved sub trees */ ); let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = @@ -2313,7 +2314,8 @@ mod tests { let path_query = PathQuery::new( vec![DEEP_LEAF.to_vec()], - SizedQuery::new(query, Some(4), None), + SizedQuery::new(query, Some(7), None), /* we need 7 because of intermediate empty + * trees in proofs */ ); let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs index 1abf8d16..55ff5231 100644 --- a/merk/src/proofs/query/verify.rs +++ b/merk/src/proofs/query/verify.rs @@ -337,6 +337,20 @@ pub struct ProofVerificationResult { pub limit: Option, } +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for ProofVerificationResult { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "ProofVerificationResult {{")?; + writeln!(f, " result_set: [")?; + for (index, proved_key_value) in self.result_set.iter().enumerate() { + writeln!(f, " {}: {},", index, proved_key_value)?; + } + writeln!(f, " ],")?; + writeln!(f, " limit: {:?}", self.limit)?; + write!(f, "}}") + } +} + #[cfg(any(feature = "full", feature = "verify"))] /// Verifies the encoded proof with the given query and expected hash pub fn verify_query( From 1e1c97ecf97ffe048f7b5ce0ff1b06d64d067764 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Fri, 5 Jul 2024 21:16:22 +0700 Subject: [PATCH 09/34] more work --- grovedb/src/operations/proof/generate.rs | 46 +++------ grovedb/src/operations/proof/verify.rs | 37 ++++--- grovedb/src/query/mod.rs | 121 +++++++++++++++-------- grovedb/src/tests/mod.rs | 4 +- merk/src/proofs/query/mod.rs | 2 
+- merk/src/proofs/query/verify.rs | 10 ++ 6 files changed, 129 insertions(+), 91 deletions(-) diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index 8fa97efb..139fe676 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -306,13 +306,16 @@ impl GroveDb { ) -> CostResult { let mut cost = OperationCost::default(); - let (query_at_path, left_to_right, has_subqueries) = cost_return_on_error_no_add!( + let query = cost_return_on_error_no_add!( &cost, path_query .query_items_at_path(path.as_slice()) .ok_or(Error::CorruptedPath(format!( "prove subqueries: path {} should be part of path_query {}", - path.iter().map(hex::encode).collect::>().join("/"), + path.iter() + .map(|a| hex_to_ascii(*a)) + .collect::>() + .join("/"), path_query ))) ); @@ -346,18 +349,17 @@ impl GroveDb { let mut merk_proof = cost_return_on_error!( &mut cost, - self.generate_merk_proof(&subtree, &query_at_path, left_to_right, limit,) + self.generate_merk_proof(&subtree, &query.items, query.left_to_right, limit) ); println!( - "generated merk proof at level path level [{}], limit is {:?}, has subqueries {}, {}", + "generated merk proof at level path level [{}], limit is {:?}, {}", path.iter() .map(|a| hex_to_ascii(*a)) .collect::>() .join("/"), overall_limit, - has_subqueries, - if left_to_right { + if query.left_to_right { "left to right" } else { "right to left" @@ -420,7 +422,9 @@ impl GroveDb { overall_limit.as_mut().map(|limit| *limit -= 1); has_a_result_at_level |= true; } - Ok(Element::Tree(Some(_), _)) if !done_with_results => { + Ok(Element::Tree(Some(_), _)) | Ok(Element::SumTree(Some(_), ..)) + if !done_with_results && query.has_subquery_on_key(key) => + { println!("found tree {}", hex_to_ascii(key)); // We only want to check in sub nodes for the proof if the tree has // elements @@ -446,34 +450,12 @@ impl GroveDb { } lower_layers.insert(key.clone(), layer_proof); } - 
Ok(Element::SumTree(Some(_), ..)) if !done_with_results => { - // We only want to check in sub nodes for the proof if the tree has - // elements - let mut lower_path = path.clone(); - lower_path.push(key.as_slice()); - let layer_proof = cost_return_on_error!( - &mut cost, - self.prove_subqueries( - lower_path, - path_query, - overall_limit, - prove_options, - ) - ); - lower_layers.insert(key.clone(), layer_proof); - if !has_subqueries { - overall_limit.as_mut().map(|limit| *limit -= 1); - has_a_result_at_level |= true; - } - } - Ok(Element::Tree(None, _)) | Ok(Element::SumTree(None, ..)) + Ok(Element::Tree(..)) | Ok(Element::SumTree(..)) if !done_with_results => { - if !has_subqueries { - overall_limit.as_mut().map(|limit| *limit -= 1); - has_a_result_at_level |= true; - } + overall_limit.as_mut().map(|limit| *limit -= 1); + has_a_result_at_level |= true; } // todo: transform the unused trees into a Hash or KVHash to make proof // smaller Ok(Element::Tree(..)) if diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 938766a5..664113e0 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -117,7 +117,7 @@ impl GroveDb { result: &mut Vec, is_subset: bool, ) -> Result<[u8; 32], Error> { - let (query_items, left_to_right, _) = + let internal_query = query .query_items_at_path(current_path) .ok_or(Error::CorruptedPath(format!( @@ -131,17 +131,19 @@ impl GroveDb { )))?; let level_query = Query { - items: query_items.to_vec(), - default_subquery_branch: Default::default(), - conditional_subquery_branches: None, - left_to_right, + items: internal_query.items.to_vec(), + default_subquery_branch: internal_query.default_subquery_branch.into_owned(), + conditional_subquery_branches: internal_query + .conditional_subquery_branches + .map(|a| a.into_owned()), + left_to_right: internal_query.left_to_right, }; let (root_hash, merk_result) = execute_proof( &layer_proof.merk_proof, 
&level_query, Some(layer_proof.lower_layers.len() as u16), - left_to_right, + internal_query.left_to_right, ) .unwrap() .map_err(|e| { @@ -199,7 +201,7 @@ impl GroveDb { is_subset: bool, ) -> Result<[u8; 32], Error> { let in_path_proving = current_path.len() < query.path.len(); - let (query_items, left_to_right, _) = + let internal_query = query .query_items_at_path(current_path) .ok_or(Error::CorruptedPath(format!( @@ -213,17 +215,19 @@ impl GroveDb { )))?; let level_query = Query { - items: query_items.to_vec(), - default_subquery_branch: Default::default(), - conditional_subquery_branches: None, - left_to_right, + items: internal_query.items.to_vec(), + default_subquery_branch: internal_query.default_subquery_branch.into_owned(), + conditional_subquery_branches: internal_query + .conditional_subquery_branches + .map(|a| a.into_owned()), + left_to_right: internal_query.left_to_right, }; let (root_hash, merk_result) = execute_proof( &layer_proof.merk_proof, &level_query, *limit_left, - left_to_right, + internal_query.left_to_right, ) .unwrap() .map_err(|e| { @@ -290,15 +294,16 @@ impl GroveDb { path.iter().map(|p| p.to_vec()).collect(), proved_key_value, ); - limit_left.as_mut().map(|limit| *limit -= 1); - if limit_left == &Some(0) { - break; - } println!( "pushing {} limit left after is {:?}", &path_key_value, limit_left ); result.push(path_key_value); + + limit_left.as_mut().map(|limit| *limit -= 1); + if limit_left == &Some(0) { + break; + } } } } diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index 2a8ad6d4..05a5bdff 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -7,6 +7,7 @@ use grovedb_merk::proofs::query::query_item::QueryItem; use grovedb_merk::proofs::query::SubqueryBranch; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_merk::proofs::Query; +use indexmap::IndexMap; #[cfg(any(feature = "full", feature = "verify"))] use crate::query_result_type::PathKey; @@ -280,20 +281,13 @@ impl PathQuery { } } - 
pub fn query_items_at_path<'a>( - &'a self, - path: &[&[u8]], - ) -> Option<(Cow<'a, Vec>, LeftToRight, HasSubqueries)> { + pub fn query_items_at_path<'a>(&'a self, path: &[&[u8]]) -> Option { fn recursive_query_items<'b>( query: &'b Query, path: &[&[u8]], - ) -> Option<(Cow<'b, Vec>, LeftToRight, HasSubqueries)> { + ) -> Option> { if path.is_empty() { - return Some(( - Cow::Borrowed(&query.items), - query.left_to_right, - query.has_subquery(), - )); + return Some(InternalCowItemsQuery::from_query(query)); } let key = path[0]; @@ -311,21 +305,15 @@ impl PathQuery { { return if path_after_top_removed.len() == subquery_path.len() { if let Some(subquery) = &subquery_branch.subquery { - Some(( - Cow::Borrowed(&subquery.items), - subquery.left_to_right, - subquery.has_subquery(), - )) + Some(InternalCowItemsQuery::from_query(subquery)) } else { None } } else { - Some(( + Some(InternalCowItemsQuery::from_items_when_in_path( Cow::Owned(vec![QueryItem::Key( subquery_path[path_after_top_removed.len()].clone(), )]), - true, - false, )) }; } @@ -335,7 +323,9 @@ impl PathQuery { return if let Some(subquery) = &subquery_branch.subquery { recursive_query_items(subquery, &path[1..]) } else { - Some((Cow::Owned(vec![QueryItem::Key(key.to_vec())]), true, false)) + Some(InternalCowItemsQuery::from_items_when_in_path(Cow::Owned( + vec![QueryItem::Key(key.to_vec())], + ))) }; } } @@ -350,22 +340,16 @@ impl PathQuery { { return if path_after_top_removed.len() == subquery_path.len() { if let Some(subquery) = &query.default_subquery_branch.subquery { - Some(( - Cow::Borrowed(&subquery.items), - subquery.left_to_right, - subquery.has_subquery(), - )) + Some(InternalCowItemsQuery::from_query(subquery)) } else { None } } else { - Some(( - Cow::Owned(vec![QueryItem::Key( + Some(InternalCowItemsQuery::from_items_when_in_path(Cow::Owned( + vec![QueryItem::Key( subquery_path[path_after_top_removed.len()].clone(), - )]), - true, - false, - )) + )], + ))) }; } } else if path_after_top_removed @@ 
-391,22 +375,16 @@ impl PathQuery { match given_path_len.cmp(&self_path_len) { Ordering::Less => { if path.iter().zip(&self.path).all(|(a, b)| *a == b.as_slice()) { - Some(( - Cow::Owned(vec![QueryItem::Key(self.path[given_path_len].clone())]), - true, - false, - )) + Some(InternalCowItemsQuery::from_items_when_in_path(Cow::Owned( + vec![QueryItem::Key(self.path[given_path_len].clone())], + ))) } else { None } } Ordering::Equal => { if path.iter().zip(&self.path).all(|(a, b)| *a == b.as_slice()) { - Some(( - Cow::Borrowed(&self.query.query.items), - self.query.query.left_to_right, - self.query.query.has_subquery(), - )) + Some(InternalCowItemsQuery::from_path_query(self)) } else { None } @@ -421,6 +399,69 @@ impl PathQuery { } } +/// This represents a query where the items might be borrowed, it is used to get +/// subquery information +#[cfg(any(feature = "full", feature = "verify"))] +#[derive(Debug, Default, Clone, PartialEq)] +pub(crate) struct InternalCowItemsQuery<'a> { + /// Items + pub items: Cow<'a, Vec>, + /// Default subquery branch + pub default_subquery_branch: Cow<'a, SubqueryBranch>, + /// Conditional subquery branches + pub conditional_subquery_branches: Option>>, + /// Left to right? 
+ pub left_to_right: bool, + /// In the path of the path_query, or in a subquery path + pub in_path: bool, +} + +impl<'a> InternalCowItemsQuery<'a> { + /// Checks to see if we have a subquery on a specific key + pub fn has_subquery_on_key(&self, key: &[u8]) -> bool { + if self.default_subquery_branch.subquery.is_some() + || self.default_subquery_branch.subquery_path.is_some() + { + return true; + } + if let Some(conditional_subquery_branches) = self.conditional_subquery_branches.as_ref() { + for query_item in conditional_subquery_branches.keys() { + if query_item.contains(key) { + return true; + } + } + } + return false; + } + + pub fn from_items_when_in_path(items: Cow>) -> InternalCowItemsQuery { + InternalCowItemsQuery { + items, + default_subquery_branch: Default::default(), + conditional_subquery_branches: None, + left_to_right: true, + in_path: true, + } + } + + pub fn from_path_query(path_query: &PathQuery) -> InternalCowItemsQuery { + Self::from_query(&path_query.query.query) + } + + pub fn from_query(query: &Query) -> InternalCowItemsQuery { + InternalCowItemsQuery { + items: Cow::Borrowed(&query.items), + default_subquery_branch: Cow::Borrowed(&query.default_subquery_branch), + conditional_subquery_branches: query + .conditional_subquery_branches + .as_ref() + .map(|conditional_subquery_branches| Cow::Borrowed(conditional_subquery_branches)), + left_to_right: query.left_to_right, + in_path: false, + } + } +} + #[cfg(feature = "full")] #[cfg(test)] mod tests { diff --git a/grovedb/src/tests/mod.rs b/grovedb/src/tests/mod.rs index 9b58e692..db7fabca 100644 --- a/grovedb/src/tests/mod.rs +++ b/grovedb/src/tests/mod.rs @@ -2105,7 +2105,7 @@ mod tests { let path_query = PathQuery::new( vec![DEEP_LEAF.to_vec()], - SizedQuery::new(query, Some(6), None), /* we need to add a bigger limit because of + SizedQuery::new(query, Some(5), None), /* we need to add a bigger limit because of * empty proved sub trees */ ); let proof = temp_db.prove_query(&path_query, 
None).unwrap().unwrap(); @@ -2314,7 +2314,7 @@ mod tests { let path_query = PathQuery::new( vec![DEEP_LEAF.to_vec()], - SizedQuery::new(query, Some(7), None), /* we need 7 because of intermediate empty + SizedQuery::new(query, Some(6), None), /* we need 6 because of intermediate empty * trees in proofs */ ); let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); diff --git a/merk/src/proofs/query/mod.rs b/merk/src/proofs/query/mod.rs index ff271ce1..ddc1a832 100644 --- a/merk/src/proofs/query/mod.rs +++ b/merk/src/proofs/query/mod.rs @@ -44,7 +44,7 @@ mod verify; #[cfg(any(feature = "full", feature = "verify"))] use std::cmp::Ordering; -use std::{collections::HashSet, fmt, ops::RangeFull}; +use std::{borrow::Cow, collections::HashSet, fmt, ops::RangeFull}; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_costs::{cost_return_on_error, CostContext, CostResult, CostsExt, OperationCost}; diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs index 55ff5231..4098a051 100644 --- a/merk/src/proofs/query/verify.rs +++ b/merk/src/proofs/query/verify.rs @@ -54,6 +54,16 @@ pub fn execute_proof( limit: Option, left_to_right: bool, ) -> CostResult<(MerkHash, ProofVerificationResult), Error> { + println!( + "executing proof with limit {:?} going {} using query {}", + limit, + if left_to_right { + "left to right" + } else { + "right to left" + }, + query + ); let mut cost = OperationCost::default(); let mut output = Vec::with_capacity(query.len()); From 40b4cda93f9a09bc96fceb07ce75d102800cc5e3 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Fri, 5 Jul 2024 22:24:17 +0700 Subject: [PATCH 10/34] more fixes --- grovedb/src/operations/proof/generate.rs | 9 ++++++--- grovedb/src/query/mod.rs | 3 ++- grovedb/src/tests/mod.rs | 4 ++-- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index 139fe676..7de3be7a 100644 --- 
a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -254,7 +254,7 @@ impl GroveDb { self.query_raw( path_query, false, - true, + prove_options.decrease_limit_on_empty_sub_query_result, false, QueryResultType::QueryPathKeyElementTrioResultType, None @@ -269,7 +269,7 @@ impl GroveDb { self.query_raw( path_query, false, - true, + prove_options.decrease_limit_on_empty_sub_query_result, false, QueryResultType::QueryPathKeyElementTrioResultType, None @@ -472,7 +472,10 @@ impl GroveDb { } } - if !has_a_result_at_level && !done_with_results { + if !has_a_result_at_level + && !done_with_results + && prove_options.decrease_limit_on_empty_sub_query_result + { println!( "no results at level {}", path.iter() diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index 05a5bdff..ab7c06ea 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -419,7 +419,8 @@ pub(crate) struct InternalCowItemsQuery<'a> { impl<'a> InternalCowItemsQuery<'a> { /// Checks to see if we have a subquery on a specific key pub fn has_subquery_on_key(&self, key: &[u8]) -> bool { - if self.default_subquery_branch.subquery.is_some() + if self.in_path + || self.default_subquery_branch.subquery.is_some() || self.default_subquery_branch.subquery_path.is_some() { return true; diff --git a/grovedb/src/tests/mod.rs b/grovedb/src/tests/mod.rs index db7fabca..8f937381 100644 --- a/grovedb/src/tests/mod.rs +++ b/grovedb/src/tests/mod.rs @@ -2244,7 +2244,7 @@ mod tests { // Now test without a limit to compare let path_query_no_limit = PathQuery::new( - vec![DEEP_LEAF.to_vec(), b"deep_node_2".to_vec()], + vec![DEEP_LEAF.to_vec(), b"deep_node_1".to_vec()], SizedQuery::new(main_query.clone(), None, None), ); @@ -2255,7 +2255,7 @@ mod tests { match verification_result_no_limit { Ok((hash, result_set)) => { assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5, "Expected 5 results without limit"); + 
assert_eq!(result_set.len(), 29, "Expected 29 results without limit"); } Err(e) => { panic!("Proof verification failed (no limit): {:?}", e); From e3a477d29372d87f9c0d87bae4908150ebd398a8 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Sun, 7 Jul 2024 01:42:36 +0700 Subject: [PATCH 11/34] fmt --- grovedb/src/error.rs | 17 + grovedb/src/operations/proof/generate.rs | 12 +- grovedb/src/operations/proof/util.rs | 204 +- grovedb/src/operations/proof/verify.rs | 400 +- grovedb/src/query_result_type.rs | 22 +- grovedb/src/tests/common.rs | 61 +- grovedb/src/tests/mod.rs | 1 - grovedb/src/tests/query_tests.rs | 4563 +++++++++++----------- merk/src/lib.rs | 4 - merk/src/proofs/query/mod.rs | 903 ++--- merk/src/proofs/query/verify.rs | 598 +-- 11 files changed, 3390 insertions(+), 3395 deletions(-) diff --git a/grovedb/src/error.rs b/grovedb/src/error.rs index 71a7e01e..c430c5ae 100644 --- a/grovedb/src/error.rs +++ b/grovedb/src/error.rs @@ -1,9 +1,14 @@ //! GroveDB Errors +use std::convert::Infallible; + /// GroveDB Errors #[cfg(any(feature = "full", feature = "verify"))] #[derive(Debug, thiserror::Error)] pub enum Error { + #[error("infallible")] + /// This error can not happen, used for generics + Infallible, // Input data errors #[error("cyclic reference path")] /// Cyclic reference @@ -143,3 +148,15 @@ pub enum Error { /// Merk error MerkError(grovedb_merk::error::Error), } + +impl From for Error { + fn from(_value: Infallible) -> Self { + Self::Infallible + } +} + +impl From for Error { + fn from(value: grovedb_merk::Error) -> Self { + Error::MerkError(value) + } +} diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index 7de3be7a..db491d1f 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -1,9 +1,6 @@ //! 
Generate proof operations -use std::{ - collections::{BTreeMap, BTreeSet}, - fmt, -}; +use std::{collections::BTreeMap, fmt}; use bincode::{Decode, Encode}; use derive_more::From; @@ -15,7 +12,7 @@ use grovedb_merk::{ proofs::{ encode_into, query::{Key, QueryItem}, - Decoder, Node, Op, Tree, + Decoder, Node, Op, }, tree::value_hash, Merk, ProofWithoutEncodingResult, @@ -24,9 +21,8 @@ use grovedb_path::SubtreePath; use grovedb_storage::StorageContext; use crate::{ - query_result_type::{BTreeMapLevelResult, BTreeMapLevelResultOrItem, QueryResultType}, - reference_path::path_from_reference_path_type, - Element, Error, GroveDb, PathQuery, + query_result_type::QueryResultType, reference_path::path_from_reference_path_type, Element, + Error, GroveDb, PathQuery, }; #[derive(Debug, Clone, Copy, Encode, Decode)] diff --git a/grovedb/src/operations/proof/util.rs b/grovedb/src/operations/proof/util.rs index c68e1afe..9d86eb28 100644 --- a/grovedb/src/operations/proof/util.rs +++ b/grovedb/src/operations/proof/util.rs @@ -1,24 +1,62 @@ use std::fmt; -#[cfg(any(feature = "full", feature = "verify"))] -use std::io::Read; -#[cfg(feature = "full")] -use std::io::Write; use grovedb_merk::{ - proofs::query::{Key, Path, ProvedKeyValue}, - CryptoHash, + proofs::query::{Key, Path, ProvedKeyOptionalValue, ProvedKeyValue}, + CryptoHash, Error, }; -#[cfg(any(feature = "full", feature = "verify"))] -use integer_encoding::{VarInt, VarIntReader}; use crate::Element; #[cfg(any(feature = "full", feature = "verify"))] pub type ProvedKeyValues = Vec; -/// Proved path-key-values +#[cfg(any(feature = "full", feature = "verify"))] +pub type ProvedKeyOptionalValues = Vec; + +#[cfg(any(feature = "full", feature = "verify"))] pub type ProvedPathKeyValues = Vec; +#[cfg(any(feature = "full", feature = "verify"))] +pub type ProvedPathKeyOptionalValues = Vec; + +/// Proved path-key-value +#[cfg(any(feature = "full", feature = "verify"))] +#[derive(Debug, PartialEq, Eq)] +pub struct 
ProvedPathKeyOptionalValue { + /// Path + pub path: Path, + /// Key + pub key: Key, + /// Value + pub value: Option>, + /// Proof + pub proof: CryptoHash, +} + +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for ProvedPathKeyOptionalValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "ProvedPathKeyValue {{\n")?; + write!( + f, + " path: [{}],\n", + self.path + .iter() + .map(|p| hex_to_ascii(p)) + .collect::>() + .join(", ") + )?; + write!(f, " key: {},\n", hex_to_ascii(&self.key))?; + write!( + f, + " value: {},\n", + optional_element_hex_to_ascii(self.value.as_ref()) + )?; + write!(f, " proof: {}\n", hex::encode(self.proof))?; + write!(f, "}}") + } +} + /// Proved path-key-value #[cfg(any(feature = "full", feature = "verify"))] #[derive(Debug, PartialEq, Eq)] @@ -34,7 +72,7 @@ pub struct ProvedPathKeyValue { } #[cfg(any(feature = "full", feature = "verify"))] -impl fmt::Display for ProvedPathKeyValue { +impl fmt::Display for crate::operations::proof::util::ProvedPathKeyValue { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ProvedPathKeyValue {{\n")?; write!( @@ -47,12 +85,66 @@ impl fmt::Display for ProvedPathKeyValue { .join(", ") )?; write!(f, " key: {},\n", hex_to_ascii(&self.key))?; - write!(f, " value: {},\n", element_hex_to_ascii(&self.value))?; + write!( + f, + " value: {},\n", + element_hex_to_ascii(self.value.as_ref()) + )?; write!(f, " proof: {}\n", hex::encode(self.proof))?; write!(f, "}}") } } +impl From for ProvedPathKeyOptionalValue { + fn from(value: ProvedPathKeyValue) -> Self { + let ProvedPathKeyValue { + path, + key, + value, + proof, + } = value; + + ProvedPathKeyOptionalValue { + path, + key, + value: Some(value), + proof, + } + } +} + +impl TryFrom for ProvedPathKeyValue { + type Error = Error; + + fn try_from(value: ProvedPathKeyOptionalValue) -> Result { + let ProvedPathKeyOptionalValue { + path, + key, + value, + proof, + } = value; + let value = 
value.ok_or(Error::InvalidProofError(format!( + "expected {}", + hex_to_ascii(&key) + )))?; + Ok(ProvedPathKeyValue { + path, + key, + value, + proof, + }) + } +} + +fn optional_element_hex_to_ascii(hex_value: Option<&Vec>) -> String { + match hex_value { + None => "None".to_string(), + Some(hex_value) => Element::deserialize(hex_value) + .map(|e| e.to_string()) + .unwrap_or_else(|_| hex::encode(hex_value)), + } +} + fn element_hex_to_ascii(hex_value: &[u8]) -> String { Element::deserialize(hex_value) .map(|e| e.to_string()) @@ -86,28 +178,54 @@ impl ProvedPathKeyValue { } } +impl ProvedPathKeyOptionalValue { + // TODO: make path a reference + /// Consumes the ProvedKeyValue and returns a ProvedPathKeyValue given a + /// Path + pub fn from_proved_key_value(path: Path, proved_key_value: ProvedKeyOptionalValue) -> Self { + Self { + path, + key: proved_key_value.key, + value: proved_key_value.value, + proof: proved_key_value.proof, + } + } + + /// Transforms multiple ProvedKeyValues to their equivalent + /// ProvedPathKeyValue given a Path + pub fn from_proved_key_values( + path: Path, + proved_key_values: ProvedKeyOptionalValues, + ) -> Vec { + proved_key_values + .into_iter() + .map(|pkv| Self::from_proved_key_value(path.clone(), pkv)) + .collect() + } +} + #[cfg(test)] mod tests { - use grovedb_merk::proofs::query::ProvedKeyValue; + use grovedb_merk::proofs::query::ProvedKeyOptionalValue; - use crate::operations::proof::util::ProvedPathKeyValue; + use crate::operations::proof::util::ProvedPathKeyOptionalValue; #[test] fn test_proved_path_from_single_proved_key_value() { let path = vec![b"1".to_vec(), b"2".to_vec()]; - let proved_key_value = ProvedKeyValue { + let proved_key_value = ProvedKeyOptionalValue { key: b"a".to_vec(), - value: vec![5, 6], + value: Some(vec![5, 6]), proof: [0; 32], }; let proved_path_key_value = - ProvedPathKeyValue::from_proved_key_value(path.clone(), proved_key_value); + ProvedPathKeyOptionalValue::from_proved_key_value(path.clone(), 
proved_key_value); assert_eq!( proved_path_key_value, - ProvedPathKeyValue { + ProvedPathKeyOptionalValue { path, key: b"a".to_vec(), - value: vec![5, 6], + value: Some(vec![5, 6]), proof: [0; 32] } ); @@ -116,49 +234,69 @@ mod tests { #[test] fn test_many_proved_path_from_many_proved_key_value() { let path = vec![b"1".to_vec(), b"2".to_vec()]; - let proved_key_value_a = ProvedKeyValue { + let proved_key_value_a = ProvedKeyOptionalValue { key: b"a".to_vec(), - value: vec![5, 6], + value: Some(vec![5, 6]), proof: [0; 32], }; - let proved_key_value_b = ProvedKeyValue { + let proved_key_value_b = ProvedKeyOptionalValue { key: b"b".to_vec(), - value: vec![5, 7], + value: Some(vec![5, 7]), proof: [1; 32], }; - let proved_key_value_c = ProvedKeyValue { + let proved_key_value_c = ProvedKeyOptionalValue { key: b"c".to_vec(), - value: vec![6, 7], + value: Some(vec![6, 7]), proof: [2; 32], }; - let proved_key_values = vec![proved_key_value_a, proved_key_value_b, proved_key_value_c]; + let proved_key_value_d = ProvedKeyOptionalValue { + key: b"d".to_vec(), + value: None, + proof: [2; 32], + }; + let proved_key_values = vec![ + proved_key_value_a, + proved_key_value_b, + proved_key_value_c, + proved_key_value_d, + ]; let proved_path_key_values = - ProvedPathKeyValue::from_proved_key_values(path.clone(), proved_key_values); + ProvedPathKeyOptionalValue::from_proved_key_values(path.clone(), proved_key_values); assert_eq!(proved_path_key_values.len(), 3); assert_eq!( proved_path_key_values[0], - ProvedPathKeyValue { + ProvedPathKeyOptionalValue { path: path.clone(), key: b"a".to_vec(), - value: vec![5, 6], + value: Some(vec![5, 6]), proof: [0; 32] } ); assert_eq!( proved_path_key_values[1], - ProvedPathKeyValue { + ProvedPathKeyOptionalValue { path: path.clone(), key: b"b".to_vec(), - value: vec![5, 7], + value: Some(vec![5, 7]), proof: [1; 32] } ); assert_eq!( proved_path_key_values[2], - ProvedPathKeyValue { - path, + ProvedPathKeyOptionalValue { + path: path.clone(), key: 
b"c".to_vec(), - value: vec![6, 7], + value: Some(vec![6, 7]), + proof: [2; 32] + } + ); + + assert_eq!( + proved_path_key_values[2], + ProvedPathKeyOptionalValue { + path, + key: b"d".to_vec(), + value: None, proof: [2; 32] } ); diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 664113e0..883d7c53 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -1,15 +1,14 @@ use std::collections::BTreeSet; use grovedb_merk::{ - execute_proof, - proofs::Query, + proofs::{query::VerifyOptions, Query}, tree::{combine_hash, value_hash}, }; use crate::{ operations::proof::{ generate::{GroveDBProof, GroveDBProofV0, LayerProof}, - util::{ProvedPathKeyValue, ProvedPathKeyValues}, + util::{ProvedPathKeyOptionalValue, ProvedPathKeyValues}, ProveOptions, }, query_result_type::PathKeyOptionalElementTrio, @@ -17,9 +16,10 @@ use crate::{ }; impl GroveDb { - pub fn verify_query( + pub fn verify_query_with_options( proof: &[u8], query: &PathQuery, + options: VerifyOptions, ) -> Result<([u8; 32], Vec), Error> { let config = bincode::config::standard() .with_big_endian() @@ -28,7 +28,7 @@ impl GroveDb { .map_err(|e| Error::CorruptedData(format!("unable to decode proof: {}", e)))? .0; - let (root_hash, result) = Self::verify_proof_internal(&grovedb_proof, query, false)?; + let (root_hash, result) = Self::verify_proof_internal(&grovedb_proof, query, options)?; Ok((root_hash, result)) } @@ -44,7 +44,14 @@ impl GroveDb { .map_err(|e| Error::CorruptedData(format!("unable to decode proof: {}", e)))? 
.0; - let (root_hash, result) = Self::verify_proof_raw_internal(&grovedb_proof, query, false)?; + let (root_hash, result) = Self::verify_proof_raw_internal( + &grovedb_proof, + query, + VerifyOptions { + absence_proofs_for_non_existing_searched_keys: false, + verify_proof_succinctness: false, + }, + )?; Ok((root_hash, result)) } @@ -52,28 +59,28 @@ impl GroveDb { fn verify_proof_internal( proof: &GroveDBProof, query: &PathQuery, - is_subset: bool, + options: VerifyOptions, ) -> Result<([u8; 32], Vec), Error> { match proof { - GroveDBProof::V0(proof_v0) => { - Self::verify_proof_internal_v0(proof_v0, query, is_subset) - } + GroveDBProof::V0(proof_v0) => Self::verify_proof_internal_v0(proof_v0, query, options), } } fn verify_proof_internal_v0( proof: &GroveDBProofV0, query: &PathQuery, - is_subset: bool, + options: VerifyOptions, ) -> Result<([u8; 32], Vec), Error> { let mut result = Vec::new(); + let mut limit = query.query.limit; let root_hash = Self::verify_layer_proof( &proof.root_layer, &proof.prove_options, query, + &mut limit, &[], &mut result, - is_subset, + &options, )?; Ok((root_hash, result)) } @@ -81,11 +88,11 @@ impl GroveDb { fn verify_proof_raw_internal( proof: &GroveDBProof, query: &PathQuery, - is_subset: bool, + options: VerifyOptions, ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { match proof { GroveDBProof::V0(proof_v0) => { - Self::verify_proof_raw_internal_v0(proof_v0, query, is_subset) + Self::verify_proof_raw_internal_v0(proof_v0, query, options) } } } @@ -93,113 +100,118 @@ impl GroveDb { fn verify_proof_raw_internal_v0( proof: &GroveDBProofV0, query: &PathQuery, - is_subset: bool, + options: VerifyOptions, ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { let mut result = Vec::new(); let mut limit = query.query.limit; - let root_hash = Self::verify_layer_proof_raw( + let root_hash = Self::verify_layer_proof( &proof.root_layer, &proof.prove_options, query, &mut limit, &[], &mut result, - is_subset, + &options, )?; Ok((root_hash, 
result)) } - fn verify_layer_proof( - layer_proof: &LayerProof, - prove_options: &ProveOptions, - query: &PathQuery, - current_path: &[&[u8]], - result: &mut Vec, - is_subset: bool, - ) -> Result<[u8; 32], Error> { - let internal_query = - query - .query_items_at_path(current_path) - .ok_or(Error::CorruptedPath(format!( - "verify: path {} should be part of path_query {}", - current_path - .iter() - .map(hex::encode) - .collect::>() - .join("/"), - query - )))?; - - let level_query = Query { - items: internal_query.items.to_vec(), - default_subquery_branch: internal_query.default_subquery_branch.into_owned(), - conditional_subquery_branches: internal_query - .conditional_subquery_branches - .map(|a| a.into_owned()), - left_to_right: internal_query.left_to_right, - }; - - let (root_hash, merk_result) = execute_proof( - &layer_proof.merk_proof, - &level_query, - Some(layer_proof.lower_layers.len() as u16), - internal_query.left_to_right, - ) - .unwrap() - .map_err(|e| { - eprintln!("{e}"); - Error::InvalidProof(format!("invalid proof verification parameters: {}", e)) - })?; - - let mut verified_keys = BTreeSet::new(); - - for proved_key_value in merk_result.result_set { - let mut path = current_path.to_vec(); - let key = proved_key_value.key; - let value = proved_key_value.value; - path.push(&key); - - verified_keys.insert(key.clone()); - - if let Some(lower_layer) = layer_proof.lower_layers.get(&key) { - let lower_hash = Self::verify_layer_proof( - lower_layer, - prove_options, - query, - &path, - result, - is_subset, - )?; - if lower_hash != value_hash(&value).value { - return Err(Error::InvalidProof("Mismatch in lower layer hash".into())); - } - } else { - let element = Element::deserialize(&value)?; - result.push(( - path.iter().map(|p| p.to_vec()).collect(), - key, - Some(element), - )); - } - } - - // if !is_subset { - // // Verify completeness only if not doing subset verification - // self.verify_completeness(&query_items, &merk_result.result_set, - // 
current_path)?; } - - Ok(root_hash) - } + // fn verify_layer_proof( + // layer_proof: &LayerProof, + // prove_options: &ProveOptions, + // query: &PathQuery, + // current_path: &[&[u8]], + // result: &mut Vec, + // options: &VerifyOptions, + // ) -> Result<[u8; 32], Error> { + // let internal_query = + // query + // .query_items_at_path(current_path) + // .ok_or(Error::CorruptedPath(format!( + // "verify: path {} should be part of path_query {}", + // current_path + // .iter() + // .map(hex::encode) + // .collect::>() + // .join("/"), + // query + // )))?; + // + // let level_query = Query { + // items: internal_query.items.to_vec(), + // default_subquery_branch: + // internal_query.default_subquery_branch.into_owned(), + // conditional_subquery_branches: internal_query + // .conditional_subquery_branches + // .map(|a| a.into_owned()), + // left_to_right: internal_query.left_to_right, + // }; + // + // let (root_hash, merk_result) = execute_proof( + // &layer_proof.merk_proof, + // &level_query, + // Some(layer_proof.lower_layers.len() as u16), + // internal_query.left_to_right, + // ) + // .unwrap() + // .map_err(|e| { + // eprintln!("{e}"); + // Error::InvalidProof(format!("invalid proof verification parameters: + // {}", e)) })?; + // + // let mut verified_keys = BTreeSet::new(); + // + // for proved_key_value in merk_result.result_set { + // let mut path = current_path.to_vec(); + // let key = proved_key_value.key; + // let value = proved_key_value.value; + // path.push(&key); + // + // verified_keys.insert(key.clone()); + // + // if let Some(lower_layer) = layer_proof.lower_layers.get(&key) { + // let lower_hash = Self::verify_layer_proof( + // lower_layer, + // prove_options, + // query, + // &path, + // result, + // options, + // )?; + // if lower_hash != value_hash(&value).value { + // return Err(Error::InvalidProof("Mismatch in lower layer + // hash".into())); } + // } else { + // let element = Element::deserialize(&value)?; + // result.push(( + // 
path.iter().map(|p| p.to_vec()).collect(), + // key, + // Some(element), + // )); + // } + // } + // + // // if !is_subset { + // // // Verify completeness only if not doing subset verification + // // self.verify_completeness(&query_items, &merk_result.result_set, + // // current_path)?; } + // + // Ok(root_hash) + // } - fn verify_layer_proof_raw( + fn verify_layer_proof( layer_proof: &LayerProof, prove_options: &ProveOptions, query: &PathQuery, limit_left: &mut Option, current_path: &[&[u8]], - result: &mut ProvedPathKeyValues, - is_subset: bool, - ) -> Result<[u8; 32], Error> { + result: &mut Vec, + options: &VerifyOptions, + ) -> Result<[u8; 32], Error> + where + T: TryFrom, + Error: From<>::Error>, + { let in_path_proving = current_path.len() < query.path.len(); let internal_query = query @@ -223,17 +235,17 @@ impl GroveDb { left_to_right: internal_query.left_to_right, }; - let (root_hash, merk_result) = execute_proof( - &layer_proof.merk_proof, - &level_query, - *limit_left, - internal_query.left_to_right, - ) - .unwrap() - .map_err(|e| { - eprintln!("{e}"); - Error::InvalidProof(format!("invalid proof verification parameters: {}", e)) - })?; + let (root_hash, merk_result) = level_query + .execute_proof( + &layer_proof.merk_proof, + *limit_left, + internal_query.left_to_right, + ) + .unwrap() + .map_err(|e| { + eprintln!("{e}"); + Error::InvalidProof(format!("invalid proof verification parameters: {}", e)) + })?; println!("merk result is {}", merk_result); @@ -245,65 +257,68 @@ impl GroveDb { for proved_key_value in merk_result.result_set { let mut path = current_path.to_vec(); let key = &proved_key_value.key; - let value = &proved_key_value.value; - let element = Element::deserialize(value)?; let hash = &proved_key_value.proof; - path.push(key); - - verified_keys.insert(key.clone()); - - if let Some(lower_layer) = layer_proof.lower_layers.get(key) { - match element { - Element::Tree(Some(v), _) | Element::SumTree(Some(v), ..) 
=> { - let lower_hash = Self::verify_layer_proof_raw( - lower_layer, - prove_options, - query, - limit_left, - &path, - result, - is_subset, - )?; - let combined_root_hash = - combine_hash(value_hash(value).value(), &lower_hash) - .value() - .to_owned(); - if hash != &combined_root_hash { - return Err(Error::InvalidProof(format!( - "Mismatch in lower layer hash, expected {}, got {}", - hex::encode(hash), - hex::encode(combined_root_hash) - ))); + if let Some(value_bytes) = &proved_key_value.value { + let element = Element::deserialize(value_bytes)?; + + path.push(key); + + verified_keys.insert(key.clone()); + + if let Some(lower_layer) = layer_proof.lower_layers.get(key) { + match element { + Element::Tree(Some(_), _) | Element::SumTree(Some(_), ..) => { + let lower_hash = Self::verify_layer_proof( + lower_layer, + prove_options, + query, + limit_left, + &path, + result, + options, + )?; + let combined_root_hash = + combine_hash(value_hash(value_bytes).value(), &lower_hash) + .value() + .to_owned(); + if hash != &combined_root_hash { + return Err(Error::InvalidProof(format!( + "Mismatch in lower layer hash, expected {}, got {}", + hex::encode(hash), + hex::encode(combined_root_hash) + ))); + } + if limit_left == &Some(0) { + break; + } } - if limit_left == &Some(0) { - break; + Element::Tree(None, _) + | Element::SumTree(None, ..) + | Element::SumItem(..) + | Element::Item(..) + | Element::Reference(..) => { + return Err(Error::InvalidProof( + "Proof has lower layer for a non Tree".into(), + )); } } - Element::Tree(None, _) - | Element::SumTree(None, ..) - | Element::SumItem(..) - | Element::Item(..) - | Element::Reference(..) 
=> { - return Err(Error::InvalidProof( - "Proof has lower layer for a non Tree".into(), - )); + } else if !in_path_proving { + let path_key_optional_value = + ProvedPathKeyOptionalValue::from_proved_key_value( + path.iter().map(|p| p.to_vec()).collect(), + proved_key_value, + ); + println!( + "pushing {} limit left after is {:?}", + &path_key_optional_value, limit_left + ); + result.push(path_key_optional_value.try_into()?); + + limit_left.as_mut().map(|limit| *limit -= 1); + if limit_left == &Some(0) { + break; } } - } else if !in_path_proving { - let path_key_value = ProvedPathKeyValue::from_proved_key_value( - path.iter().map(|p| p.to_vec()).collect(), - proved_key_value, - ); - println!( - "pushing {} limit left after is {:?}", - &path_key_value, limit_left - ); - result.push(path_key_value); - - limit_left.as_mut().map(|limit| *limit -= 1); - if limit_left == &Some(0) { - break; - } } } } @@ -440,36 +455,59 @@ impl GroveDb { // new_key // } - pub fn verify_subset_query( + pub fn verify_query( proof: &[u8], query: &PathQuery, ) -> Result<([u8; 32], Vec), Error> { - let config = bincode::config::standard() - .with_big_endian() - .with_no_limit(); - let grovedb_proof: GroveDBProof = bincode::decode_from_slice(proof, config) - .map_err(|e| Error::CorruptedData(format!("unable to decode proof: {}", e)))? 
- .0; - - let (root_hash, result) = Self::verify_proof_internal(&grovedb_proof, query, true)?; + Self::verify_query_with_options( + proof, + query, + VerifyOptions { + absence_proofs_for_non_existing_searched_keys: false, + verify_proof_succinctness: true, + }, + ) + } - Ok((root_hash, result)) + pub fn verify_subset_query( + proof: &[u8], + query: &PathQuery, + ) -> Result<([u8; 32], Vec), Error> { + Self::verify_query_with_options( + proof, + query, + VerifyOptions { + absence_proofs_for_non_existing_searched_keys: false, + verify_proof_succinctness: false, + }, + ) } pub fn verify_query_with_absence_proof( proof: &[u8], query: &PathQuery, ) -> Result<([u8; 32], Vec), Error> { - // This is now handled within verify_proof_internal - Self::verify_query(proof, query) + Self::verify_query_with_options( + proof, + query, + VerifyOptions { + absence_proofs_for_non_existing_searched_keys: true, + verify_proof_succinctness: true, + }, + ) } pub fn verify_subset_query_with_absence_proof( proof: &[u8], query: &PathQuery, ) -> Result<([u8; 32], Vec), Error> { - // Subset queries don't verify absence, so this is the same as - // verify_subset_query - Self::verify_subset_query(proof, query) + Self::verify_query_with_options( + proof, + query, + VerifyOptions { + absence_proofs_for_non_existing_searched_keys: true, + verify_proof_succinctness: false, + }, + ) } } diff --git a/grovedb/src/query_result_type.rs b/grovedb/src/query_result_type.rs index 5f89fdda..e046d5b3 100644 --- a/grovedb/src/query_result_type.rs +++ b/grovedb/src/query_result_type.rs @@ -8,7 +8,10 @@ use std::{ pub use grovedb_merk::proofs::query::{Key, Path, PathKey}; -use crate::{operations::proof::util::ProvedPathKeyValue, Element, Error}; +use crate::{ + operations::proof::util::{ProvedPathKeyOptionalValue, ProvedPathKeyValue}, + Element, Error, +}; #[derive(Copy, Clone)] /// Query result type @@ -522,6 +525,23 @@ impl TryFrom for PathKeyOptionalElementTrio { } } +#[cfg(any(feature = "full", feature = 
"verify"))] +impl TryFrom for PathKeyOptionalElementTrio { + type Error = Error; + + fn try_from(proved_path_key_value: ProvedPathKeyOptionalValue) -> Result { + let element = proved_path_key_value + .value + .map(|e| Element::deserialize(e.as_slice())) + .transpose()?; + Ok(( + proved_path_key_value.path, + proved_path_key_value.key, + element, + )) + } +} + #[cfg(feature = "full")] #[cfg(test)] mod tests { diff --git a/grovedb/src/tests/common.rs b/grovedb/src/tests/common.rs index 7cd43bd4..367cde5b 100644 --- a/grovedb/src/tests/common.rs +++ b/grovedb/src/tests/common.rs @@ -1,36 +1,11 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Common tests use grovedb_path::SubtreePath; -use crate::{operations::proof::util::ProvedPathKeyValues, Element, Error}; +use crate::{ + operations::proof::util::{ProvedPathKeyOptionalValues, ProvedPathKeyValues}, + Element, Error, +}; /// Compare result tuples pub fn compare_result_tuples( @@ -44,6 +19,17 @@ pub fn compare_result_tuples( } } +pub fn compare_optional_value_result_tuples( + result_set: ProvedPathKeyOptionalValues, + expected_result_set: Vec<(Vec, Option>)>, +) { + assert_eq!(expected_result_set.len(), result_set.len()); + for i in 0..expected_result_set.len() { + assert_eq!(expected_result_set[i].0, result_set[i].key); + assert_eq!(expected_result_set[i].1, result_set[i].value); + } +} + fn deserialize_and_extract_item_bytes(raw_bytes: &[u8]) -> Result, Error> { let elem = Element::deserialize(raw_bytes)?; match elem { @@ -62,4 +48,21 @@ pub fn compare_result_sets(elements: &Vec>, result_set: &ProvedPathKeyVa } } +/// Compare result sets +pub fn compare_optional_value_result_sets( + elements: &Vec>>, + result_set: &ProvedPathKeyOptionalValues, +) { + for i in 0..elements.len() { + assert_eq!( + result_set[i] + .value + .as_ref() + .map(|a| deserialize_and_extract_item_bytes(a) + .expect("expected to extract item bytes")), + elements[i] + ) + } +} + pub(crate) const EMPTY_PATH: SubtreePath<'static, [u8; 0]> = SubtreePath::empty(); diff --git a/grovedb/src/tests/mod.rs b/grovedb/src/tests/mod.rs index 8f937381..43a9c34d 100644 --- a/grovedb/src/tests/mod.rs +++ b/grovedb/src/tests/mod.rs @@ -19,7 +19,6 @@ use tempfile::TempDir; use self::common::EMPTY_PATH; use super::*; use crate::{ - operations::proof::ProveOptions, query_result_type::{QueryResultType, QueryResultType::QueryKeyElementPairResultType}, reference_path::ReferencePathType, tests::common::compare_result_tuples, diff --git a/grovedb/src/tests/query_tests.rs b/grovedb/src/tests/query_tests.rs index 04998bfa..4aa58e00 100644 --- a/grovedb/src/tests/query_tests.rs +++ 
b/grovedb/src/tests/query_tests.rs @@ -1,131 +1,164 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - -//! 
Query tests - -use grovedb_merk::proofs::{query::QueryItem, Query}; -use rand::Rng; -use tempfile::TempDir; - -use crate::{ - batch::GroveDbOp, - operations::proof::ProveOptions, - query_result_type::{PathKeyOptionalElementTrio, QueryResultType}, - reference_path::ReferencePathType, - tests::{ - common::compare_result_sets, make_deep_tree, make_test_grovedb, TempGroveDb, - ANOTHER_TEST_LEAF, TEST_LEAF, - }, - Element, GroveDb, PathQuery, SizedQuery, -}; - -fn populate_tree_for_non_unique_range_subquery(db: &TempGroveDb) { - // Insert a couple of subtrees first - for i in 1985u32..2000 { - let i_vec = i.to_be_bytes().to_vec(); - db.insert( - [TEST_LEAF].as_ref(), - &i_vec, - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - // Insert element 0 - // Insert some elements into subtree - db.insert( - [TEST_LEAF, i_vec.as_slice()].as_ref(), - b"\0", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); +mod tests { + //! 
Query tests + + use grovedb_merk::proofs::{query::QueryItem, Query}; + use rand::Rng; + use tempfile::TempDir; + + use crate::{ + batch::GroveDbOp, + query_result_type::QueryResultType, + reference_path::ReferencePathType, + tests::{ + common::compare_result_sets, make_deep_tree, make_test_grovedb, TempGroveDb, TEST_LEAF, + }, + Element, GroveDb, PathQuery, SizedQuery, + }; - for j in 100u32..150 { - let mut j_vec = i_vec.clone(); - j_vec.append(&mut j.to_be_bytes().to_vec()); + fn populate_tree_for_non_unique_range_subquery(db: &TempGroveDb) { + // Insert a couple of subtrees first + for i in 1985u32..2000 { + let i_vec = i.to_be_bytes().to_vec(); db.insert( - [TEST_LEAF, i_vec.as_slice(), b"\0"].as_ref(), - &j_vec.clone(), - Element::new_item(j_vec), + [TEST_LEAF].as_ref(), + &i_vec, + Element::empty_tree(), None, None, ) .unwrap() - .expect("successful value insert"); + .expect("successful subtree insert"); + // Insert element 0 + // Insert some elements into subtree + db.insert( + [TEST_LEAF, i_vec.as_slice()].as_ref(), + b"\0", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + for j in 100u32..150 { + let mut j_vec = i_vec.clone(); + j_vec.append(&mut j.to_be_bytes().to_vec()); + db.insert( + [TEST_LEAF, i_vec.as_slice(), b"\0"].as_ref(), + &j_vec.clone(), + Element::new_item(j_vec), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + } + } + } + + fn populate_tree_for_non_unique_double_range_subquery(db: &TempGroveDb) { + // Insert a couple of subtrees first + for i in 0u32..10 { + let i_vec = i.to_be_bytes().to_vec(); + db.insert( + [TEST_LEAF].as_ref(), + &i_vec, + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + // Insert element 0 + // Insert some elements into subtree + db.insert( + [TEST_LEAF, i_vec.as_slice()].as_ref(), + b"a", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + for j in 
25u32..50 { + let j_vec = j.to_be_bytes().to_vec(); + db.insert( + [TEST_LEAF, i_vec.as_slice(), b"a"].as_ref(), + &j_vec, + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + + // Insert element 0 + // Insert some elements into subtree + db.insert( + [TEST_LEAF, i_vec.as_slice(), b"a", j_vec.as_slice()].as_ref(), + b"\0", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + for k in 100u32..110 { + let k_vec = k.to_be_bytes().to_vec(); + db.insert( + [TEST_LEAF, i_vec.as_slice(), b"a", &j_vec, b"\0"].as_ref(), + &k_vec.clone(), + Element::new_item(k_vec), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + } + } } } -} -fn populate_tree_for_non_unique_double_range_subquery(db: &TempGroveDb) { - // Insert a couple of subtrees first - for i in 0u32..10 { - let i_vec = i.to_be_bytes().to_vec(); + fn populate_tree_by_reference_for_non_unique_range_subquery(db: &TempGroveDb) { + // This subtree will be holding values db.insert( [TEST_LEAF].as_ref(), - &i_vec, + b"\0", Element::empty_tree(), None, None, ) .unwrap() .expect("successful subtree insert"); - // Insert element 0 - // Insert some elements into subtree + + // This subtree will be holding references db.insert( - [TEST_LEAF, i_vec.as_slice()].as_ref(), - b"a", + [TEST_LEAF].as_ref(), + b"1", Element::empty_tree(), None, None, ) .unwrap() .expect("successful subtree insert"); - - for j in 25u32..50 { - let j_vec = j.to_be_bytes().to_vec(); + // Insert a couple of subtrees first + for i in 1985u32..2000 { + let i_vec = i.to_be_bytes().to_vec(); db.insert( - [TEST_LEAF, i_vec.as_slice(), b"a"].as_ref(), - &j_vec, + [TEST_LEAF, b"1"].as_ref(), + &i_vec, Element::empty_tree(), None, None, ) .unwrap() - .expect("successful value insert"); - + .expect("successful subtree insert"); // Insert element 0 // Insert some elements into subtree db.insert( - [TEST_LEAF, i_vec.as_slice(), b"a", 
j_vec.as_slice()].as_ref(), + [TEST_LEAF, b"1", i_vec.as_slice()].as_ref(), b"\0", Element::empty_tree(), None, @@ -134,12 +167,30 @@ fn populate_tree_for_non_unique_double_range_subquery(db: &TempGroveDb) { .unwrap() .expect("successful subtree insert"); - for k in 100u32..110 { - let k_vec = k.to_be_bytes().to_vec(); + for j in 100u32..150 { + let random_key = rand::thread_rng().gen::<[u8; 32]>(); + let mut j_vec = i_vec.clone(); + j_vec.append(&mut j.to_be_bytes().to_vec()); + + // We should insert every item to the tree holding items + db.insert( + [TEST_LEAF, b"\0"].as_ref(), + &random_key, + Element::new_item(j_vec.clone()), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + db.insert( - [TEST_LEAF, i_vec.as_slice(), b"a", &j_vec, b"\0"].as_ref(), - &k_vec.clone(), - Element::new_item(k_vec), + [TEST_LEAF, b"1", i_vec.clone().as_slice(), b"\0"].as_ref(), + &random_key, + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + b"\0".to_vec(), + random_key.to_vec(), + ])), None, None, ) @@ -148,47 +199,49 @@ fn populate_tree_for_non_unique_double_range_subquery(db: &TempGroveDb) { } } } -} -fn populate_tree_by_reference_for_non_unique_range_subquery(db: &TempGroveDb) { - // This subtree will be holding values - db.insert( - [TEST_LEAF].as_ref(), - b"\0", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - // This subtree will be holding references - db.insert( - [TEST_LEAF].as_ref(), - b"1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - // Insert a couple of subtrees first - for i in 1985u32..2000 { - let i_vec = i.to_be_bytes().to_vec(); + fn populate_tree_for_unique_range_subquery(db: &TempGroveDb) { + // Insert a couple of subtrees first + for i in 1985u32..2000 { + let i_vec = i.to_be_bytes().to_vec(); + db.insert( + [TEST_LEAF].as_ref(), + &i_vec, + Element::empty_tree(), + None, + None, + ) + 
.unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF, &i_vec.clone()].as_ref(), + b"\0", + Element::new_item(i_vec), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + } + } + + fn populate_tree_by_reference_for_unique_range_subquery(db: &TempGroveDb) { + // This subtree will be holding values db.insert( - [TEST_LEAF, b"1"].as_ref(), - &i_vec, + [TEST_LEAF].as_ref(), + b"\0", Element::empty_tree(), None, None, ) .unwrap() .expect("successful subtree insert"); - // Insert element 0 - // Insert some elements into subtree + + // This subtree will be holding references db.insert( - [TEST_LEAF, b"1", i_vec.as_slice()].as_ref(), - b"\0", + [TEST_LEAF].as_ref(), + b"1", Element::empty_tree(), None, None, @@ -196,29 +249,37 @@ fn populate_tree_by_reference_for_non_unique_range_subquery(db: &TempGroveDb) { .unwrap() .expect("successful subtree insert"); - for j in 100u32..150 { - let random_key = rand::thread_rng().gen::<[u8; 32]>(); - let mut j_vec = i_vec.clone(); - j_vec.append(&mut j.to_be_bytes().to_vec()); + for i in 1985u32..2000 { + let i_vec = i.to_be_bytes().to_vec(); + db.insert( + [TEST_LEAF, b"1"].as_ref(), + &i_vec, + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); // We should insert every item to the tree holding items db.insert( [TEST_LEAF, b"\0"].as_ref(), - &random_key, - Element::new_item(j_vec.clone()), + &i_vec, + Element::new_item(i_vec.clone()), None, None, ) .unwrap() .expect("successful value insert"); + // We should insert a reference to the item db.insert( - [TEST_LEAF, b"1", i_vec.clone().as_slice(), b"\0"].as_ref(), - &random_key, + [TEST_LEAF, b"1", i_vec.clone().as_slice()].as_ref(), + b"\0", Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ TEST_LEAF.to_vec(), b"\0".to_vec(), - random_key.to_vec(), + i_vec.clone(), ])), None, None, @@ -227,2510 +288,2440 @@ fn populate_tree_by_reference_for_non_unique_range_subquery(db: 
&TempGroveDb) { .expect("successful value insert"); } } -} -fn populate_tree_for_unique_range_subquery(db: &TempGroveDb) { - // Insert a couple of subtrees first - for i in 1985u32..2000 { - let i_vec = i.to_be_bytes().to_vec(); + fn populate_tree_for_unique_range_subquery_with_non_unique_null_values(db: &mut TempGroveDb) { + populate_tree_for_unique_range_subquery(db); + db.insert([TEST_LEAF].as_ref(), &[], Element::empty_tree(), None, None) + .unwrap() + .expect("successful subtree insert"); db.insert( - [TEST_LEAF].as_ref(), - &i_vec, + [TEST_LEAF, &[]].as_ref(), + b"\0", Element::empty_tree(), None, None, ) .unwrap() .expect("successful subtree insert"); + // Insert a couple of subtrees first + for i in 100u32..200 { + let i_vec = i.to_be_bytes().to_vec(); + db.insert( + [TEST_LEAF, &[], b"\0"].as_ref(), + &i_vec, + Element::new_item(i_vec.clone()), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + } + } + fn populate_tree_for_uneven_keys(db: &TempGroveDb) { db.insert( - [TEST_LEAF, &i_vec.clone()].as_ref(), - b"\0", - Element::new_item(i_vec), + [TEST_LEAF].as_ref(), + "b".as_ref(), + Element::new_item(1u8.to_be_bytes().to_vec()), None, None, ) .unwrap() - .expect("successful value insert"); - } -} + .expect("successful subtree insert"); -fn populate_tree_by_reference_for_unique_range_subquery(db: &TempGroveDb) { - // This subtree will be holding values - db.insert( - [TEST_LEAF].as_ref(), - b"\0", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - // This subtree will be holding references - db.insert( - [TEST_LEAF].as_ref(), - b"1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - for i in 1985u32..2000 { - let i_vec = i.to_be_bytes().to_vec(); db.insert( - [TEST_LEAF, b"1"].as_ref(), - &i_vec, - Element::empty_tree(), + [TEST_LEAF].as_ref(), + "ab".as_ref(), + Element::new_item(2u8.to_be_bytes().to_vec()), None, None, ) .unwrap() 
.expect("successful subtree insert"); - // We should insert every item to the tree holding items db.insert( - [TEST_LEAF, b"\0"].as_ref(), - &i_vec, - Element::new_item(i_vec.clone()), + [TEST_LEAF].as_ref(), + "x".as_ref(), + Element::new_item(3u8.to_be_bytes().to_vec()), None, None, ) .unwrap() - .expect("successful value insert"); + .expect("successful subtree insert"); - // We should insert a reference to the item db.insert( - [TEST_LEAF, b"1", i_vec.clone().as_slice()].as_ref(), - b"\0", - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"\0".to_vec(), - i_vec.clone(), - ])), + [TEST_LEAF].as_ref(), + &[3; 32], + Element::new_item(4u8.to_be_bytes().to_vec()), None, None, ) - .unwrap() - .expect("successful value insert"); - } -} - -fn populate_tree_for_unique_range_subquery_with_non_unique_null_values(db: &mut TempGroveDb) { - populate_tree_for_unique_range_subquery(db); - db.insert([TEST_LEAF].as_ref(), &[], Element::empty_tree(), None, None) .unwrap() .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, &[]].as_ref(), - b"\0", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - // Insert a couple of subtrees first - for i in 100u32..200 { - let i_vec = i.to_be_bytes().to_vec(); + db.insert( - [TEST_LEAF, &[], b"\0"].as_ref(), - &i_vec, - Element::new_item(i_vec.clone()), + [TEST_LEAF].as_ref(), + "k".as_ref(), + Element::new_item(5u8.to_be_bytes().to_vec()), None, None, ) .unwrap() - .expect("successful value insert"); + .expect("successful subtree insert"); } -} -fn populate_tree_for_uneven_keys(db: &TempGroveDb) { - db.insert( - [TEST_LEAF].as_ref(), - "b".as_ref(), - Element::new_item(1u8.to_be_bytes().to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF].as_ref(), - "ab".as_ref(), - Element::new_item(2u8.to_be_bytes().to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree 
insert"); - - db.insert( - [TEST_LEAF].as_ref(), - "x".as_ref(), - Element::new_item(3u8.to_be_bytes().to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF].as_ref(), - &[3; 32], - Element::new_item(4u8.to_be_bytes().to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF].as_ref(), - "k".as_ref(), - Element::new_item(5u8.to_be_bytes().to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); -} + #[test] + fn test_get_correct_order() { + let db = make_test_grovedb(); + populate_tree_for_uneven_keys(&db); -#[test] -fn test_get_correct_order() { - let db = make_test_grovedb(); - populate_tree_for_uneven_keys(&db); + let path = vec![TEST_LEAF.to_vec()]; + let query = Query::new_range_full(); - let path = vec![TEST_LEAF.to_vec()]; - let query = Query::new_range_full(); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements, vec![vec![4], vec![2], vec![1], vec![5], vec![3]]); + } - assert_eq!(elements, vec![vec![4], vec![2], vec![1], vec![5], vec![3]]); -} + #[test] + fn test_get_range_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); -#[test] -fn test_get_range_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range(1988_u32.to_be_bytes().to_vec()..1992_u32.to_be_bytes().to_vec()); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = 
Query::new(); - query.insert_range(1988_u32.to_be_bytes().to_vec()..1992_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 200); - assert_eq!(elements.len(), 200); + let mut first_value = 1988_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let mut first_value = 1988_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let mut last_value = 1991_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let mut last_value = 1991_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 200); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = 
GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 200); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_query_with_unique_subquery() { + let mut db = make_test_grovedb(); + populate_tree_for_unique_range_subquery(&mut db); -#[test] -fn test_get_range_query_with_unique_subquery() { - let mut db = make_test_grovedb(); - populate_tree_for_unique_range_subquery(&mut db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range(1988_u32.to_be_bytes().to_vec()..1992_u32.to_be_bytes().to_vec()); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range(1988_u32.to_be_bytes().to_vec()..1992_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); - let subquery_key: Vec = b"\0".to_vec(); + query.set_subquery_key(subquery_key); - query.set_subquery_key(subquery_key); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 4); - assert_eq!(elements.len(), 4); + let first_value = 1988_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - let first_value = 1988_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let last_value = 1991_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let last_value = 1991_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, 
&path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 4); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_query_with_unique_subquery_on_references() { + let db = make_test_grovedb(); + populate_tree_by_reference_for_unique_range_subquery(&db); -#[test] -fn test_get_range_query_with_unique_subquery_on_references() { - let db = make_test_grovedb(); - populate_tree_by_reference_for_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec(), b"1".to_vec()]; + let mut query = Query::new(); + query.insert_range(1988_u32.to_be_bytes().to_vec()..1992_u32.to_be_bytes().to_vec()); - let path = vec![TEST_LEAF.to_vec(), b"1".to_vec()]; - let mut query = Query::new(); - query.insert_range(1988_u32.to_be_bytes().to_vec()..1992_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); - let subquery_key: Vec = b"\0".to_vec(); + query.set_subquery_key(subquery_key); - query.set_subquery_key(subquery_key); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 4); - assert_eq!(elements.len(), 4); + let first_value = 1988_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - let first_value = 1988_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let last_value = 
1991_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let last_value = 1991_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 4); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_query_with_unique_subquery_with_non_unique_null_values() { + let mut db = make_test_grovedb(); + populate_tree_for_unique_range_subquery_with_non_unique_null_values(&mut db); -#[test] -fn test_get_range_query_with_unique_subquery_with_non_unique_null_values() { - let mut db = make_test_grovedb(); - populate_tree_for_unique_range_subquery_with_non_unique_null_values(&mut db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_all(); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_all(); + let subquery_key: Vec = b"\0".to_vec(); - let subquery_key: Vec = b"\0".to_vec(); + query.set_subquery_key(subquery_key); - query.set_subquery_key(subquery_key); + let mut subquery = Query::new(); + subquery.insert_all(); - let mut subquery = Query::new(); - subquery.insert_all(); + query.add_conditional_subquery( + QueryItem::Key(b"".to_vec()), + Some(vec![b"\0".to_vec()]), + Some(subquery), + ); - query.add_conditional_subquery( - QueryItem::Key(b"".to_vec()), - Some(vec![b"\0".to_vec()]), - Some(subquery), - ); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = 
PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 115); - assert_eq!(elements.len(), 115); + let first_value = 100_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - let first_value = 100_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let last_value = 1999_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let last_value = 1999_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 115); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 115); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_query_with_unique_subquery_ignore_non_unique_null_values() { + let mut db = make_test_grovedb(); + populate_tree_for_unique_range_subquery_with_non_unique_null_values(&mut db); -#[test] -fn test_get_range_query_with_unique_subquery_ignore_non_unique_null_values() { - let mut db = make_test_grovedb(); - populate_tree_for_unique_range_subquery_with_non_unique_null_values(&mut db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_all(); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_all(); + let 
subquery_key: Vec = b"\0".to_vec(); - let subquery_key: Vec = b"\0".to_vec(); + query.set_subquery_key(subquery_key); - query.set_subquery_key(subquery_key); + let subquery = Query::new(); - let subquery = Query::new(); + query.add_conditional_subquery( + QueryItem::Key(b"".to_vec()), + Some(vec![b"\0".to_vec()]), + Some(subquery), + ); - query.add_conditional_subquery( - QueryItem::Key(b"".to_vec()), - Some(vec![b"\0".to_vec()]), - Some(subquery), - ); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 15); - assert_eq!(elements.len(), 15); + let first_value = 1985_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - let first_value = 1985_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let last_value = 1999_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let last_value = 1999_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 15); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 15); - compare_result_sets(&elements, &result_set); -} + #[test] + fn 
test_get_range_inclusive_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); -#[test] -fn test_get_range_inclusive_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_inclusive( + 1988_u32.to_be_bytes().to_vec()..=1995_u32.to_be_bytes().to_vec(), + ); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_inclusive(1988_u32.to_be_bytes().to_vec()..=1995_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 400); - assert_eq!(elements.len(), 400); + let mut first_value = 1988_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let mut first_value = 1988_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let mut last_value = 1995_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let mut last_value = 
1995_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 400); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 400); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_inclusive_query_with_non_unique_subquery_on_references() { + let db = make_test_grovedb(); + populate_tree_by_reference_for_non_unique_range_subquery(&db); -#[test] -fn test_get_range_inclusive_query_with_non_unique_subquery_on_references() { - let db = make_test_grovedb(); - populate_tree_by_reference_for_non_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec(), b"1".to_vec()]; + let mut query = Query::new(); + query.insert_range_inclusive( + 1988_u32.to_be_bytes().to_vec()..=1995_u32.to_be_bytes().to_vec(), + ); - let path = vec![TEST_LEAF.to_vec(), b"1".to_vec()]; - let mut query = Query::new(); - query.insert_range_inclusive(1988_u32.to_be_bytes().to_vec()..=1995_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + 
.query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); + + assert_eq!(elements.len(), 400); + + let mut first_value = 1988_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + // using contains as the elements get stored at random key locations + // hence impossible to predict the final location + // but must exist + assert!(elements.contains(&first_value)); + + let mut last_value = 1995_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert!(elements.contains(&last_value)); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 400); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); - - assert_eq!(elements.len(), 400); - - let mut first_value = 1988_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - // using contains as the elements get stored at random key locations - // hence impossible to predict the final location - // but must exist - assert!(elements.contains(&first_value)); - - let mut last_value = 1995_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert!(elements.contains(&last_value)); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 400); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_inclusive_query_with_unique_subquery() { + let db = make_test_grovedb(); + 
populate_tree_for_unique_range_subquery(&db); -#[test] -fn test_get_range_inclusive_query_with_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_inclusive( + 1988_u32.to_be_bytes().to_vec()..=1995_u32.to_be_bytes().to_vec(), + ); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_inclusive(1988_u32.to_be_bytes().to_vec()..=1995_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); - let subquery_key: Vec = b"\0".to_vec(); + query.set_subquery_key(subquery_key); - query.set_subquery_key(subquery_key); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 8); - assert_eq!(elements.len(), 8); + let first_value = 1988_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - let first_value = 1988_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let last_value = 1995_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let last_value = 1995_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 8); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = 
GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 8); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_from_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); -#[test] -fn test_get_range_from_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_from(1995_u32.to_be_bytes().to_vec()..); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_from(1995_u32.to_be_bytes().to_vec()..); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 250); - assert_eq!(elements.len(), 250); + let mut first_value = 1995_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let mut first_value = 1995_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let mut last_value = 1999_u32.to_be_bytes().to_vec(); + last_value.append(&mut 
149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let mut last_value = 1999_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 250); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 250); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_from_query_with_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_unique_range_subquery(&db); -#[test] -fn test_get_range_from_query_with_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_from(1995_u32.to_be_bytes().to_vec()..); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_from(1995_u32.to_be_bytes().to_vec()..); + let subquery_key: Vec = b"\0".to_vec(); - let subquery_key: Vec = b"\0".to_vec(); + query.set_subquery_key(subquery_key); - query.set_subquery_key(subquery_key); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + 
assert_eq!(elements.len(), 5); - assert_eq!(elements.len(), 5); + let first_value = 1995_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - let first_value = 1995_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let last_value = 1999_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let last_value = 1999_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_to_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); -#[test] -fn test_get_range_to_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_to(..1995_u32.to_be_bytes().to_vec()); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_to(..1995_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let path_query = 
PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 500); - assert_eq!(elements.len(), 500); + let mut first_value = 1985_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let mut first_value = 1985_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let mut last_value = 1994_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let mut last_value = 1994_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 500); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 500); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_to_query_with_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_unique_range_subquery(&db); -#[test] -fn test_get_range_to_query_with_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_unique_range_subquery(&db); + 
let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_to(..1995_u32.to_be_bytes().to_vec()); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_to(..1995_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); - let subquery_key: Vec = b"\0".to_vec(); + query.set_subquery_key(subquery_key); - query.set_subquery_key(subquery_key); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 10); - assert_eq!(elements.len(), 10); + let first_value = 1985_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - let first_value = 1985_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let last_value = 1994_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let last_value = 1994_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 10); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 10); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_to_inclusive_query_with_non_unique_subquery() { + let db = 
make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); -#[test] -fn test_get_range_to_inclusive_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_to_inclusive(..=1995_u32.to_be_bytes().to_vec()); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_to_inclusive(..=1995_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 550); - assert_eq!(elements.len(), 550); + let mut first_value = 1985_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let mut first_value = 1985_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let mut last_value = 1995_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let mut last_value = 1995_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], 
last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 550); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 550); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_to_inclusive_query_with_non_unique_subquery_and_key_out_of_bounds() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); -#[test] -fn test_get_range_to_inclusive_query_with_non_unique_subquery_and_key_out_of_bounds() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new_with_direction(false); + query.insert_range_to_inclusive(..=5000_u32.to_be_bytes().to_vec()); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new_with_direction(false); - query.insert_range_to_inclusive(..=5000_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new_with_direction(false); + subquery.insert_all(); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new_with_direction(false); - subquery.insert_all(); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - 
.query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 750); - assert_eq!(elements.len(), 750); + let mut first_value = 1999_u32.to_be_bytes().to_vec(); + first_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let mut first_value = 1999_u32.to_be_bytes().to_vec(); - first_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let mut last_value = 1985_u32.to_be_bytes().to_vec(); + last_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let mut last_value = 1985_u32.to_be_bytes().to_vec(); - last_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 750); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 750); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_to_inclusive_query_with_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_unique_range_subquery(&db); -#[test] -fn test_get_range_to_inclusive_query_with_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_to_inclusive(..=1995_u32.to_be_bytes().to_vec()); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - 
query.insert_range_to_inclusive(..=1995_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); - let subquery_key: Vec = b"\0".to_vec(); + query.set_subquery_key(subquery_key); - query.set_subquery_key(subquery_key); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 11); - assert_eq!(elements.len(), 11); + let first_value = 1985_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - let first_value = 1985_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let last_value = 1995_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let last_value = 1995_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 11); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 11); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_after_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); -#[test] -fn test_get_range_after_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - 
populate_tree_for_non_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_after(1995_u32.to_be_bytes().to_vec()..); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_after(1995_u32.to_be_bytes().to_vec()..); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 200); - assert_eq!(elements.len(), 200); + let mut first_value = 1996_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let mut first_value = 1996_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let mut last_value = 1999_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let mut last_value = 1999_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, 
db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 200); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 200); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_after_to_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); -#[test] -fn test_get_range_after_to_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_after_to( + 1995_u32.to_be_bytes().to_vec()..1997_u32.to_be_bytes().to_vec(), + ); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_after_to(1995_u32.to_be_bytes().to_vec()..1997_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 50); - assert_eq!(elements.len(), 50); + let mut first_value = 1996_u32.to_be_bytes().to_vec(); + first_value.append(&mut 
100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let mut first_value = 1996_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let mut last_value = 1996_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let mut last_value = 1996_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 50); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 50); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_after_to_inclusive_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); -#[test] -fn test_get_range_after_to_inclusive_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_after_to_inclusive( + 1995_u32.to_be_bytes().to_vec()..=1997_u32.to_be_bytes().to_vec(), + ); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_after_to_inclusive( - 1995_u32.to_be_bytes().to_vec()..=1997_u32.to_be_bytes().to_vec(), - ); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let subquery_key: Vec = 
b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 100); - assert_eq!(elements.len(), 100); + let mut first_value = 1996_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let mut first_value = 1996_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let mut last_value = 1997_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let mut last_value = 1997_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 100); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 100); - compare_result_sets(&elements, &result_set); -} + #[test] + fn 
test_get_range_after_to_inclusive_query_with_non_unique_subquery_and_key_out_of_bounds() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); -#[test] -fn test_get_range_after_to_inclusive_query_with_non_unique_subquery_and_key_out_of_bounds() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new_with_direction(false); + query.insert_range_after_to_inclusive( + 1995_u32.to_be_bytes().to_vec()..=5000_u32.to_be_bytes().to_vec(), + ); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new_with_direction(false); - query.insert_range_after_to_inclusive( - 1995_u32.to_be_bytes().to_vec()..=5000_u32.to_be_bytes().to_vec(), - ); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new_with_direction(false); + subquery.insert_all(); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new_with_direction(false); - subquery.insert_all(); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 200); - assert_eq!(elements.len(), 200); + let mut first_value = 1999_u32.to_be_bytes().to_vec(); + first_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let mut first_value = 1999_u32.to_be_bytes().to_vec(); - first_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let mut last_value = 
1996_u32.to_be_bytes().to_vec(); + last_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let mut last_value = 1996_u32.to_be_bytes().to_vec(); - last_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 200); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 200); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_inclusive_query_with_double_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_double_range_subquery(&db); -#[test] -fn test_get_range_inclusive_query_with_double_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_double_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_inclusive((3u32).to_be_bytes().to_vec()..=(4u32).to_be_bytes().to_vec()); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_inclusive((3u32).to_be_bytes().to_vec()..=(4u32).to_be_bytes().to_vec()); + query.set_subquery_key(b"a".to_vec()); - query.set_subquery_key(b"a".to_vec()); + let mut subquery = Query::new(); + subquery.insert_range_inclusive( + (29u32).to_be_bytes().to_vec()..=(31u32).to_be_bytes().to_vec(), + ); - let mut subquery = Query::new(); - subquery - .insert_range_inclusive((29u32).to_be_bytes().to_vec()..=(31u32).to_be_bytes().to_vec()); + subquery.set_subquery_key(b"\0".to_vec()); - 
subquery.set_subquery_key(b"\0".to_vec()); + let mut subsubquery = Query::new(); + subsubquery.insert_all(); - let mut subsubquery = Query::new(); - subsubquery.insert_all(); + subquery.set_subquery(subsubquery); - subquery.set_subquery(subsubquery); + query.set_subquery(subquery); - query.set_subquery(subquery); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 60); - assert_eq!(elements.len(), 60); + let first_value = 100_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - let first_value = 100_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let last_value = 109_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let last_value = 109_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 60); + compare_result_sets(&elements, &result_set); + } - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 60); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_query_with_limit_and_offset() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); -#[test] -fn test_get_range_query_with_limit_and_offset() { - let 
db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new_with_direction(true); + query.insert_range(1990_u32.to_be_bytes().to_vec()..1995_u32.to_be_bytes().to_vec()); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new_with_direction(true); - query.insert_range(1990_u32.to_be_bytes().to_vec()..1995_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery.clone()); - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery.clone()); + // Baseline query: no offset or limit + left to right + let path_query = PathQuery::new(path.clone(), SizedQuery::new(query.clone(), None, None)); - // Baseline query: no offset or limit + left to right - let path_query = PathQuery::new(path.clone(), SizedQuery::new(query.clone(), None, None)); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 250); - assert_eq!(elements.len(), 250); + let mut first_value = 1990_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let mut first_value = 1990_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let mut last_value = 1994_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let mut last_value = 
1994_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 250); + compare_result_sets(&elements, &result_set); - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 250); - compare_result_sets(&elements, &result_set); + subquery.left_to_right = false; - subquery.left_to_right = false; + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery.clone()); - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery.clone()); + query.left_to_right = false; - query.left_to_right = false; + // Baseline query: no offset or limit + right to left + let path_query = PathQuery::new(path.clone(), SizedQuery::new(query.clone(), None, None)); - // Baseline query: no offset or limit + right to left - let path_query = PathQuery::new(path.clone(), SizedQuery::new(query.clone(), None, None)); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 250); - assert_eq!(elements.len(), 250); + let mut first_value = 1994_u32.to_be_bytes().to_vec(); + first_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let mut first_value = 1994_u32.to_be_bytes().to_vec(); - first_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], 
first_value); + let mut last_value = 1990_u32.to_be_bytes().to_vec(); + last_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let mut last_value = 1990_u32.to_be_bytes().to_vec(); - last_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 250); + compare_result_sets(&elements, &result_set); - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 250); - compare_result_sets(&elements, &result_set); + subquery.left_to_right = true; - subquery.left_to_right = true; + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery.clone()); - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery.clone()); + query.left_to_right = true; - query.left_to_right = true; + // Limit the result to just 55 elements + let path_query = + PathQuery::new(path.clone(), SizedQuery::new(query.clone(), Some(55), None)); - // Limit the result to just 55 elements - let path_query = PathQuery::new(path.clone(), SizedQuery::new(query.clone(), Some(55), None)); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 55); - assert_eq!(elements.len(), 55); + let mut first_value = 1990_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + 
assert_eq!(elements[0], first_value); - let mut first_value = 1990_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + // Second tree 5 element [100, 101, 102, 103, 104] + let mut last_value = 1991_u32.to_be_bytes().to_vec(); + last_value.append(&mut 104_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - // Second tree 5 element [100, 101, 102, 103, 104] - let mut last_value = 1991_u32.to_be_bytes().to_vec(); - last_value.append(&mut 104_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 55); + compare_result_sets(&elements, &result_set); - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 55); - compare_result_sets(&elements, &result_set); + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery.clone()); - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery.clone()); + // Limit the result set to 60 elements but skip the first 14 elements + let path_query = PathQuery::new( + path.clone(), + SizedQuery::new(query.clone(), Some(60), Some(14)), + ); - // Limit the result set to 60 elements but skip the first 14 elements - let path_query = PathQuery::new( - path.clone(), - SizedQuery::new(query.clone(), Some(60), Some(14)), - ); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - 
.unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 60); - assert_eq!(elements.len(), 60); + // Skips the first 14 elements, starts from the 15th + // i.e skips [100 - 113] starts from 114 + let mut first_value = 1990_u32.to_be_bytes().to_vec(); + first_value.append(&mut 114_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - // Skips the first 14 elements, starts from the 15th - // i.e skips [100 - 113] starts from 114 - let mut first_value = 1990_u32.to_be_bytes().to_vec(); - first_value.append(&mut 114_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + // Continues for 60 iterations + // Takes 36 elements from the first tree (50 - 14) + // takes the remaining 24 from the second three (60 - 36) + let mut last_value = 1991_u32.to_be_bytes().to_vec(); + last_value.append(&mut 123_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - // Continues for 60 iterations - // Takes 36 elements from the first tree (50 - 14) - // takes the remaining 24 from the second three (60 - 36) - let mut last_value = 1991_u32.to_be_bytes().to_vec(); - last_value.append(&mut 123_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery.clone()); - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery.clone()); + query.left_to_right = false; - query.left_to_right = false; + // Limit the result set to 60 element but skip first 10 elements (this time + // right to left) + let path_query = PathQuery::new( + path.clone(), + SizedQuery::new(query.clone(), Some(60), Some(10)), + ); - // Limit the result set to 60 element but skip first 10 elements (this time - // right to left) - let path_query = PathQuery::new( - path.clone(), - SizedQuery::new(query.clone(), Some(60), Some(10)), - ); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, 
None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 60); - assert_eq!(elements.len(), 60); + // Skips the first 10 elements from the back + // last tree and starts from the 11th before the end + // Underlying subquery is ascending + let mut first_value = 1994_u32.to_be_bytes().to_vec(); + first_value.append(&mut 110_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - // Skips the first 10 elements from the back - // last tree and starts from the 11th before the end - // Underlying subquery is ascending - let mut first_value = 1994_u32.to_be_bytes().to_vec(); - first_value.append(&mut 110_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let mut last_value = 1993_u32.to_be_bytes().to_vec(); + last_value.append(&mut 119_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let mut last_value = 1993_u32.to_be_bytes().to_vec(); - last_value.append(&mut 119_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery.clone()); - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery.clone()); + query.left_to_right = true; - query.left_to_right = true; + // Offset bigger than elements in range + let path_query = PathQuery::new( + path.clone(), + SizedQuery::new(query.clone(), None, Some(5000)), + ); - // Offset bigger than elements in range - let path_query = PathQuery::new( - path.clone(), - SizedQuery::new(query.clone(), None, Some(5000)), - ); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - 
.expect("expected successful get_path_query"); + assert_eq!(elements.len(), 0); - assert_eq!(elements.len(), 0); + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery); - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery); + // Limit bigger than elements in range + let path_query = PathQuery::new( + path.clone(), + SizedQuery::new(query.clone(), Some(5000), None), + ); - // Limit bigger than elements in range - let path_query = PathQuery::new( - path.clone(), - SizedQuery::new(query.clone(), Some(5000), None), - ); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 250); - assert_eq!(elements.len(), 250); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 250); - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 250); + // Test on unique subtree build + let db = make_test_grovedb(); + populate_tree_for_unique_range_subquery(&db); - // Test on unique subtree build - let db = make_test_grovedb(); - populate_tree_for_unique_range_subquery(&db); + let mut query = Query::new_with_direction(true); + query.insert_range(1990_u32.to_be_bytes().to_vec()..2000_u32.to_be_bytes().to_vec()); - let mut query = Query::new_with_direction(true); - query.insert_range(1990_u32.to_be_bytes().to_vec()..2000_u32.to_be_bytes().to_vec()); + query.set_subquery_key(subquery_key); - 
query.set_subquery_key(subquery_key); + let path_query = PathQuery::new(path, SizedQuery::new(query.clone(), Some(5), Some(2))); - let path_query = PathQuery::new(path, SizedQuery::new(query.clone(), Some(5), Some(2))); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 5); - assert_eq!(elements.len(), 5); + let first_value = 1992_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - let first_value = 1992_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let last_value = 1996_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); + } - let last_value = 1996_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); -} + #[test] + fn test_correct_child_root_hash_propagation_for_parent_in_same_batch() { + let tmp_dir = TempDir::new().unwrap(); + let db = GroveDb::open(tmp_dir.path()).unwrap(); + let tree_name_slice: &[u8] = &[ + 2, 17, 40, 46, 227, 17, 179, 211, 98, 50, 130, 107, 246, 26, 147, 45, 234, 189, 245, + 77, 252, 86, 99, 107, 197, 226, 188, 54, 239, 64, 17, 37, + ]; + + let batch = vec![GroveDbOp::insert_op(vec![], vec![1], Element::empty_tree())]; + db.apply_batch(batch, None, None) + .unwrap() + .expect("should apply batch"); -#[test] -fn test_correct_child_root_hash_propagation_for_parent_in_same_batch() { - let tmp_dir = TempDir::new().unwrap(); - let db = GroveDb::open(tmp_dir.path()).unwrap(); - let tree_name_slice: &[u8] = &[ - 2, 17, 40, 46, 227, 17, 179, 211, 98, 50, 130, 107, 246, 26, 147, 45, 234, 189, 245, 77, - 252, 86, 99, 107, 197, 226, 188, 54, 239, 64, 17, 37, - ]; - - let batch = vec![GroveDbOp::insert_op(vec![], vec![1], Element::empty_tree())]; - db.apply_batch(batch, None, None) - 
.unwrap() - .expect("should apply batch"); + let batch = vec![ + GroveDbOp::insert_op( + vec![vec![1]], + tree_name_slice.to_vec(), + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![vec![1], tree_name_slice.to_vec()], + b"\0".to_vec(), + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![vec![1], tree_name_slice.to_vec()], + vec![1], + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![vec![1], tree_name_slice.to_vec(), vec![1]], + b"person".to_vec(), + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![ + vec![1], + tree_name_slice.to_vec(), + vec![1], + b"person".to_vec(), + ], + b"\0".to_vec(), + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![ + vec![1], + tree_name_slice.to_vec(), + vec![1], + b"person".to_vec(), + ], + b"firstName".to_vec(), + Element::empty_tree(), + ), + ]; + db.apply_batch(batch, None, None) + .unwrap() + .expect("should apply batch"); + + let batch = vec![ + GroveDbOp::insert_op( + vec![ + vec![1], + tree_name_slice.to_vec(), + vec![1], + b"person".to_vec(), + b"\0".to_vec(), + ], + b"person_id_1".to_vec(), + Element::new_item(vec![50]), + ), + GroveDbOp::insert_op( + vec![ + vec![1], + tree_name_slice.to_vec(), + vec![1], + b"person".to_vec(), + b"firstName".to_vec(), + ], + b"cammi".to_vec(), + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![ + vec![1], + tree_name_slice.to_vec(), + vec![1], + b"person".to_vec(), + b"firstName".to_vec(), + b"cammi".to_vec(), + ], + b"\0".to_vec(), + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![ + vec![1], + tree_name_slice.to_vec(), + vec![1], + b"person".to_vec(), + b"firstName".to_vec(), + b"cammi".to_vec(), + b"\0".to_vec(), + ], + b"person_ref_id".to_vec(), + Element::new_reference(ReferencePathType::UpstreamRootHeightReference( + 4, + vec![b"\0".to_vec(), b"person_id_1".to_vec()], + )), + ), + ]; + db.apply_batch(batch, None, None) + .unwrap() + .expect("should apply batch"); - let batch = vec![ - GroveDbOp::insert_op( - 
vec![vec![1]], + let path = vec![ + vec![1], tree_name_slice.to_vec(), - Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![vec![1], tree_name_slice.to_vec()], - b"\0".to_vec(), - Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![vec![1], tree_name_slice.to_vec()], vec![1], - Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![vec![1], tree_name_slice.to_vec(), vec![1]], b"person".to_vec(), - Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![ - vec![1], - tree_name_slice.to_vec(), - vec![1], - b"person".to_vec(), - ], - b"\0".to_vec(), - Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![ - vec![1], - tree_name_slice.to_vec(), - vec![1], - b"person".to_vec(), - ], b"firstName".to_vec(), - Element::empty_tree(), - ), - ]; - db.apply_batch(batch, None, None) - .unwrap() - .expect("should apply batch"); + ]; + let mut query = Query::new(); + query.insert_all(); + query.set_subquery_key(b"\0".to_vec()); + let mut subquery = Query::new(); + subquery.insert_all(); + query.set_subquery(subquery); + let path_query = PathQuery::new( + path, + SizedQuery { + query: query.clone(), + limit: Some(100), + offset: Some(0), + }, + ); + + let proof = db + .prove_query(&path_query, None) + .unwrap() + .expect("expected successful proving"); + let (hash, _result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + } - let batch = vec![ - GroveDbOp::insert_op( - vec![ - vec![1], - tree_name_slice.to_vec(), - vec![1], - b"person".to_vec(), - b"\0".to_vec(), - ], - b"person_id_1".to_vec(), - Element::new_item(vec![50]), - ), - GroveDbOp::insert_op( - vec![ - vec![1], - tree_name_slice.to_vec(), - vec![1], - b"person".to_vec(), - b"firstName".to_vec(), - ], - b"cammi".to_vec(), + #[test] + fn test_mixed_level_proofs() { + let db = make_test_grovedb(); + db.insert( + [TEST_LEAF].as_ref(), + b"key1", Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![ - vec![1], - 
tree_name_slice.to_vec(), - vec![1], - b"person".to_vec(), - b"firstName".to_vec(), - b"cammi".to_vec(), - ], - b"\0".to_vec(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key2", + Element::new_item(vec![1]), + None, + None, + ) + .unwrap() + .expect("successful item insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key3", Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![ - vec![1], - tree_name_slice.to_vec(), - vec![1], - b"person".to_vec(), - b"firstName".to_vec(), - b"cammi".to_vec(), - b"\0".to_vec(), - ], - b"person_ref_id".to_vec(), - Element::new_reference(ReferencePathType::UpstreamRootHeightReference( - 4, - vec![b"\0".to_vec(), b"person_id_1".to_vec()], - )), - ), - ]; - db.apply_batch(batch, None, None) + None, + None, + ) .unwrap() - .expect("should apply batch"); - - let path = vec![ - vec![1], - tree_name_slice.to_vec(), - vec![1], - b"person".to_vec(), - b"firstName".to_vec(), - ]; - let mut query = Query::new(); - query.insert_all(); - query.set_subquery_key(b"\0".to_vec()); - let mut subquery = Query::new(); - subquery.insert_all(); - query.set_subquery(subquery); - let path_query = PathQuery::new( - path, - SizedQuery { - query: query.clone(), - limit: Some(100), - offset: Some(0), - }, - ); - - let proof = db - .prove_query(&path_query, None) + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key4", + Element::new_reference(ReferencePathType::SiblingReference(b"key2".to_vec())), + None, + None, + ) .unwrap() - .expect("expected successful proving"); - let (hash, _result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); -} + .expect("successful subtree insert"); -#[test] -fn test_mixed_level_proofs() { - let db = make_test_grovedb(); - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 
insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key2", - Element::new_item(vec![1]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key3", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key4", - Element::new_reference(ReferencePathType::SiblingReference(b"key2".to_vec())), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"k1", - Element::new_item(vec![2]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"k2", - Element::new_item(vec![3]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"k3", - Element::new_item(vec![4]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - - let mut query = Query::new(); - query.insert_all(); - let mut subquery = Query::new(); - subquery.insert_all(); - query.set_subquery(subquery); - - let path = vec![TEST_LEAF.to_vec()]; - - let path_query = PathQuery::new_unsized(path.clone(), query.clone()); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"k1", + Element::new_item(vec![2]), + None, + None, + ) .unwrap() - .expect("successful get_path_query"); + .expect("successful item insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"k2", + Element::new_item(vec![3]), + None, + None, + ) + .unwrap() + .expect("successful item insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"k3", + Element::new_item(vec![4]), + None, + None, + ) + .unwrap() + .expect("successful item insert"); - assert_eq!(elements.len(), 5); - assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1], vec![1]]); + let mut query = Query::new(); + query.insert_all(); + let mut subquery = 
Query::new(); + subquery.insert_all(); + query.set_subquery(subquery); - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - compare_result_sets(&elements, &result_set); + let path = vec![TEST_LEAF.to_vec()]; - // Test mixed element proofs with limit and offset - let path_query = PathQuery::new_unsized(path.clone(), query.clone()); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("successful get_path_query"); + let path_query = PathQuery::new_unsized(path.clone(), query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("successful get_path_query"); - assert_eq!(elements.len(), 5); - assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1], vec![1]]); + assert_eq!(elements.len(), 5); + assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1], vec![1]]); - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - compare_result_sets(&elements, &result_set); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); + compare_result_sets(&elements, &result_set); - // TODO: Fix noticed bug when limit and offset are both set to Some(0) + // Test mixed element proofs with limit and offset + let path_query = PathQuery::new_unsized(path.clone(), query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("successful get_path_query"); - 
let path_query = PathQuery::new(path.clone(), SizedQuery::new(query.clone(), Some(1), None)); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("successful get_path_query"); - - assert_eq!(elements.len(), 1); - assert_eq!(elements, vec![vec![2]]); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 1); - compare_result_sets(&elements, &result_set); - - let path_query = PathQuery::new( - path.clone(), - SizedQuery::new(query.clone(), Some(3), Some(0)), - ); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("successful get_path_query"); - - assert_eq!(elements.len(), 3); - assert_eq!(elements, vec![vec![2], vec![3], vec![4]]); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - compare_result_sets(&elements, &result_set); - - let path_query = PathQuery::new( - path.clone(), - SizedQuery::new(query.clone(), Some(4), Some(0)), - ); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("successful get_path_query"); + assert_eq!(elements.len(), 5); + assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1], vec![1]]); - assert_eq!(elements.len(), 4); - assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1]]); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); + compare_result_sets(&elements, &result_set); - let proof = 
db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 4); - compare_result_sets(&elements, &result_set); + // TODO: Fix noticed bug when limit and offset are both set to Some(0) - let path_query = PathQuery::new(path, SizedQuery::new(query.clone(), Some(10), Some(4))); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("successful get_path_query"); + let path_query = + PathQuery::new(path.clone(), SizedQuery::new(query.clone(), Some(1), None)); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("successful get_path_query"); + + assert_eq!(elements.len(), 1); + assert_eq!(elements, vec![vec![2]]); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 1); + compare_result_sets(&elements, &result_set); + + let path_query = PathQuery::new( + path.clone(), + SizedQuery::new(query.clone(), Some(3), Some(0)), + ); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("successful get_path_query"); + + assert_eq!(elements.len(), 3); + assert_eq!(elements, vec![vec![2], vec![3], vec![4]]); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + compare_result_sets(&elements, &result_set); + + let path_query = PathQuery::new( + path.clone(), + SizedQuery::new(query.clone(), Some(4), Some(0)), + ); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + 
.unwrap() + .expect("successful get_path_query"); - assert_eq!(elements.len(), 1); - assert_eq!(elements, vec![vec![1]]); -} + assert_eq!(elements.len(), 4); + assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1]]); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + compare_result_sets(&elements, &result_set); -#[test] -fn test_mixed_level_proofs_with_tree() { - let db = make_test_grovedb(); - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key3", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"k1", - Element::new_item(vec![2]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"k2", - Element::new_item(vec![3]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"k3", - Element::new_item(vec![4]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - db.insert( - [TEST_LEAF, b"key2"].as_ref(), - b"k1", - Element::new_item(vec![5]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - - let mut query = Query::new(); - query.insert_all(); - let mut subquery = Query::new(); - subquery.insert_all(); - query.add_conditional_subquery(QueryItem::Key(b"key1".to_vec()), None, Some(subquery)); - - let path = vec![TEST_LEAF.to_vec()]; - - let path_query = PathQuery::new_unsized(path.clone(), query.clone()); - - let (elements, _) = 
db - .query_raw( - &path_query, - true, - true, - true, - QueryResultType::QueryPathKeyElementTrioResultType, + let path_query = PathQuery::new(path, SizedQuery::new(query.clone(), Some(10), Some(4))); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("successful get_path_query"); + + assert_eq!(elements.len(), 1); + assert_eq!(elements, vec![vec![1]]); + } + + #[test] + fn test_mixed_level_proofs_with_tree() { + let db = make_test_grovedb(); + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key2", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key3", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"k1", + Element::new_item(vec![2]), + None, + None, + ) + .unwrap() + .expect("successful item insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"k2", + Element::new_item(vec![3]), + None, + None, + ) + .unwrap() + .expect("successful item insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"k3", + Element::new_item(vec![4]), + None, + None, + ) + .unwrap() + .expect("successful item insert"); + db.insert( + [TEST_LEAF, b"key2"].as_ref(), + b"k1", + Element::new_item(vec![5]), + None, None, ) .unwrap() - .expect("expected successful get_path_query"); + .expect("successful item insert"); - assert_eq!(elements.len(), 5); + let mut query = Query::new(); + query.insert_all(); + let mut subquery = Query::new(); + subquery.insert_all(); + query.add_conditional_subquery(QueryItem::Key(b"key1".to_vec()), None, Some(subquery)); - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - 
assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); + let path = vec![TEST_LEAF.to_vec()]; - // TODO: verify that the result set is exactly the same - // compare_result_sets(&elements, &result_set); + let path_query = PathQuery::new_unsized(path.clone(), query.clone()); - let path_query = PathQuery::new(path, SizedQuery::new(query.clone(), Some(1), None)); + let (elements, _) = db + .query_raw( + &path_query, + true, + true, + true, + QueryResultType::QueryPathKeyElementTrioResultType, + None, + ) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_raw( - &path_query, - true, - true, - true, - QueryResultType::QueryPathKeyElementTrioResultType, - None, - ) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 5); - assert_eq!(elements.len(), 1); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 1); - // TODO: verify that the result set is exactly the same - // compare_result_sets(&elements, &result_set); -} + // TODO: verify that the result set is exactly the same + // compare_result_sets(&elements, &result_set); -#[test] -fn test_mixed_level_proofs_with_subquery_paths() { - let db = make_test_grovedb(); - db.insert( - [TEST_LEAF].as_ref(), - b"a", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"b", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF].as_ref(), - 
b"c", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF, b"a"].as_ref(), - b"d", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"a"].as_ref(), - b"e", - Element::new_item(vec![2]), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"a"].as_ref(), - b"f", - Element::new_item(vec![3]), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF, b"a", b"d"].as_ref(), - b"d", - Element::new_item(vec![6]), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF, b"b"].as_ref(), - b"g", - Element::new_item(vec![4]), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"b"].as_ref(), - b"d", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF, b"b", b"d"].as_ref(), - b"i", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"b", b"d"].as_ref(), - b"j", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"b", b"d"].as_ref(), - b"k", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - // if you don't have an item at the subquery path translation, you shouldn't be - // added to the result set. 
- let mut query = Query::new(); - query.insert_all(); - query.set_subquery_path(vec![b"d".to_vec()]); - - let path = vec![TEST_LEAF.to_vec()]; - - let path_query = PathQuery::new_unsized(path, query.clone()); - - // TODO: proofs seems to be more expressive than query_raw now - // let (elements, _) = db - // .query_raw( - // &path_query, - // true, - // QueryResultType::QueryPathKeyElementTrioResultType, - // None, - // ) - // .unwrap() - // .expect("expected successful get_path_query"); - // - // assert_eq!(elements.len(), 2); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 2); - - // apply path translation then query - let mut query = Query::new(); - query.insert_all(); - let mut subquery = Query::new(); - subquery.insert_all(); - query.set_subquery_path(vec![b"d".to_vec()]); - query.set_subquery(subquery); - - let path = vec![TEST_LEAF.to_vec()]; - - let path_query = PathQuery::new_unsized(path, query.clone()); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 4); - - // apply empty path translation - let mut query = Query::new(); - query.insert_all(); - let mut subquery = Query::new(); - subquery.insert_all(); - query.set_subquery_path(vec![]); - query.set_subquery(subquery); - - let path = vec![TEST_LEAF.to_vec()]; - - let path_query = PathQuery::new_unsized(path, query.clone()); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - - // use conditionals to return from more than 2 depth - let 
mut query = Query::new(); - query.insert_all(); - let mut subquery = Query::new(); - subquery.insert_all(); - let mut deeper_subquery = Query::new(); - deeper_subquery.insert_all(); - subquery.add_conditional_subquery(QueryItem::Key(b"d".to_vec()), None, Some(deeper_subquery)); - query.add_conditional_subquery(QueryItem::Key(b"a".to_vec()), None, Some(subquery.clone())); - query.add_conditional_subquery(QueryItem::Key(b"b".to_vec()), None, Some(subquery.clone())); - - let path = vec![TEST_LEAF.to_vec()]; - - let path_query = PathQuery::new_unsized(path, query.clone()); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 8); -} + let path_query = PathQuery::new(path, SizedQuery::new(query.clone(), Some(1), None)); -#[test] -fn test_proof_with_limit_zero() { - let db = make_deep_tree(); - let mut query = Query::new(); - query.insert_all(); - let path_query = PathQuery::new( - vec![TEST_LEAF.to_vec()], - SizedQuery::new(query, Some(0), Some(0)), - ); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); -} + let (elements, _) = db + .query_raw( + &path_query, + true, + true, + true, + QueryResultType::QueryPathKeyElementTrioResultType, + None, + ) + .unwrap() + .expect("expected successful get_path_query"); -#[test] -fn test_result_set_path_after_verification() { - let db = make_deep_tree(); - let mut query = Query::new(); - query.insert_all(); - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - 
assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - // assert the result set path - assert_eq!( - result_set[0].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[1].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[2].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - - assert_eq!(result_set[0].key, b"key1".to_vec()); - assert_eq!(result_set[1].key, b"key2".to_vec()); - assert_eq!(result_set[2].key, b"key3".to_vec()); - - // Test path tracking with subquery - let mut query = Query::new(); - query.insert_all(); - let mut subq = Query::new(); - subq.insert_all(); - query.set_subquery(subq); - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - - assert_eq!( - result_set[0].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[1].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[2].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[3].path, - vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()] - ); - assert_eq!( - result_set[4].path, - vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()] - ); - - // Test path tracking with subquery path - // perform a query, do a translation, perform another query - let mut query = Query::new(); - query.insert_key(b"deep_leaf".to_vec()); - query.set_subquery_path(vec![b"deep_node_1".to_vec(), b"deeper_1".to_vec()]); - let mut subq = Query::new(); - subq.insert_all(); - query.set_subquery(subq); - let path_query = PathQuery::new_unsized(vec![], query); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, 
result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - assert_eq!( - result_set[0].path, - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec() - ] - ); - assert_eq!( - result_set[1].path, - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec() - ] - ); - assert_eq!( - result_set[2].path, - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec() - ] - ); - - assert_eq!(result_set[0].key, b"key1".to_vec()); - assert_eq!(result_set[1].key, b"key2".to_vec()); - assert_eq!(result_set[2].key, b"key3".to_vec()); - - // Test path tracking for mixed level result set - let mut query = Query::new(); - query.insert_all(); - let mut subq = Query::new(); - subq.insert_all(); - query.add_conditional_subquery(QueryItem::Key(b"innertree".to_vec()), None, Some(subq)); - - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 4); - - assert_eq!( - result_set[0].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[1].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[2].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!(result_set[3].path, vec![TEST_LEAF.to_vec()]); - - assert_eq!(result_set[0].key, b"key1".to_vec()); - assert_eq!(result_set[1].key, b"key2".to_vec()); - assert_eq!(result_set[2].key, b"key3".to_vec()); - assert_eq!(result_set[3].key, b"innertree4".to_vec()); -} + assert_eq!(elements.len(), 1); -#[test] -fn test_verification_with_path_key_optional_element_trio() { - let db = make_deep_tree(); - let mut query = Query::new(); - 
query.insert_all(); - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - assert_eq!( - result_set[0], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key1".to_vec(), - Some(Element::new_item(b"value1".to_vec())) + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 1); + // TODO: verify that the result set is exactly the same + // compare_result_sets(&elements, &result_set); + } + + #[test] + fn test_mixed_level_proofs_with_subquery_paths() { + let db = make_test_grovedb(); + db.insert( + [TEST_LEAF].as_ref(), + b"a", + Element::empty_tree(), + None, + None, ) - ); - assert_eq!( - result_set[1], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key2".to_vec(), - Some(Element::new_item(b"value2".to_vec())) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"b", + Element::empty_tree(), + None, + None, ) - ); - assert_eq!( - result_set[2], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key3".to_vec(), - Some(Element::new_item(b"value3".to_vec())) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"c", + Element::empty_tree(), + None, + None, ) - ); -} + .unwrap() + .expect("successful subtree insert"); -#[test] -fn test_absence_proof() { - let db = make_deep_tree(); - - // simple case, request for items k2..=k5 under inner tree - // we pass them as keys as terminal keys does not handle ranges with start or - // end len greater than 1 k2, k3 should be Some, k4, k5 should be None, k1, - 
// k6.. should not be in map - let mut query = Query::new(); - query.insert_key(b"key2".to_vec()); - query.insert_key(b"key3".to_vec()); - query.insert_key(b"key4".to_vec()); - query.insert_key(b"key5".to_vec()); - let path_query = PathQuery::new( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - SizedQuery::new(query, Some(4), None), - ); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_with_absence_proof(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 4); - - assert_eq!( - result_set[0].0, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[1].0, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[2].0, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[3].0, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - - assert_eq!(result_set[0].1, b"key2".to_vec()); - assert_eq!(result_set[1].1, b"key3".to_vec()); - assert_eq!(result_set[2].1, b"key4".to_vec()); - assert_eq!(result_set[3].1, b"key5".to_vec()); - - assert_eq!(result_set[0].2, Some(Element::new_item(b"value2".to_vec()))); - assert_eq!(result_set[1].2, Some(Element::new_item(b"value3".to_vec()))); - assert_eq!(result_set[2].2, None); - assert_eq!(result_set[3].2, None); -} + db.insert( + [TEST_LEAF, b"a"].as_ref(), + b"d", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF, b"a"].as_ref(), + b"e", + Element::new_item(vec![2]), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF, b"a"].as_ref(), + b"f", + Element::new_item(vec![3]), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); -#[test] -fn test_subset_proof_verification() { - let db = make_deep_tree(); - - // original path query - let mut query = Query::new(); - 
query.insert_all(); - let mut subq = Query::new(); - subq.insert_all(); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - // first we prove non-verbose - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - assert_eq!( - result_set[0], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key1".to_vec(), - Some(Element::new_item(b"value1".to_vec())) + db.insert( + [TEST_LEAF, b"a", b"d"].as_ref(), + b"d", + Element::new_item(vec![6]), + None, + None, ) - ); - assert_eq!( - result_set[1], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key2".to_vec(), - Some(Element::new_item(b"value2".to_vec())) + .unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF, b"b"].as_ref(), + b"g", + Element::new_item(vec![4]), + None, + None, ) - ); - assert_eq!( - result_set[2], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key3".to_vec(), - Some(Element::new_item(b"value3".to_vec())) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF, b"b"].as_ref(), + b"d", + Element::empty_tree(), + None, + None, ) - ); - assert_eq!( - result_set[3], - ( - vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()], - b"key4".to_vec(), - Some(Element::new_item(b"value4".to_vec())) + .unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF, b"b", b"d"].as_ref(), + b"i", + Element::empty_tree(), + None, + None, ) - ); - assert_eq!( - result_set[4], - ( - vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()], - b"key5".to_vec(), - Some(Element::new_item(b"value5".to_vec())) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF, b"b", b"d"].as_ref(), + b"j", + Element::empty_tree(), + None, + None, ) - ); - - // prove verbose - let verbose_proof = 
db.prove_query(&path_query, None).unwrap().unwrap(); - assert!(verbose_proof.len() > proof.len()); - - // subset path query - let mut query = Query::new(); - query.insert_key(b"innertree".to_vec()); - let mut subq = Query::new(); - subq.insert_key(b"key1".to_vec()); - query.set_subquery(subq); - let subset_path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - let (hash, result_set) = - GroveDb::verify_subset_query(&verbose_proof, &subset_path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 1); - assert_eq!( - result_set[0], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key1".to_vec(), - Some(Element::new_item(b"value1".to_vec())) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF, b"b", b"d"].as_ref(), + b"k", + Element::empty_tree(), + None, + None, ) - ); -} -// #[test] -// fn test_chained_path_query_verification() { -// let db = make_deep_tree(); -// -// let mut query = Query::new(); -// query.insert_all(); -// let mut subq = Query::new(); -// subq.insert_all(); -// let mut subsubq = Query::new(); -// subsubq.insert_all(); -// -// subq.set_subquery(subsubq); -// query.set_subquery(subq); -// -// let path_query = PathQuery::new_unsized(vec![b"deep_leaf".to_vec()], -// query); -// -// // first prove non verbose -// let proof = db.prove_query(&path_query, None).unwrap().unwrap(); -// let (hash, result_set) = GroveDb::verify_query(&proof, -// &path_query).unwrap(); assert_eq!(hash, -// db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 14); -// -// // prove verbose -// let verbose_proof = db.prove_query(&path_query, Some(ProveOptions { -// is_verbose: true, -// multilevel_results: false, -// })).unwrap().unwrap(); -// assert!(verbose_proof.len() > proof.len()); -// -// // init deeper_1 path query -// let mut query = Query::new(); -// query.insert_all(); -// -// let deeper_1_path_query = PathQuery::new_unsized( -// vec![ -// 
b"deep_leaf".to_vec(), -// b"deep_node_1".to_vec(), -// b"deeper_1".to_vec(), -// ], -// query, -// ); -// -// // define the path query generators -// let mut chained_path_queries = vec![]; -// chained_path_queries.push(|_elements: Vec| { -// let mut query = Query::new(); -// query.insert_all(); -// -// let deeper_2_path_query = PathQuery::new_unsized( -// vec![ -// b"deep_leaf".to_vec(), -// b"deep_node_1".to_vec(), -// b"deeper_2".to_vec(), -// ], -// query, -// ); -// Some(deeper_2_path_query) -// }); -// -// // verify the path query chain -// let (root_hash, results) = -// GroveDb::verify_query_with_chained_path_queries( &verbose_proof, -// &deeper_1_path_query, -// chained_path_queries, -// ) -// .unwrap(); -// assert_eq!(root_hash, db.root_hash(None).unwrap().unwrap()); -// assert_eq!(results.len(), 2); -// assert_eq!(results[0].len(), 3); -// assert_eq!( -// results[0][0], -// ( -// vec![ -// b"deep_leaf".to_vec(), -// b"deep_node_1".to_vec(), -// b"deeper_1".to_vec() -// ], -// b"key1".to_vec(), -// Some(Element::new_item(b"value1".to_vec())) -// ) -// ); -// assert_eq!( -// results[0][1], -// ( -// vec![ -// b"deep_leaf".to_vec(), -// b"deep_node_1".to_vec(), -// b"deeper_1".to_vec() -// ], -// b"key2".to_vec(), -// Some(Element::new_item(b"value2".to_vec())) -// ) -// ); -// assert_eq!( -// results[0][2], -// ( -// vec![ -// b"deep_leaf".to_vec(), -// b"deep_node_1".to_vec(), -// b"deeper_1".to_vec() -// ], -// b"key3".to_vec(), -// Some(Element::new_item(b"value3".to_vec())) -// ) -// ); -// -// assert_eq!(results[1].len(), 3); -// assert_eq!( -// results[1][0], -// ( -// vec![ -// b"deep_leaf".to_vec(), -// b"deep_node_1".to_vec(), -// b"deeper_2".to_vec() -// ], -// b"key4".to_vec(), -// Some(Element::new_item(b"value4".to_vec())) -// ) -// ); -// assert_eq!( -// results[1][1], -// ( -// vec![ -// b"deep_leaf".to_vec(), -// b"deep_node_1".to_vec(), -// b"deeper_2".to_vec() -// ], -// b"key5".to_vec(), -// Some(Element::new_item(b"value5".to_vec())) -// 
) -// ); -// assert_eq!( -// results[1][2], -// ( -// vec![ -// b"deep_leaf".to_vec(), -// b"deep_node_1".to_vec(), -// b"deeper_2".to_vec() -// ], -// b"key6".to_vec(), -// Some(Element::new_item(b"value6".to_vec())) -// ) -// ); -// } -// -// #[test] -// fn test_query_b_depends_on_query_a() { -// // we have two trees -// // one with a mapping of id to name -// // another with a mapping of name to age -// // we want to get the age of every one after a certain id ordered by name -// let db = make_test_grovedb(); -// -// // TEST_LEAF contains the id to name mapping -// db.insert( -// [TEST_LEAF].as_ref(), -// &[1], -// Element::new_item(b"d".to_vec()), -// None, -// None, -// ) -// .unwrap() -// .expect("successful root tree leaf insert"); -// db.insert( -// [TEST_LEAF].as_ref(), -// &[2], -// Element::new_item(b"b".to_vec()), -// None, -// None, -// ) -// .unwrap() -// .expect("successful root tree leaf insert"); -// db.insert( -// [TEST_LEAF].as_ref(), -// &[3], -// Element::new_item(b"c".to_vec()), -// None, -// None, -// ) -// .unwrap() -// .expect("successful root tree leaf insert"); -// db.insert( -// [TEST_LEAF].as_ref(), -// &[4], -// Element::new_item(b"a".to_vec()), -// None, -// None, -// ) -// .unwrap() -// .expect("successful root tree leaf insert"); -// -// // ANOTHER_TEST_LEAF contains the name to age mapping -// db.insert( -// [ANOTHER_TEST_LEAF].as_ref(), -// b"a", -// Element::new_item(vec![10]), -// None, -// None, -// ) -// .unwrap() -// .expect("successful root tree leaf insert"); -// db.insert( -// [ANOTHER_TEST_LEAF].as_ref(), -// b"b", -// Element::new_item(vec![30]), -// None, -// None, -// ) -// .unwrap() -// .expect("successful root tree leaf insert"); -// db.insert( -// [ANOTHER_TEST_LEAF].as_ref(), -// b"c", -// Element::new_item(vec![12]), -// None, -// None, -// ) -// .unwrap() -// .expect("successful root tree leaf insert"); -// db.insert( -// [ANOTHER_TEST_LEAF].as_ref(), -// b"d", -// Element::new_item(vec![46]), -// None, -// None, 
-// ) -// .unwrap() -// .expect("successful root tree leaf insert"); -// -// // Query: return the age of everyone greater than id 2 ordered by name -// // id 2 - b -// // so we want to return the age for c and d = 12, 46 respectively -// // the proof generator knows that id 2 = b, but the verifier doesn't -// // hence we need to generate two proofs -// // prove that 2 - b then prove age after b -// // the verifier has to use the result of the first proof 2 - b -// // to generate the path query for the verification of the second proof -// -// // query name associated with id 2 -// let mut query = Query::new(); -// query.insert_key(vec![2]); -// let mut path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], -// query); -// -// // first we show that this returns the correct output -// let proof = db.prove_query(&path_query_one, None).unwrap().unwrap(); -// let (hash, result_set) = GroveDb::verify_query(&proof, -// &path_query_one).unwrap(); assert_eq!(hash, -// db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 1); -// assert_eq!(result_set[0].2, Some(Element::new_item(b"b".to_vec()))); -// -// // next query should return the age for elements above b -// let mut query = Query::new(); -// query.insert_range_after(b"b".to_vec()..); -// let path_query_two = -// PathQuery::new_unsized(vec![ANOTHER_TEST_LEAF.to_vec()], query); -// -// // show that we get the correct output -// let proof = db.prove_query(&path_query_two, None).unwrap().unwrap(); -// let (hash, result_set) = GroveDb::verify_query(&proof, -// &path_query_two).unwrap(); assert_eq!(hash, -// db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 2); -// assert_eq!(result_set[0].2, Some(Element::new_item(vec![12]))); -// assert_eq!(result_set[1].2, Some(Element::new_item(vec![46]))); -// -// // now we merge the path queries -// let mut merged_path_queries = PathQuery::merge(vec![&path_query_one, -// &path_query_two]).unwrap(); merged_path_queries.query.limit = Some(3); 
-// let proof = db.prove_query(&merged_path_queries, Some(ProveOptions { -// is_verbose: true, -// multilevel_results: false, -// })).unwrap().unwrap(); -// -// // verifier only has access to the statement age > 2 -// // need to first get the name associated with 2 from the proof -// // then use that to construct the next path query -// let mut chained_path_queries = vec![]; -// chained_path_queries.push(|prev_elements: -// Vec| { let mut query = Query::new(); -// let name_element = prev_elements[0].2.as_ref().unwrap(); -// if let Element::Item(name, ..) = name_element { -// query.insert_range_after(name.to_owned()..); -// Some(PathQuery::new( -// vec![ANOTHER_TEST_LEAF.to_vec()], -// SizedQuery::new(query, Some(2), None), -// )) -// } else { -// None -// } -// }); -// -// // add limit to path query one -// path_query_one.query.limit = Some(1); -// -// let (_, result_set) = GroveDb::verify_query_with_chained_path_queries( -// proof.as_slice(), -// &path_query_one, -// chained_path_queries, -// ) -// .unwrap(); -// assert_eq!(result_set.len(), 2); -// assert_eq!(result_set[0].len(), 1); -// assert_eq!(result_set[1].len(), 2); -// -// let age_result = result_set[1].clone(); -// assert_eq!(age_result[0].2, Some(Element::new_item(vec![12]))); -// assert_eq!(age_result[1].2, Some(Element::new_item(vec![46]))); -// } - -#[test] -fn test_prove_absent_path_with_intermediate_emtpy_tree() { - // root - // test_leaf (empty) - let grovedb = make_test_grovedb(); - - // prove the absence of key "book" in ["test_leaf", "invalid"] - let mut query = Query::new(); - query.insert_key(b"book".to_vec()); - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"invalid".to_vec()], query); - - let proof = grovedb - .prove_query(&path_query, None) .unwrap() - .expect("should generate proofs"); + .expect("successful subtree insert"); + + // if you don't have an item at the subquery path translation, you shouldn't be + // added to the result set. 
+ let mut query = Query::new(); + query.insert_all(); + query.set_subquery_path(vec![b"d".to_vec()]); + + let path = vec![TEST_LEAF.to_vec()]; + + let path_query = PathQuery::new_unsized(path, query.clone()); + + // TODO: proofs seems to be more expressive than query_raw now + // let (elements, _) = db + // .query_raw( + // &path_query, + // true, + // QueryResultType::QueryPathKeyElementTrioResultType, + // None, + // ) + // .unwrap() + // .expect("expected successful get_path_query"); + // + // assert_eq!(elements.len(), 2); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 2); + + // apply path translation then query + let mut query = Query::new(); + query.insert_all(); + let mut subquery = Query::new(); + subquery.insert_all(); + query.set_subquery_path(vec![b"d".to_vec()]); + query.set_subquery(subquery); + + let path = vec![TEST_LEAF.to_vec()]; + + let path_query = PathQuery::new_unsized(path, query.clone()); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + + // apply empty path translation + let mut query = Query::new(); + query.insert_all(); + let mut subquery = Query::new(); + subquery.insert_all(); + query.set_subquery_path(vec![]); + query.set_subquery(subquery); + + let path = vec![TEST_LEAF.to_vec()]; + + let path_query = PathQuery::new_unsized(path, query.clone()); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); + + // use conditionals to return from more than 2 depth + let 
mut query = Query::new(); + query.insert_all(); + let mut subquery = Query::new(); + subquery.insert_all(); + let mut deeper_subquery = Query::new(); + deeper_subquery.insert_all(); + subquery.add_conditional_subquery( + QueryItem::Key(b"d".to_vec()), + None, + Some(deeper_subquery), + ); + query.add_conditional_subquery(QueryItem::Key(b"a".to_vec()), None, Some(subquery.clone())); + query.add_conditional_subquery(QueryItem::Key(b"b".to_vec()), None, Some(subquery.clone())); + + let path = vec![TEST_LEAF.to_vec()]; + + let path_query = PathQuery::new_unsized(path, query.clone()); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 8); + } + + #[test] + fn test_proof_with_limit_zero() { + let db = make_deep_tree(); + let mut query = Query::new(); + query.insert_all(); + let path_query = PathQuery::new( + vec![TEST_LEAF.to_vec()], + SizedQuery::new(query, Some(0), Some(0)), + ); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); + } + + #[test] + fn test_result_set_path_after_verification() { + let db = make_deep_tree(); + let mut query = Query::new(); + query.insert_all(); + let path_query = + PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + + // assert the result set path + assert_eq!( + result_set[0].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[1].path, + 
vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[2].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + + assert_eq!(result_set[0].key, b"key1".to_vec()); + assert_eq!(result_set[1].key, b"key2".to_vec()); + assert_eq!(result_set[2].key, b"key3".to_vec()); + + // Test path tracking with subquery + let mut query = Query::new(); + query.insert_all(); + let mut subq = Query::new(); + subq.insert_all(); + query.set_subquery(subq); + let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); + + assert_eq!( + result_set[0].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[1].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[2].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[3].path, + vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()] + ); + assert_eq!( + result_set[4].path, + vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()] + ); + + // Test path tracking with subquery path + // perform a query, do a translation, perform another query + let mut query = Query::new(); + query.insert_key(b"deep_leaf".to_vec()); + query.set_subquery_path(vec![b"deep_node_1".to_vec(), b"deeper_1".to_vec()]); + let mut subq = Query::new(); + subq.insert_all(); + query.set_subquery(subq); + let path_query = PathQuery::new_unsized(vec![], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + + assert_eq!( + result_set[0].path, + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + 
b"deeper_1".to_vec() + ] + ); + assert_eq!( + result_set[1].path, + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_1".to_vec() + ] + ); + assert_eq!( + result_set[2].path, + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_1".to_vec() + ] + ); + + assert_eq!(result_set[0].key, b"key1".to_vec()); + assert_eq!(result_set[1].key, b"key2".to_vec()); + assert_eq!(result_set[2].key, b"key3".to_vec()); + + // Test path tracking for mixed level result set + let mut query = Query::new(); + query.insert_all(); + let mut subq = Query::new(); + subq.insert_all(); + query.add_conditional_subquery(QueryItem::Key(b"innertree".to_vec()), None, Some(subq)); + + let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + + assert_eq!( + result_set[0].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[1].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[2].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!(result_set[3].path, vec![TEST_LEAF.to_vec()]); + + assert_eq!(result_set[0].key, b"key1".to_vec()); + assert_eq!(result_set[1].key, b"key2".to_vec()); + assert_eq!(result_set[2].key, b"key3".to_vec()); + assert_eq!(result_set[3].key, b"innertree4".to_vec()); + } - let (root_hash, result_set) = - GroveDb::verify_query(proof.as_slice(), &path_query).expect("should verify proof"); - assert_eq!(result_set.len(), 0); - assert_eq!(root_hash, grovedb.root_hash(None).unwrap().unwrap()); + #[test] + fn test_verification_with_path_key_optional_element_trio() { + let db = make_deep_tree(); + let mut query = Query::new(); + query.insert_all(); + let path_query = + 
PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + + assert_eq!( + result_set[0], + ( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key1".to_vec(), + Some(Element::new_item(b"value1".to_vec())) + ) + ); + assert_eq!( + result_set[1], + ( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key2".to_vec(), + Some(Element::new_item(b"value2".to_vec())) + ) + ); + assert_eq!( + result_set[2], + ( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key3".to_vec(), + Some(Element::new_item(b"value3".to_vec())) + ) + ); + } + + #[test] + fn test_absence_proof() { + let db = make_deep_tree(); + + // simple case, request for items k2..=k5 under inner tree + // we pass them as keys as terminal keys does not handle ranges with start or + // end len greater than 1 k2, k3 should be Some, k4, k5 should be None, k1, + // k6.. 
should not be in map + let mut query = Query::new(); + query.insert_key(b"key2".to_vec()); + query.insert_key(b"key3".to_vec()); + query.insert_key(b"key4".to_vec()); + query.insert_key(b"key5".to_vec()); + let path_query = PathQuery::new( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + SizedQuery::new(query, Some(4), None), + ); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_with_absence_proof(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + + assert_eq!( + result_set[0].0, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[1].0, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[2].0, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[3].0, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + + assert_eq!(result_set[0].1, b"key2".to_vec()); + assert_eq!(result_set[1].1, b"key3".to_vec()); + assert_eq!(result_set[2].1, b"key4".to_vec()); + assert_eq!(result_set[3].1, b"key5".to_vec()); + + assert_eq!(result_set[0].2, Some(Element::new_item(b"value2".to_vec()))); + assert_eq!(result_set[1].2, Some(Element::new_item(b"value3".to_vec()))); + assert_eq!(result_set[2].2, None); + assert_eq!(result_set[3].2, None); + } + + #[test] + fn test_subset_proof_verification() { + let db = make_deep_tree(); + + // original path query + let mut query = Query::new(); + query.insert_all(); + let mut subq = Query::new(); + subq.insert_all(); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + // first we prove non-verbose + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); + assert_eq!( + 
result_set[0], + ( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key1".to_vec(), + Some(Element::new_item(b"value1".to_vec())) + ) + ); + assert_eq!( + result_set[1], + ( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key2".to_vec(), + Some(Element::new_item(b"value2".to_vec())) + ) + ); + assert_eq!( + result_set[2], + ( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key3".to_vec(), + Some(Element::new_item(b"value3".to_vec())) + ) + ); + assert_eq!( + result_set[3], + ( + vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()], + b"key4".to_vec(), + Some(Element::new_item(b"value4".to_vec())) + ) + ); + assert_eq!( + result_set[4], + ( + vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()], + b"key5".to_vec(), + Some(Element::new_item(b"value5".to_vec())) + ) + ); + + // prove verbose + let verbose_proof = db.prove_query(&path_query, None).unwrap().unwrap(); + assert!(verbose_proof.len() > proof.len()); + + // subset path query + let mut query = Query::new(); + query.insert_key(b"innertree".to_vec()); + let mut subq = Query::new(); + subq.insert_key(b"key1".to_vec()); + query.set_subquery(subq); + let subset_path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + let (hash, result_set) = + GroveDb::verify_subset_query(&verbose_proof, &subset_path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 1); + assert_eq!( + result_set[0], + ( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key1".to_vec(), + Some(Element::new_item(b"value1".to_vec())) + ) + ); + } + // #[test] + // fn test_chained_path_query_verification() { + // let db = make_deep_tree(); + // + // let mut query = Query::new(); + // query.insert_all(); + // let mut subq = Query::new(); + // subq.insert_all(); + // let mut subsubq = Query::new(); + // subsubq.insert_all(); + // + // subq.set_subquery(subsubq); + // query.set_subquery(subq); + // + // let path_query = 
PathQuery::new_unsized(vec![b"deep_leaf".to_vec()], + // query); + // + // // first prove non verbose + // let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + // let (hash, result_set) = GroveDb::verify_query(&proof, + // &path_query).unwrap(); assert_eq!(hash, + // db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 14); + // + // // prove verbose + // let verbose_proof = db.prove_query(&path_query, Some(ProveOptions { + // is_verbose: true, + // multilevel_results: false, + // })).unwrap().unwrap(); + // assert!(verbose_proof.len() > proof.len()); + // + // // init deeper_1 path query + // let mut query = Query::new(); + // query.insert_all(); + // + // let deeper_1_path_query = PathQuery::new_unsized( + // vec![ + // b"deep_leaf".to_vec(), + // b"deep_node_1".to_vec(), + // b"deeper_1".to_vec(), + // ], + // query, + // ); + // + // // define the path query generators + // let mut chained_path_queries = vec![]; + // chained_path_queries.push(|_elements: Vec| { + // let mut query = Query::new(); + // query.insert_all(); + // + // let deeper_2_path_query = PathQuery::new_unsized( + // vec![ + // b"deep_leaf".to_vec(), + // b"deep_node_1".to_vec(), + // b"deeper_2".to_vec(), + // ], + // query, + // ); + // Some(deeper_2_path_query) + // }); + // + // // verify the path query chain + // let (root_hash, results) = + // GroveDb::verify_query_with_chained_path_queries( &verbose_proof, + // &deeper_1_path_query, + // chained_path_queries, + // ) + // .unwrap(); + // assert_eq!(root_hash, db.root_hash(None).unwrap().unwrap()); + // assert_eq!(results.len(), 2); + // assert_eq!(results[0].len(), 3); + // assert_eq!( + // results[0][0], + // ( + // vec![ + // b"deep_leaf".to_vec(), + // b"deep_node_1".to_vec(), + // b"deeper_1".to_vec() + // ], + // b"key1".to_vec(), + // Some(Element::new_item(b"value1".to_vec())) + // ) + // ); + // assert_eq!( + // results[0][1], + // ( + // vec![ + // b"deep_leaf".to_vec(), + // b"deep_node_1".to_vec(), 
+ // b"deeper_1".to_vec() + // ], + // b"key2".to_vec(), + // Some(Element::new_item(b"value2".to_vec())) + // ) + // ); + // assert_eq!( + // results[0][2], + // ( + // vec![ + // b"deep_leaf".to_vec(), + // b"deep_node_1".to_vec(), + // b"deeper_1".to_vec() + // ], + // b"key3".to_vec(), + // Some(Element::new_item(b"value3".to_vec())) + // ) + // ); + // + // assert_eq!(results[1].len(), 3); + // assert_eq!( + // results[1][0], + // ( + // vec![ + // b"deep_leaf".to_vec(), + // b"deep_node_1".to_vec(), + // b"deeper_2".to_vec() + // ], + // b"key4".to_vec(), + // Some(Element::new_item(b"value4".to_vec())) + // ) + // ); + // assert_eq!( + // results[1][1], + // ( + // vec![ + // b"deep_leaf".to_vec(), + // b"deep_node_1".to_vec(), + // b"deeper_2".to_vec() + // ], + // b"key5".to_vec(), + // Some(Element::new_item(b"value5".to_vec())) + // ) + // ); + // assert_eq!( + // results[1][2], + // ( + // vec![ + // b"deep_leaf".to_vec(), + // b"deep_node_1".to_vec(), + // b"deeper_2".to_vec() + // ], + // b"key6".to_vec(), + // Some(Element::new_item(b"value6".to_vec())) + // ) + // ); + // } + // + // #[test] + // fn test_query_b_depends_on_query_a() { + // // we have two trees + // // one with a mapping of id to name + // // another with a mapping of name to age + // // we want to get the age of every one after a certain id ordered by name + // let db = make_test_grovedb(); + // + // // TEST_LEAF contains the id to name mapping + // db.insert( + // [TEST_LEAF].as_ref(), + // &[1], + // Element::new_item(b"d".to_vec()), + // None, + // None, + // ) + // .unwrap() + // .expect("successful root tree leaf insert"); + // db.insert( + // [TEST_LEAF].as_ref(), + // &[2], + // Element::new_item(b"b".to_vec()), + // None, + // None, + // ) + // .unwrap() + // .expect("successful root tree leaf insert"); + // db.insert( + // [TEST_LEAF].as_ref(), + // &[3], + // Element::new_item(b"c".to_vec()), + // None, + // None, + // ) + // .unwrap() + // .expect("successful root tree 
leaf insert"); + // db.insert( + // [TEST_LEAF].as_ref(), + // &[4], + // Element::new_item(b"a".to_vec()), + // None, + // None, + // ) + // .unwrap() + // .expect("successful root tree leaf insert"); + // + // // ANOTHER_TEST_LEAF contains the name to age mapping + // db.insert( + // [ANOTHER_TEST_LEAF].as_ref(), + // b"a", + // Element::new_item(vec![10]), + // None, + // None, + // ) + // .unwrap() + // .expect("successful root tree leaf insert"); + // db.insert( + // [ANOTHER_TEST_LEAF].as_ref(), + // b"b", + // Element::new_item(vec![30]), + // None, + // None, + // ) + // .unwrap() + // .expect("successful root tree leaf insert"); + // db.insert( + // [ANOTHER_TEST_LEAF].as_ref(), + // b"c", + // Element::new_item(vec![12]), + // None, + // None, + // ) + // .unwrap() + // .expect("successful root tree leaf insert"); + // db.insert( + // [ANOTHER_TEST_LEAF].as_ref(), + // b"d", + // Element::new_item(vec![46]), + // None, + // None, + // ) + // .unwrap() + // .expect("successful root tree leaf insert"); + // + // // Query: return the age of everyone greater than id 2 ordered by name + // // id 2 - b + // // so we want to return the age for c and d = 12, 46 respectively + // // the proof generator knows that id 2 = b, but the verifier doesn't + // // hence we need to generate two proofs + // // prove that 2 - b then prove age after b + // // the verifier has to use the result of the first proof 2 - b + // // to generate the path query for the verification of the second proof + // + // // query name associated with id 2 + // let mut query = Query::new(); + // query.insert_key(vec![2]); + // let mut path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], + // query); + // + // // first we show that this returns the correct output + // let proof = db.prove_query(&path_query_one, None).unwrap().unwrap(); + // let (hash, result_set) = GroveDb::verify_query(&proof, + // &path_query_one).unwrap(); assert_eq!(hash, + // 
db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 1); + // assert_eq!(result_set[0].2, Some(Element::new_item(b"b".to_vec()))); + // + // // next query should return the age for elements above b + // let mut query = Query::new(); + // query.insert_range_after(b"b".to_vec()..); + // let path_query_two = + // PathQuery::new_unsized(vec![ANOTHER_TEST_LEAF.to_vec()], query); + // + // // show that we get the correct output + // let proof = db.prove_query(&path_query_two, None).unwrap().unwrap(); + // let (hash, result_set) = GroveDb::verify_query(&proof, + // &path_query_two).unwrap(); assert_eq!(hash, + // db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 2); + // assert_eq!(result_set[0].2, Some(Element::new_item(vec![12]))); + // assert_eq!(result_set[1].2, Some(Element::new_item(vec![46]))); + // + // // now we merge the path queries + // let mut merged_path_queries = PathQuery::merge(vec![&path_query_one, + // &path_query_two]).unwrap(); merged_path_queries.query.limit = Some(3); + // let proof = db.prove_query(&merged_path_queries, Some(ProveOptions { + // is_verbose: true, + // multilevel_results: false, + // })).unwrap().unwrap(); + // + // // verifier only has access to the statement age > 2 + // // need to first get the name associated with 2 from the proof + // // then use that to construct the next path query + // let mut chained_path_queries = vec![]; + // chained_path_queries.push(|prev_elements: + // Vec| { let mut query = Query::new(); + // let name_element = prev_elements[0].2.as_ref().unwrap(); + // if let Element::Item(name, ..) 
= name_element { + // query.insert_range_after(name.to_owned()..); + // Some(PathQuery::new( + // vec![ANOTHER_TEST_LEAF.to_vec()], + // SizedQuery::new(query, Some(2), None), + // )) + // } else { + // None + // } + // }); + // + // // add limit to path query one + // path_query_one.query.limit = Some(1); + // + // let (_, result_set) = GroveDb::verify_query_with_chained_path_queries( + // proof.as_slice(), + // &path_query_one, + // chained_path_queries, + // ) + // .unwrap(); + // assert_eq!(result_set.len(), 2); + // assert_eq!(result_set[0].len(), 1); + // assert_eq!(result_set[1].len(), 2); + // + // let age_result = result_set[1].clone(); + // assert_eq!(age_result[0].2, Some(Element::new_item(vec![12]))); + // assert_eq!(age_result[1].2, Some(Element::new_item(vec![46]))); + // } + + #[test] + fn test_prove_absent_path_with_intermediate_emtpy_tree() { + // root + // test_leaf (empty) + let grovedb = make_test_grovedb(); + + // prove the absence of key "book" in ["test_leaf", "invalid"] + let mut query = Query::new(); + query.insert_key(b"book".to_vec()); + let path_query = + PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"invalid".to_vec()], query); + + let proof = grovedb + .prove_query(&path_query, None) + .unwrap() + .expect("should generate proofs"); + + let (root_hash, result_set) = + GroveDb::verify_query(proof.as_slice(), &path_query).expect("should verify proof"); + assert_eq!(result_set.len(), 0); + assert_eq!(root_hash, grovedb.root_hash(None).unwrap().unwrap()); + } } diff --git a/merk/src/lib.rs b/merk/src/lib.rs index 356bd5b8..e1a35d1d 100644 --- a/merk/src/lib.rs +++ b/merk/src/lib.rs @@ -72,10 +72,6 @@ mod visualize; pub use ed; #[cfg(feature = "full")] pub use error::Error; -#[cfg(any(feature = "full", feature = "verify"))] -pub use proofs::query::execute_proof; -#[cfg(any(feature = "full", feature = "verify"))] -pub use proofs::query::verify_query; #[cfg(feature = "full")] pub use tree::{ BatchEntry, Link, MerkBatch, Op, PanicSource, 
HASH_BLOCK_SIZE, HASH_BLOCK_SIZE_U32, diff --git a/merk/src/proofs/query/mod.rs b/merk/src/proofs/query/mod.rs index ddc1a832..ac75779d 100644 --- a/merk/src/proofs/query/mod.rs +++ b/merk/src/proofs/query/mod.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Query proofs #[cfg(feature = "full")] @@ -44,7 +16,7 @@ mod verify; #[cfg(any(feature = "full", feature = "verify"))] use std::cmp::Ordering; -use std::{borrow::Cow, collections::HashSet, fmt, ops::RangeFull}; +use std::{collections::HashSet, fmt, ops::RangeFull}; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_costs::{cost_return_on_error, CostContext, CostResult, CostsExt, OperationCost}; @@ -59,7 +31,9 @@ pub use query_item::QueryItem; #[cfg(any(feature = "full", feature = "verify"))] use verify::ProofAbsenceLimit; #[cfg(any(feature = "full", feature = "verify"))] -pub use verify::{execute_proof, verify_query, ProofVerificationResult, ProvedKeyValue}; +pub use verify::VerifyOptions; +#[cfg(any(feature = "full", feature = "verify"))] +pub use verify::{ProofVerificationResult, ProvedKeyOptionalValue, ProvedKeyValue}; #[cfg(feature = "full")] use {super::Op, std::collections::LinkedList}; @@ -811,11 +785,7 @@ mod test { *, }; use crate::{ - proofs::query::{ - query_item::QueryItem::RangeAfter, - verify, - verify::{verify_query, ProvedKeyValue}, - }, + proofs::query::{query_item::QueryItem::RangeAfter, verify, verify::ProvedKeyValue}, test_utils::make_tree_seq, tree::{NoopCommit, PanicSource, RefWalker, TreeNode}, TreeFeatureType::BasicMerkNode, @@ -911,7 +881,8 @@ mod test { query.insert_key(key.clone()); } - let result = verify_query(bytes.as_slice(), &query, None, true, expected_hash) + let result = query + .verify_proof(bytes.as_slice(), None, true, expected_hash) .unwrap() .expect("verify failed"); @@ -1216,7 +1187,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); @@ -1270,7 +1242,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = 
verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); @@ -1328,7 +1301,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples(res.result_set, vec![(vec![3], vec![3]), (vec![7], vec![7])]); @@ -1394,7 +1368,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -1450,7 +1425,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples(res.result_set, vec![]); @@ -1506,7 +1482,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples(res.result_set, vec![]); @@ -1693,7 +1670,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -1839,7 +1817,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, 
true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -1869,15 +1848,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], @@ -1902,15 +1876,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); @@ -1932,15 +1901,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); @@ -1962,7 +1926,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -2064,7 +2029,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ 
-2095,15 +2061,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], @@ -2128,15 +2089,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![(vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60])], @@ -2161,15 +2117,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); @@ -2191,7 +2142,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); @@ -2221,7 +2173,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); @@ -2296,7 +2249,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + 
.verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -2330,15 +2284,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); @@ -2371,15 +2320,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); @@ -2408,15 +2352,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(100), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])], @@ -2439,15 +2378,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); @@ -2467,15 +2401,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), 
- true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![8], vec![8])]); assert_eq!(res.limit, Some(0)); @@ -2495,15 +2424,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); @@ -2525,7 +2449,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -2550,15 +2475,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(2), false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![7], vec![7]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); } @@ -2650,7 +2570,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -2689,15 +2610,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), 
Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); @@ -2726,15 +2642,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); @@ -2763,15 +2674,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(100), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -2799,15 +2705,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); @@ -2827,15 +2728,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); @@ -2855,15 +2751,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - 
tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); @@ -2885,7 +2776,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -2915,15 +2807,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(2), false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); } @@ -3015,7 +2902,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -3054,15 +2942,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); @@ -3091,15 +2974,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(2), 
true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); @@ -3128,15 +3006,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(100), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -3164,15 +3037,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); @@ -3192,15 +3060,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); @@ -3220,15 +3083,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); @@ -3250,7 +3108,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + let res = query + 
.verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -3280,15 +3139,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); } @@ -3380,7 +3234,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -3419,15 +3274,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); @@ -3456,15 +3306,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); @@ -3493,15 +3338,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(100), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, 
tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -3529,15 +3369,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); @@ -3557,15 +3392,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); @@ -3585,15 +3415,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); @@ -3615,7 +3440,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -3645,15 +3471,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(3), - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(3), false, tree.hash().unwrap()) + .unwrap() + .unwrap(); 
compare_result_tuples( res.result_set, vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])], @@ -3743,7 +3564,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); @@ -3774,15 +3596,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); @@ -3811,15 +3628,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); @@ -3848,15 +3660,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(100), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(98)); @@ -3876,15 +3683,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - 
.unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); @@ -3904,15 +3706,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); @@ -3932,15 +3729,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); @@ -3962,7 +3754,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); @@ -3984,15 +3777,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(300), - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(300), false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(299)); } @@ -4078,7 +3866,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, 
tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -4112,15 +3901,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); @@ -4149,15 +3933,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); @@ -4186,15 +3965,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(100), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])], @@ -4217,15 +3991,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); @@ -4245,15 +4014,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = 
verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); @@ -4273,15 +4037,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); @@ -4413,7 +4172,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -4454,15 +4214,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); @@ -4491,15 +4246,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); @@ -4528,15 +4278,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let 
res = verify_query( - bytes.as_slice(), - &query, - Some(100), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![ @@ -4566,15 +4311,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(3), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(3), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples( res.result_set, vec![(vec![3], vec![3]), (vec![4], vec![4]), (vec![5], vec![5])], @@ -4597,15 +4337,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); @@ -4625,15 +4360,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); @@ -4655,7 +4385,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -4687,15 +4418,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - 
bytes.as_slice(), - &query, - Some(2), - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(2), false, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); } @@ -4766,15 +4492,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); } @@ -4848,15 +4569,10 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); } @@ -4950,7 +4666,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, false, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -5054,7 +4771,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -5155,7 +4873,8 @@ mod test { for item in queryitems { query.insert_item(item); } - let res = verify_query(bytes.as_slice(), &query, None, true, tree.hash().unwrap()) + let res = query + 
.verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); compare_result_tuples( @@ -5186,7 +4905,8 @@ mod test { let mut query = Query::new(); query.insert_key(vec![0, 0, 0, 0, 0, 0, 0, 6]); - let res = verify_query(bytes.as_slice(), &query, None, true, expected_hash) + let res = query + .verify_proof(bytes.as_slice(), None, true, expected_hash) .unwrap() .unwrap(); @@ -5211,7 +4931,8 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 3]..=vec![0, 0, 0, 0, 0, 0, 0, 4]); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 7]..=vec![0, 0, 0, 0, 0, 0, 0, 8]); - let res = verify_query(bytes.as_slice(), &query, None, true, expected_hash) + let res = query + .verify_proof(bytes.as_slice(), None, true, expected_hash) .unwrap() .unwrap(); @@ -5240,7 +4961,8 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 4]..=vec![0, 0, 0, 0, 0, 0, 0, 8]); - let res = verify_query(bytes.as_slice(), &query, None, true, expected_hash) + let res = query + .verify_proof(bytes.as_slice(), None, true, expected_hash) .unwrap() .unwrap(); @@ -5270,7 +4992,8 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 1]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); - let res = verify_query(bytes.as_slice(), &query, None, true, expected_hash) + let res = query + .verify_proof(bytes.as_slice(), None, true, expected_hash) .unwrap() .unwrap(); @@ -5299,7 +5022,8 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 1]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); - let res = verify_query(bytes.as_slice(), &query, Some(5), true, expected_hash) + let res = query + .verify_proof(bytes.as_slice(), Some(5), true, expected_hash) .unwrap() .unwrap(); @@ -5328,7 +5052,8 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 1]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); - let res = 
verify_query(bytes.as_slice(), &query, None, true, expected_hash) + let res = query + .verify_proof(bytes.as_slice(), None, true, expected_hash) .unwrap() .unwrap(); @@ -5373,30 +5098,27 @@ mod test { // Try to query 4 let mut query = Query::new(); query.insert_key(vec![0, 0, 0, 0, 0, 0, 0, 4]); - assert!( - verify_query(bytes.as_slice(), &query, Some(3), true, expected_hash) - .unwrap() - .is_err() - ); + assert!(query + .verify_proof(bytes.as_slice(), Some(3), true, expected_hash) + .unwrap() + .is_err()); // if limit offset parameters are different from generation then proof // verification returns an error Try superset proof with increased limit let mut query = Query::new(); query.insert_range_from(vec![0, 0, 0, 0, 0, 0, 0, 1]..); - assert!( - verify_query(bytes.as_slice(), &query, Some(4), true, expected_hash) - .unwrap() - .is_err() - ); + assert!(query + .verify_proof(bytes.as_slice(), Some(4), true, expected_hash) + .unwrap() + .is_err()); // Try superset proof with less limit let mut query = Query::new(); query.insert_range_from(vec![0, 0, 0, 0, 0, 0, 0, 1]..); - assert!( - verify_query(bytes.as_slice(), &query, Some(2), true, expected_hash) - .unwrap() - .is_err() - ); + assert!(query + .verify_proof(bytes.as_slice(), Some(2), true, expected_hash) + .unwrap() + .is_err()); } #[test] @@ -5516,7 +5238,8 @@ mod test { query.insert_key(key.clone()); } - let _result = verify_query(bytes.as_slice(), &query, None, true, [42; 32]) + let _result = query + .verify_proof(bytes.as_slice(), None, true, [42; 32]) .unwrap() .expect("verify failed"); } diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs index 4098a051..fa2343ee 100644 --- a/merk/src/proofs/query/verify.rs +++ b/merk/src/proofs/query/verify.rs @@ -6,7 +6,7 @@ use grovedb_costs::{cost_return_on_error, CostResult, CostsExt, OperationCost}; use crate::proofs::query::{Map, MapBuilder}; use crate::{ error::Error, - proofs::{tree::execute, Decoder, Node, Op, Query}, + 
proofs::{hex_to_ascii, tree::execute, Decoder, Node, Op, Query}, tree::value_hash, CryptoHash as MerkHash, CryptoHash, }; @@ -37,279 +37,376 @@ pub fn verify(bytes: &[u8], expected_hash: MerkHash) -> CostResult { }) } -#[cfg(any(feature = "full", feature = "verify"))] -/// Verifies the encoded proof with the given query -/// -/// Every key in `keys` is checked to either have a key/value pair in the proof, -/// or to have its absence in the tree proven. -/// -/// Returns `Err` if the proof is invalid, or a list of proven values associated -/// with `keys`. For example, if `keys` contains keys `A` and `B`, the returned -/// list will contain 2 elements, the value of `A` and the value of `B`. Keys -/// proven to be absent in the tree will have an entry of `None`, keys that have -/// a proven value will have an entry of `Some(value)`. -pub fn execute_proof( - bytes: &[u8], - query: &Query, - limit: Option, - left_to_right: bool, -) -> CostResult<(MerkHash, ProofVerificationResult), Error> { - println!( - "executing proof with limit {:?} going {} using query {}", - limit, - if left_to_right { - "left to right" - } else { - "right to left" - }, - query - ); - let mut cost = OperationCost::default(); - - let mut output = Vec::with_capacity(query.len()); - let mut last_push = None; - let mut query = query.directional_iter(left_to_right).peekable(); - let mut in_range = false; - let original_limit = limit; - let mut current_limit = limit; +#[derive(Copy, Clone, Debug)] +pub struct VerifyOptions { + pub absence_proofs_for_non_existing_searched_keys: bool, + pub verify_proof_succinctness: bool, +} - let ops = Decoder::new(bytes); +impl Default for VerifyOptions { + fn default() -> Self { + VerifyOptions { + absence_proofs_for_non_existing_searched_keys: true, + verify_proof_succinctness: true, + } + } +} - let root_wrapped = execute(ops, true, |node| { - let mut execute_node = |key: &Vec, - value: Option<&Vec>, - value_hash: CryptoHash| - -> Result<_, Error> { - while let 
Some(item) = query.peek() { - // get next item in query - let query_item = *item; - let (lower_bound, start_non_inclusive) = query_item.lower_bound(); - let (upper_bound, end_inclusive) = query_item.upper_bound(); - - // terminate if we encounter a node before the current query item. - // this means a node less than the current query item for left to right. - // and a node greater than the current query item for right to left. - let terminate = if left_to_right { - // if the query item is lower unbounded, then a node cannot be less than it. - // checks that the lower bound of the query item not greater than the key - // if they are equal make sure the start is inclusive - !query_item.lower_unbounded() - && ((lower_bound.expect("confirmed not unbounded") > key.as_slice()) - || (start_non_inclusive - && lower_bound.expect("confirmed not unbounded") == key.as_slice())) - } else { - !query_item.upper_unbounded() - && ((upper_bound.expect("confirmed not unbounded") < key.as_slice()) - || (!end_inclusive - && upper_bound.expect("confirmed not unbounded") == key.as_slice())) - }; - if terminate { - break; - } +impl Query { + #[cfg(any(feature = "full", feature = "verify"))] + /// Verifies the encoded proof with the given query + /// + /// Every key in `keys` is checked to either have a key/value pair in the + /// proof, or to have its absence in the tree proven. + /// + /// Returns `Err` if the proof is invalid, or a list of proven values + /// associated with `keys`. For example, if `keys` contains keys `A` and + /// `B`, the returned list will contain 2 elements, the value of `A` and + /// the value of `B`. Keys proven to be absent in the tree will have an + /// entry of `None`, keys that have a proven value will have an entry of + /// `Some(value)`. 
+ pub fn execute_proof( + &self, + bytes: &[u8], + limit: Option, + left_to_right: bool, + ) -> CostResult<(MerkHash, ProofVerificationResult), Error> { + println!( + "executing proof with limit {:?} going {} using query {}", + limit, + if left_to_right { + "left to right" + } else { + "right to left" + }, + self + ); + let mut cost = OperationCost::default(); + + let mut output = Vec::with_capacity(self.len()); + let mut last_push = None; + let mut query = self.directional_iter(left_to_right).peekable(); + let mut in_range = false; + let original_limit = limit; + let mut current_limit = limit; + + let ops = Decoder::new(bytes); + + let root_wrapped = execute(ops, true, |node| { + let mut execute_node = |key: &Vec, + value: Option<&Vec>, + value_hash: CryptoHash| + -> Result<_, Error> { + while let Some(item) = query.peek() { + // get next item in query + let query_item = *item; + let (lower_bound, start_non_inclusive) = query_item.lower_bound(); + let (upper_bound, end_inclusive) = query_item.upper_bound(); + + // terminate if we encounter a node before the current query item. + // this means a node less than the current query item for left to right. + // and a node greater than the current query item for right to left. + let terminate = if left_to_right { + // if the query item is lower unbounded, then a node cannot be less than it. 
+ // checks that the lower bound of the query item not greater than the key + // if they are equal make sure the start is inclusive + !query_item.lower_unbounded() + && ((lower_bound.expect("confirmed not unbounded") > key.as_slice()) + || (start_non_inclusive + && lower_bound.expect("confirmed not unbounded") + == key.as_slice())) + } else { + !query_item.upper_unbounded() + && ((upper_bound.expect("confirmed not unbounded") < key.as_slice()) + || (!end_inclusive + && upper_bound.expect("confirmed not unbounded") + == key.as_slice())) + }; + if terminate { + break; + } - if !in_range { - // this is the first data we have encountered for this query item - if left_to_right { - // ensure lower bound of query item is proven - match last_push { - // lower bound is proven - we have an exact match - // ignoring the case when the lower bound is unbounded - // as it's not possible the get an exact key match for - // an unbounded value - _ if Some(key.as_slice()) == query_item.lower_bound().0 => {} - - // lower bound is proven - this is the leftmost node - // in the tree - None => {} - - // lower bound is proven - the preceding tree node - // is lower than the bound - Some(Node::KV(..)) => {} - Some(Node::KVDigest(..)) => {} - Some(Node::KVRefValueHash(..)) => {} - Some(Node::KVValueHash(..)) => {} - - // cannot verify lower bound - we have an abridged - // tree so we cannot tell what the preceding key was - Some(_) => { - return Err(Error::InvalidProofError( - "Cannot verify lower bound of queried range".to_string(), - )); + if !in_range { + // this is the first data we have encountered for this query item + if left_to_right { + // ensure lower bound of query item is proven + match last_push { + // lower bound is proven - we have an exact match + // ignoring the case when the lower bound is unbounded + // as it's not possible the get an exact key match for + // an unbounded value + _ if Some(key.as_slice()) == query_item.lower_bound().0 => {} + + // lower bound is proven - 
this is the leftmost node + // in the tree + None => {} + + // lower bound is proven - the preceding tree node + // is lower than the bound + Some(Node::KV(..)) => {} + Some(Node::KVDigest(..)) => {} + Some(Node::KVRefValueHash(..)) => {} + Some(Node::KVValueHash(..)) => {} + + // cannot verify lower bound - we have an abridged + // tree so we cannot tell what the preceding key was + Some(_) => { + return Err(Error::InvalidProofError( + "Cannot verify lower bound of queried range".to_string(), + )); + } } - } - } else { - // ensure upper bound of query item is proven - match last_push { - // upper bound is proven - we have an exact match - // ignoring the case when the upper bound is unbounded - // as it's not possible the get an exact key match for - // an unbounded value - _ if Some(key.as_slice()) == query_item.upper_bound().0 => {} - - // lower bound is proven - this is the rightmost node - // in the tree - None => {} - - // upper bound is proven - the preceding tree node - // is greater than the bound - Some(Node::KV(..)) => {} - Some(Node::KVDigest(..)) => {} - Some(Node::KVRefValueHash(..)) => {} - Some(Node::KVValueHash(..)) => {} - - // cannot verify upper bound - we have an abridged - // tree so we cannot tell what the previous key was - Some(_) => { - return Err(Error::InvalidProofError( - "Cannot verify upper bound of queried range".to_string(), - )); + } else { + // ensure upper bound of query item is proven + match last_push { + // upper bound is proven - we have an exact match + // ignoring the case when the upper bound is unbounded + // as it's not possible the get an exact key match for + // an unbounded value + _ if Some(key.as_slice()) == query_item.upper_bound().0 => {} + + // lower bound is proven - this is the rightmost node + // in the tree + None => {} + + // upper bound is proven - the preceding tree node + // is greater than the bound + Some(Node::KV(..)) => {} + Some(Node::KVDigest(..)) => {} + Some(Node::KVRefValueHash(..)) => {} + 
Some(Node::KVValueHash(..)) => {} + + // cannot verify upper bound - we have an abridged + // tree so we cannot tell what the previous key was + Some(_) => { + return Err(Error::InvalidProofError( + "Cannot verify upper bound of queried range".to_string(), + )); + } } } } - } - if left_to_right { - if query_item.upper_bound().0.is_some() - && Some(key.as_slice()) >= query_item.upper_bound().0 + if left_to_right { + if query_item.upper_bound().0.is_some() + && Some(key.as_slice()) >= query_item.upper_bound().0 + { + // at or past upper bound of range (or this was an exact + // match on a single-key queryitem), advance to next query + // item + query.next(); + in_range = false; + } else { + // have not reached upper bound, we expect more values + // to be proven in the range (and all pushes should be + // unabridged until we reach end of range) + in_range = true; + } + } else if query_item.lower_bound().0.is_some() + && Some(key.as_slice()) <= query_item.lower_bound().0 { - // at or past upper bound of range (or this was an exact + // at or before lower bound of range (or this was an exact // match on a single-key queryitem), advance to next query // item query.next(); in_range = false; } else { - // have not reached upper bound, we expect more values + // have not reached lower bound, we expect more values // to be proven in the range (and all pushes should be // unabridged until we reach end of range) in_range = true; } - } else if query_item.lower_bound().0.is_some() - && Some(key.as_slice()) <= query_item.lower_bound().0 - { - // at or before lower bound of range (or this was an exact - // match on a single-key queryitem), advance to next query - // item - query.next(); - in_range = false; - } else { - // have not reached lower bound, we expect more values - // to be proven in the range (and all pushes should be - // unabridged until we reach end of range) - in_range = true; - } - // this push matches the queried item - if query_item.contains(key) { - if let 
Some(val) = value { - if let Some(limit) = current_limit { - if limit == 0 { - return Err(Error::InvalidProofError(format!( - "Proof returns more data than limit {:?}", - original_limit - ))); - } else { - current_limit = Some(limit - 1); - if current_limit == Some(0) { - in_range = false; + // this push matches the queried item + if query_item.contains(key) { + if let Some(val) = value { + if let Some(limit) = current_limit { + if limit == 0 { + return Err(Error::InvalidProofError(format!( + "Proof returns more data than limit {:?}", + original_limit + ))); + } else { + current_limit = Some(limit - 1); + if current_limit == Some(0) { + in_range = false; + } } } - } - println!( - "pushing {}", - ProvedKeyValue { + println!( + "pushing {}", + ProvedKeyOptionalValue { + key: key.clone(), + value: Some(val.clone()), + proof: value_hash, + } + ); + // add data to output + output.push(ProvedKeyOptionalValue { key: key.clone(), - value: val.clone(), + value: Some(val.clone()), proof: value_hash, - } - ); - // add data to output - output.push(ProvedKeyValue { - key: key.clone(), - value: val.clone(), - proof: value_hash, - }); - - // continue to next push - break; - } else { - return Err(Error::InvalidProofError( - "Proof is missing data for query".to_string(), - )); + }); + + // continue to next push + break; + } else { + return Err(Error::InvalidProofError( + "Proof is missing data for query".to_string(), + )); + } + } + {} + // continue to next queried item + } + Ok(()) + }; + + match node { + Node::KV(key, value) => { + println!("Processing KV node"); + execute_node(key, Some(value), value_hash(value).unwrap())?; + } + Node::KVValueHash(key, value, value_hash) => { + println!("Processing KVValueHash node"); + execute_node(key, Some(value), *value_hash)?; + } + Node::KVDigest(key, value_hash) => { + println!("Processing KVDigest node"); + execute_node(key, None, *value_hash)?; + } + Node::KVRefValueHash(key, value, value_hash) => { + println!("Processing KVRefValueHash 
node"); + execute_node(key, Some(value), *value_hash)?; + } + Node::Hash(_) | Node::KVHash(_) | Node::KVValueHashFeatureType(..) => { + if in_range { + return Err(Error::InvalidProofError(format!( + "Proof is missing data for query range. Encountered unexpected node \ + type: {}", + node + ))); } } - {} - // continue to next queried item } + + last_push = Some(node.clone()); + Ok(()) - }; + }); - match node { - Node::KV(key, value) => { - println!("Processing KV node"); - execute_node(key, Some(value), value_hash(value).unwrap())?; - } - Node::KVValueHash(key, value, value_hash) => { - println!("Processing KVValueHash node"); - execute_node(key, Some(value), *value_hash)?; - } - Node::KVDigest(key, value_hash) => { - println!("Processing KVDigest node"); - execute_node(key, None, *value_hash)?; - } - Node::KVRefValueHash(key, value, value_hash) => { - println!("Processing KVRefValueHash node"); - execute_node(key, Some(value), *value_hash)?; - } - Node::Hash(_) | Node::KVHash(_) | Node::KVValueHashFeatureType(..) => { - if in_range { - return Err(Error::InvalidProofError(format!( - "Proof is missing data for query range. 
Encountered unexpected node type: \ - {}", - node - ))); + let root = cost_return_on_error!(&mut cost, root_wrapped); + + // we have remaining query items, check absence proof against right edge of + // tree + if query.peek().is_some() { + if current_limit == Some(0) { + } else { + match last_push { + // last node in tree was less than queried item + Some(Node::KV(..)) => {} + Some(Node::KVDigest(..)) => {} + Some(Node::KVRefValueHash(..)) => {} + Some(Node::KVValueHash(..)) => {} + + // proof contains abridged data so we cannot verify absence of + // remaining query items + _ => { + return Err(Error::InvalidProofError( + "Proof is missing data for query".to_string(), + )) + .wrap_with_cost(cost) + } } } } - last_push = Some(node.clone()); - - Ok(()) - }); - - let root = cost_return_on_error!(&mut cost, root_wrapped); - - // we have remaining query items, check absence proof against right edge of - // tree - if query.peek().is_some() { - if current_limit == Some(0) { - } else { - match last_push { - // last node in tree was less than queried item - Some(Node::KV(..)) => {} - Some(Node::KVDigest(..)) => {} - Some(Node::KVRefValueHash(..)) => {} - Some(Node::KVValueHash(..)) => {} - - // proof contains abridged data so we cannot verify absence of - // remaining query items - _ => { - return Err(Error::InvalidProofError( - "Proof is missing data for query".to_string(), - )) - .wrap_with_cost(cost) + Ok(( + root.hash().unwrap_add_cost(&mut cost), + ProofVerificationResult { + result_set: output, + limit: current_limit, + }, + )) + .wrap_with_cost(cost) + } + + #[cfg(any(feature = "full", feature = "verify"))] + /// Verifies the encoded proof with the given query and expected hash + pub fn verify_proof( + &self, + bytes: &[u8], + limit: Option, + left_to_right: bool, + expected_hash: MerkHash, + ) -> CostResult { + self.execute_proof(bytes, limit, left_to_right) + .map_ok(|(root_hash, verification_result)| { + if root_hash == expected_hash { + Ok(verification_result) + 
} else { + Err(Error::InvalidProofError(format!( + "Proof did not match expected hash\n\tExpected: \ + {expected_hash:?}\n\tActual: {root_hash:?}" + ))) } - } + }) + .flatten() + } +} + +#[cfg(any(feature = "full", feature = "verify"))] +#[derive(PartialEq, Eq, Debug)] +/// Proved key-value +pub struct ProvedKeyOptionalValue { + /// Key + pub key: Vec, + /// Value + pub value: Option>, + /// Proof + pub proof: CryptoHash, +} + +impl From for ProvedKeyOptionalValue { + fn from(value: ProvedKeyValue) -> Self { + let ProvedKeyValue { key, value, proof } = value; + + ProvedKeyOptionalValue { + key, + value: Some(value), + proof, } } +} + +impl TryFrom for ProvedKeyValue { + type Error = Error; + + fn try_from(value: ProvedKeyOptionalValue) -> Result { + let ProvedKeyOptionalValue { key, value, proof } = value; + let value = value.ok_or(Error::InvalidProofError(format!( + "expected {}", + hex_to_ascii(&key) + )))?; + Ok(ProvedKeyValue { key, value, proof }) + } +} - Ok(( - root.hash().unwrap_add_cost(&mut cost), - ProofVerificationResult { - result_set: output, - limit: current_limit, - }, - )) - .wrap_with_cost(cost) +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for ProvedKeyOptionalValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "ProvedKeyValue {{ key: {}, value: {}, proof: {} }}", + String::from_utf8(self.key.clone()).unwrap_or_else(|_| hex::encode(&self.key)), + if let Some(value) = &self.value { + hex::encode(value) + } else { + "None".to_string() + }, + hex::encode(self.proof) + ) + } } #[cfg(any(feature = "full", feature = "verify"))] @@ -342,7 +439,7 @@ impl fmt::Display for ProvedKeyValue { /// Proof verification result pub struct ProofVerificationResult { /// Result set - pub result_set: Vec, + pub result_set: Vec, /// Limit pub limit: Option, } @@ -360,26 +457,3 @@ impl fmt::Display for ProofVerificationResult { write!(f, "}}") } } - -#[cfg(any(feature = "full", feature = "verify"))] -/// Verifies 
the encoded proof with the given query and expected hash -pub fn verify_query( - bytes: &[u8], - query: &Query, - limit: Option, - left_to_right: bool, - expected_hash: MerkHash, -) -> CostResult { - execute_proof(bytes, query, limit, left_to_right) - .map_ok(|(root_hash, verification_result)| { - if root_hash == expected_hash { - Ok(verification_result) - } else { - Err(Error::InvalidProofError(format!( - "Proof did not match expected hash\n\tExpected: {expected_hash:?}\n\tActual: \ - {root_hash:?}" - ))) - } - }) - .flatten() -} From 89607fea2fe242068f0aba0b39ab9354fb6d3570 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Sun, 7 Jul 2024 02:11:52 +0700 Subject: [PATCH 12/34] more work --- grovedb/src/element/mod.rs | 6 +- grovedb/src/element/query.rs | 6 +- grovedb/src/operations/proof/generate.rs | 6 +- grovedb/src/operations/proof/util.rs | 6 +- grovedb/src/query/mod.rs | 6 +- grovedb/src/query_result_type.rs | 6 +- merk/src/proofs/mod.rs | 6 +- merk/src/proofs/query/mod.rs | 448 +++++++++++++---------- merk/src/proofs/query/query_item/mod.rs | 6 +- merk/src/proofs/query/verify.rs | 9 +- 10 files changed, 294 insertions(+), 211 deletions(-) diff --git a/grovedb/src/element/mod.rs b/grovedb/src/element/mod.rs index a88429d8..27da770c 100644 --- a/grovedb/src/element/mod.rs +++ b/grovedb/src/element/mod.rs @@ -175,7 +175,11 @@ impl fmt::Display for Element { } fn hex_to_ascii(hex_value: &[u8]) -> String { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) + if hex_value.len() == 1 && hex_value[0] < b"0"[0] { + hex::encode(&hex_value) + } else { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) + } } impl Element { diff --git a/grovedb/src/element/query.rs b/grovedb/src/element/query.rs index e1f0409d..eba7ea9a 100644 --- a/grovedb/src/element/query.rs +++ b/grovedb/src/element/query.rs @@ -247,7 +247,11 @@ where } fn hex_to_ascii(hex_value: &[u8]) -> String { - 
String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) + if hex_value.len() == 1 && hex_value[0] < b"0"[0] { + hex::encode(&hex_value) + } else { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) + } } impl Element { diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index db491d1f..3f6d320e 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -172,7 +172,11 @@ fn element_hex_to_ascii(hex_value: &[u8]) -> String { } fn hex_to_ascii(hex_value: &[u8]) -> String { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) + if hex_value.len() == 1 && hex_value[0] < b"0"[0] { + hex::encode(&hex_value) + } else { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) + } } impl GroveDb { diff --git a/grovedb/src/operations/proof/util.rs b/grovedb/src/operations/proof/util.rs index 9d86eb28..fa751653 100644 --- a/grovedb/src/operations/proof/util.rs +++ b/grovedb/src/operations/proof/util.rs @@ -152,7 +152,11 @@ fn element_hex_to_ascii(hex_value: &[u8]) -> String { } fn hex_to_ascii(hex_value: &[u8]) -> String { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) + if hex_value.len() == 1 && hex_value[0] < b"0"[0] { + hex::encode(&hex_value) + } else { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) + } } impl ProvedPathKeyValue { diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index ab7c06ea..41438b44 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -49,7 +49,11 @@ impl fmt::Display for PathQuery { } fn hex_to_ascii(hex_value: &[u8]) -> String { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) + if hex_value.len() == 1 && hex_value[0] < b"0"[0] { + hex::encode(&hex_value) + } else { + 
String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) + } } #[cfg(any(feature = "full", feature = "verify"))] diff --git a/grovedb/src/query_result_type.rs b/grovedb/src/query_result_type.rs index e046d5b3..41bf47f8 100644 --- a/grovedb/src/query_result_type.rs +++ b/grovedb/src/query_result_type.rs @@ -110,7 +110,11 @@ impl BTreeMapLevelResult { } fn hex_to_ascii(hex_value: &[u8]) -> String { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) + if hex_value.len() == 1 && hex_value[0] < b"0"[0] { + hex::encode(&hex_value) + } else { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) + } } impl BTreeMapLevelResult { diff --git a/merk/src/proofs/mod.rs b/merk/src/proofs/mod.rs index 5863eaf1..bdf2cdb8 100644 --- a/merk/src/proofs/mod.rs +++ b/merk/src/proofs/mod.rs @@ -125,5 +125,9 @@ impl fmt::Display for Node { } fn hex_to_ascii(hex_value: &[u8]) -> String { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) + if hex_value.len() == 1 && hex_value[0] < b"0"[0] { + hex::encode(&hex_value) + } else { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) + } } diff --git a/merk/src/proofs/query/mod.rs b/merk/src/proofs/query/mod.rs index ac75779d..2004fa54 100644 --- a/merk/src/proofs/query/mod.rs +++ b/merk/src/proofs/query/mod.rs @@ -791,14 +791,17 @@ mod test { TreeFeatureType::BasicMerkNode, }; - fn compare_result_tuples( - result_set: Vec, + fn compare_result_tuples_not_optional( + result_set: Vec, expected_result_set: Vec<(Vec, Vec)>, ) { assert_eq!(expected_result_set.len(), result_set.len()); for i in 0..expected_result_set.len() { assert_eq!(expected_result_set[i].0, result_set[i].key); - assert_eq!(expected_result_set[i].1, result_set[i].value); + assert_eq!( + &expected_result_set[i].1, + result_set[i].value.as_ref().expect("expected value") + ); } } @@ -894,7 +897,10 @@ mod test { } for (key, 
expected_value) in keys.iter().zip(expected_result.iter()) { - assert_eq!(values.get(key), expected_value.as_ref()); + assert_eq!( + values.get(key).map(|a| a.as_ref()).flatten(), + expected_value.as_ref() + ); } } @@ -1127,15 +1133,10 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); - let res = verify_query( - bytes.as_slice(), - &Query::new(), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = Query::new() + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); assert!(res.result_set.is_empty()); } @@ -1191,7 +1192,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![5], vec![5])]); } #[test] @@ -1246,7 +1247,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![3], vec![3])]); } #[test] @@ -1305,7 +1306,10 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![3], vec![3]), (vec![7], vec![7])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![3], vec![3]), (vec![7], vec![7])], + ); } #[test] @@ -1372,7 +1376,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![(vec![3], vec![3]), (vec![5], vec![5]), (vec![7], vec![7])], ); @@ -1429,7 +1433,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![]); + compare_result_tuples_not_optional(res.result_set, vec![]); } #[test] @@ 
-1486,7 +1490,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![]); + compare_result_tuples_not_optional(res.result_set, vec![]); } #[test] @@ -1674,7 +1678,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![1], vec![1]), @@ -1821,7 +1825,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), @@ -1852,7 +1856,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], ); @@ -1880,7 +1884,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![]); + compare_result_tuples_not_optional(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); // skip all elements @@ -1905,7 +1909,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![]); + compare_result_tuples_not_optional(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); // right to left test @@ -1930,7 +1934,7 @@ mod test { .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), @@ -2033,7 +2037,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, 
vec![ (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), @@ -2065,7 +2069,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], ); @@ -2093,7 +2097,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![(vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60])], ); @@ -2121,7 +2125,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![]); + compare_result_tuples_not_optional(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); // right_to_left proof @@ -2147,7 +2151,7 @@ mod test { .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60]), @@ -2178,7 +2182,7 @@ mod test { .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![(vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60])], ); @@ -2253,7 +2257,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])], ); @@ -2288,7 +2292,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); // Limit result set to 2 items @@ -2324,7 +2328,10 @@ mod test { .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, 
vec![(vec![5], vec![5]), (vec![7], vec![7])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![5], vec![5]), (vec![7], vec![7])], + ); assert_eq!(res.limit, Some(0)); // Limit result set to 100 items @@ -2356,7 +2363,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])], ); @@ -2382,7 +2389,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); // skip 2 elements @@ -2405,7 +2412,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![8], vec![8])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![8], vec![8])]); assert_eq!(res.limit, Some(0)); // skip all elements @@ -2428,7 +2435,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![]); + compare_result_tuples_not_optional(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); // right_to_left test @@ -2453,7 +2460,7 @@ mod test { .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])], ); @@ -2479,7 +2486,10 @@ mod test { .verify_proof(bytes.as_slice(), Some(2), false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![7], vec![7]), (vec![5], vec![5])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![7], vec![7]), (vec![5], 
vec![5])], + ); assert_eq!(res.limit, Some(0)); } @@ -2574,7 +2584,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![2], vec![2]), @@ -2614,7 +2624,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); // Limit result set to 2 items @@ -2646,7 +2656,10 @@ mod test { .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![2], vec![2]), (vec![3], vec![3])], + ); assert_eq!(res.limit, Some(0)); // Limit result set to 100 items @@ -2678,7 +2691,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![2], vec![2]), @@ -2709,7 +2722,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); // skip 2 elements @@ -2732,7 +2745,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); // skip all elements @@ -2755,7 +2768,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - 
compare_result_tuples(res.result_set, vec![]); + compare_result_tuples_not_optional(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); // right_to_left proof @@ -2780,7 +2793,7 @@ mod test { .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![5], vec![5]), @@ -2811,7 +2824,10 @@ mod test { .verify_proof(bytes.as_slice(), Some(2), false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![5], vec![5]), (vec![4], vec![4])], + ); assert_eq!(res.limit, Some(0)); } @@ -2906,7 +2922,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![2], vec![2]), @@ -2946,7 +2962,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); // Limit result set to 2 items @@ -2978,7 +2994,10 @@ mod test { .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![2], vec![2]), (vec![3], vec![3])], + ); assert_eq!(res.limit, Some(0)); // Limit result set to 100 items @@ -3010,7 +3029,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![2], vec![2]), @@ -3041,7 +3060,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, 
tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![3], vec![3])]); assert_eq!(res.limit, Some(0)); // skip 2 elements @@ -3064,7 +3083,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); // skip all elements @@ -3087,7 +3106,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![]); + compare_result_tuples_not_optional(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); // right_to_left proof @@ -3112,7 +3131,7 @@ mod test { .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![5], vec![5]), @@ -3143,111 +3162,111 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); } #[test] fn range_after_proof() { - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![RangeAfter(vec![3]..)]; - let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, true) - .unwrap() - .expect("create_proof errored"); - - let mut iter = proof.iter(); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::Hash([ - 121, 235, 207, 195, 143, 58, 159, 120, 166, 33, 151, 45, 178, 124, 91, 233, 201, 4, - 241, 127, 41, 198, 197, 228, 19, 190, 36, 173, 183, 73, 104, 30 - ]))) - ); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVDigest( - vec![3], - [ - 210, 173, 26, 11, 185, 253, 244, 69, 11, 216, 113, 81, 192, 139, 153, 104, 205, - 4, 107, 218, 102, 84, 170, 189, 186, 36, 48, 176, 169, 129, 231, 144 - ] - ))) - ); - assert_eq!(iter.next(), Some(&Op::Parent)); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVValueHash( - vec![4], - vec![4], - [ - 198, 129, 51, 156, 134, 199, 7, 21, 172, 89, 146, 71, 4, 16, 82, 205, 89, 51, - 227, 215, 139, 195, 237, 202, 159, 191, 209, 172, 156, 38, 239, 192 - ] - ))) - ); - assert_eq!(iter.next(), Some(&Op::Child)); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVValueHash( - vec![5], - vec![5], - [ - 116, 30, 0, 135, 25, 118, 86, 14, 12, 107, 215, 214, 133, 122, 48, 45, 180, 21, - 158, 223, 88, 148, 181, 149, 189, 65, 121, 19, 81, 118, 11, 106 - ] - ))) - ); - assert_eq!(iter.next(), Some(&Op::Parent)); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVValueHash( - vec![7], - vec![7], - [ - 63, 193, 78, 215, 236, 222, 32, 58, 144, 66, 94, 225, 145, 233, 219, 89, 102, - 51, 109, 115, 127, 3, 152, 236, 147, 183, 100, 81, 123, 109, 244, 0 - ] - ))) - ); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVValueHash( - vec![8], - vec![8], - [ - 205, 24, 196, 78, 21, 130, 132, 58, 44, 29, 21, 175, 68, 254, 158, 189, 49, - 158, 250, 151, 137, 22, 160, 107, 216, 238, 129, 230, 199, 251, 197, 51 - ] - ))) - ); - assert_eq!(iter.next(), Some(&Op::Parent)); - assert_eq!(iter.next(), Some(&Op::Child)); - assert!(iter.next().is_none()); - assert_eq!(absence, (false, true)); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut 
bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples( - res.result_set, - vec![ - (vec![4], vec![4]), - (vec![5], vec![5]), - (vec![7], vec![7]), - (vec![8], vec![8]), - ], - ); - assert_eq!(res.limit, None); + // let mut tree = make_6_node_tree(); + // let mut walker = RefWalker::new(&mut tree, PanicSource {}); + // + // let queryitems = vec![RangeAfter(vec![3]..)]; + // let (proof, absence, ..) = walker + // .create_full_proof(queryitems.as_slice(), None, true) + // .unwrap() + // .expect("create_proof errored"); + // + // let mut iter = proof.iter(); + // assert_eq!( + // iter.next(), + // Some(&Op::Push(Node::Hash([ + // 121, 235, 207, 195, 143, 58, 159, 120, 166, 33, 151, 45, 178, 124, + // 91, 233, 201, 4, 241, 127, 41, 198, 197, 228, 19, 190, 36, + // 173, 183, 73, 104, 30 ]))) + // ); + // assert_eq!( + // iter.next(), + // Some(&Op::Push(Node::KVDigest( + // vec![3], + // [ + // 210, 173, 26, 11, 185, 253, 244, 69, 11, 216, 113, 81, 192, 139, + // 153, 104, 205, 4, 107, 218, 102, 84, 170, 189, 186, 36, + // 48, 176, 169, 129, 231, 144 ] + // ))) + // ); + // assert_eq!(iter.next(), Some(&Op::Parent)); + // assert_eq!( + // iter.next(), + // Some(&Op::Push(Node::KVValueHash( + // vec![4], + // vec![4], + // [ + // 198, 129, 51, 156, 134, 199, 7, 21, 172, 89, 146, 71, 4, 16, 82, + // 205, 89, 51, 227, 215, 139, 195, 237, 202, 159, 191, 209, + // 172, 156, 38, 239, 192 ] + // ))) + // ); + // assert_eq!(iter.next(), Some(&Op::Child)); + // assert_eq!( + // iter.next(), + // Some(&Op::Push(Node::KVValueHash( + // vec![5], + // vec![5], + // [ + // 116, 30, 0, 135, 25, 118, 86, 14, 12, 107, 215, 214, 133, 122, + // 48, 45, 180, 21, 158, 223, 88, 148, 181, 149, 189, 65, + // 121, 19, 81, 118, 11, 106 ] + // ))) + // ); + // assert_eq!(iter.next(), Some(&Op::Parent)); + // assert_eq!( + 
// iter.next(), + // Some(&Op::Push(Node::KVValueHash( + // vec![7], + // vec![7], + // [ + // 63, 193, 78, 215, 236, 222, 32, 58, 144, 66, 94, 225, 145, 233, + // 219, 89, 102, 51, 109, 115, 127, 3, 152, 236, 147, 183, + // 100, 81, 123, 109, 244, 0 ] + // ))) + // ); + // assert_eq!( + // iter.next(), + // Some(&Op::Push(Node::KVValueHash( + // vec![8], + // vec![8], + // [ + // 205, 24, 196, 78, 21, 130, 132, 58, 44, 29, 21, 175, 68, 254, + // 158, 189, 49, 158, 250, 151, 137, 22, 160, 107, 216, 238, + // 129, 230, 199, 251, 197, 51 ] + // ))) + // ); + // assert_eq!(iter.next(), Some(&Op::Parent)); + // assert_eq!(iter.next(), Some(&Op::Child)); + // assert!(iter.next().is_none()); + // assert_eq!(absence, (false, true)); + // + // let mut bytes = vec![]; + // encode_into(proof.iter(), &mut bytes); + // let mut query = Query::new(); + // for item in queryitems { + // query.insert_item(item); + // } + // let res = query + // .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + // .unwrap() + // .unwrap(); + // compare_result_tuples_not_optional( + // res.result_set, + // vec![ + // (vec![4], vec![4]), + // (vec![5], vec![5]), + // (vec![7], vec![7]), + // (vec![8], vec![8]), + // ], + // ); + // assert_eq!(res.limit, None); // Limit result set to 1 item let mut tree = make_6_node_tree(); @@ -3278,7 +3297,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); // Limit result set to 2 items @@ -3310,7 +3329,10 @@ mod test { .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![4], vec![4]), (vec![5], vec![5])], + ); 
assert_eq!(res.limit, Some(0)); // Limit result set to 100 items @@ -3342,7 +3364,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![4], vec![4]), @@ -3373,7 +3395,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); // skip 2 elements @@ -3396,7 +3418,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); // skip all elements @@ -3419,7 +3441,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![]); + compare_result_tuples_not_optional(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); // right_to_left proof @@ -3444,7 +3466,7 @@ mod test { .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![8], vec![8]), @@ -3475,7 +3497,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(3), false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])], ); @@ -3568,7 +3590,10 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); + compare_result_tuples_not_optional( + res.result_set, + 
vec![(vec![4], vec![4]), (vec![5], vec![5])], + ); assert_eq!(res.limit, None); // Limit result set to 1 item @@ -3600,7 +3625,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); // Limit result set to 2 items @@ -3632,7 +3657,10 @@ mod test { .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![4], vec![4]), (vec![5], vec![5])], + ); assert_eq!(res.limit, Some(0)); // Limit result set to 100 items @@ -3664,7 +3692,10 @@ mod test { .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![4], vec![4]), (vec![5], vec![5])], + ); assert_eq!(res.limit, Some(98)); // skip 1 element @@ -3687,7 +3718,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); // skip 2 elements @@ -3710,7 +3741,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![]); + compare_result_tuples_not_optional(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); // skip all elements @@ -3733,7 +3764,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![]); + 
compare_result_tuples_not_optional(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); // right_to_left @@ -3758,7 +3789,10 @@ mod test { .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![5], vec![5]), (vec![4], vec![4])], + ); let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); @@ -3781,7 +3815,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(300), false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(299)); } @@ -3870,7 +3904,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])], ); @@ -3905,7 +3939,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); // Limit result set to 2 items @@ -3937,7 +3971,10 @@ mod test { .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![4], vec![4]), (vec![5], vec![5])], + ); assert_eq!(res.limit, Some(0)); // Limit result set to 100 items @@ -3969,7 +4006,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + 
compare_result_tuples_not_optional( res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])], ); @@ -3995,7 +4032,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); // skip 2 elements @@ -4018,7 +4055,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![7], vec![7])]); assert_eq!(res.limit, Some(0)); // skip all elements @@ -4041,7 +4078,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![]); + compare_result_tuples_not_optional(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); // right_to_left proof @@ -4176,7 +4213,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![2], vec![2]), @@ -4218,7 +4255,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); // Limit result set to 2 items @@ -4250,7 +4287,10 @@ mod test { .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![2], vec![2]), (vec![3], vec![3])], + ); assert_eq!(res.limit, Some(0)); // Limit result set to 100 items @@ 
-4282,7 +4322,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![2], vec![2]), @@ -4315,7 +4355,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(3), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![(vec![3], vec![3]), (vec![4], vec![4]), (vec![5], vec![5])], ); @@ -4341,7 +4381,10 @@ mod test { .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![4], vec![4]), (vec![5], vec![5])], + ); assert_eq!(res.limit, Some(0)); // skip all elements @@ -4364,7 +4407,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![]); + compare_result_tuples_not_optional(res.result_set, vec![]); assert_eq!(res.limit, Some(1)); // right_to_left proof @@ -4389,7 +4432,7 @@ mod test { .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![8], vec![8]), @@ -4422,7 +4465,10 @@ mod test { .verify_proof(bytes.as_slice(), Some(2), false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); + compare_result_tuples_not_optional( + res.result_set, + vec![(vec![5], vec![5]), (vec![4], vec![4])], + ); assert_eq!(res.limit, Some(0)); } @@ -4496,7 +4542,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![2], 
vec![2])]); assert_eq!(res.limit, Some(0)); } @@ -4573,7 +4619,7 @@ mod test { .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); + compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); } @@ -4670,7 +4716,7 @@ mod test { .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![8], vec![8]), @@ -4775,7 +4821,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), @@ -4877,7 +4923,7 @@ mod test { .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], ); @@ -4911,7 +4957,7 @@ mod test { .unwrap(); assert_eq!(res.result_set.len(), 1); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], ); @@ -4937,7 +4983,7 @@ mod test { .unwrap(); assert_eq!(res.result_set.len(), 4); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 3], vec![123; 60]), @@ -4967,7 +5013,7 @@ mod test { .unwrap(); assert_eq!(res.result_set.len(), 5); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 4], vec![123; 60]), @@ -4998,7 +5044,7 @@ mod test { .unwrap(); assert_eq!(res.result_set.len(), 5); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 1], vec![123; 60]), @@ -5028,7 +5074,7 @@ mod test { .unwrap(); 
assert_eq!(res.result_set.len(), 5); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 1], vec![123; 60]), @@ -5058,7 +5104,7 @@ mod test { .unwrap(); assert_eq!(res.result_set.len(), 4); - compare_result_tuples( + compare_result_tuples_not_optional( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 2], vec![123; 60]), diff --git a/merk/src/proofs/query/query_item/mod.rs b/merk/src/proofs/query/query_item/mod.rs index a637c6b6..2b01b3b7 100644 --- a/merk/src/proofs/query/query_item/mod.rs +++ b/merk/src/proofs/query/query_item/mod.rs @@ -79,7 +79,11 @@ impl fmt::Display for QueryItem { } fn hex_to_ascii(hex_value: &[u8]) -> String { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) + if hex_value.len() == 1 && hex_value[0] < b"0"[0] { + hex::encode(&hex_value) + } else { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) + } } #[cfg(any(feature = "full", feature = "verify"))] diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs index fa2343ee..797f4518 100644 --- a/merk/src/proofs/query/verify.rs +++ b/merk/src/proofs/query/verify.rs @@ -395,10 +395,15 @@ impl TryFrom for ProvedKeyValue { #[cfg(any(feature = "full", feature = "verify"))] impl fmt::Display for ProvedKeyOptionalValue { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let key_string = if self.key.len() == 1 && self.key[0] < b"0"[0] { + hex::encode(&self.key) + } else { + String::from_utf8(self.key.clone()).unwrap_or_else(|_| hex::encode(&self.key)) + }; write!( f, - "ProvedKeyValue {{ key: {}, value: {}, proof: {} }}", - String::from_utf8(self.key.clone()).unwrap_or_else(|_| hex::encode(&self.key)), + "ProvedKeyOptionalValue {{ key: {}, value: {}, proof: {} }}", + key_string, if let Some(value) = &self.value { hex::encode(value) } else { From 099cdd0c14a6aebe5ced39a066e51b354da52528 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: 
Sun, 7 Jul 2024 03:26:16 +0700 Subject: [PATCH 13/34] fixed tests for merk proofs --- merk/src/proofs/query/mod.rs | 1774 ++++++++++------------------------ 1 file changed, 485 insertions(+), 1289 deletions(-) diff --git a/merk/src/proofs/query/mod.rs b/merk/src/proofs/query/mod.rs index 2004fa54..67105c31 100644 --- a/merk/src/proofs/query/mod.rs +++ b/merk/src/proofs/query/mod.rs @@ -130,6 +130,29 @@ impl fmt::Display for Query { } } +macro_rules! compare_result_tuples_not_optional { + ($result_set:expr, $expected_result_set:expr) => { + assert_eq!( + $expected_result_set.len(), + $result_set.len(), + "Result set lengths do not match" + ); + for i in 0..$expected_result_set.len() { + assert_eq!( + $expected_result_set[i].0, $result_set[i].key, + "Key mismatch at index {}", + i + ); + assert_eq!( + &$expected_result_set[i].1, + $result_set[i].value.as_ref().expect("expected value"), + "Value mismatch at index {}", + i + ); + } + }; +} + #[cfg(any(feature = "full", feature = "verify"))] impl Query { /// Creates a new query which contains no items. @@ -791,20 +814,6 @@ mod test { TreeFeatureType::BasicMerkNode, }; - fn compare_result_tuples_not_optional( - result_set: Vec, - expected_result_set: Vec<(Vec, Vec)>, - ) { - assert_eq!(expected_result_set.len(), result_set.len()); - for i in 0..expected_result_set.len() { - assert_eq!(expected_result_set[i].0, result_set[i].key); - assert_eq!( - &expected_result_set[i].1, - result_set[i].value.as_ref().expect("expected value") - ); - } - } - fn make_3_node_tree() -> TreeNode { let mut tree = TreeNode::new(vec![5], vec![5], None, BasicMerkNode) .unwrap() @@ -1145,9 +1154,9 @@ mod test { let mut tree = make_3_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Key(vec![5])]; + let query_items = vec![QueryItem::Key(vec![5])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1185,14 +1194,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![5], vec![5])]); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![5], vec![5])]); } #[test] @@ -1200,9 +1209,9 @@ mod test { let mut tree = make_3_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Key(vec![3])]; + let query_items = vec![QueryItem::Key(vec![3])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1240,14 +1249,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![3], vec![3])]); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![3], vec![3])]); } #[test] @@ -1255,9 +1264,9 @@ mod test { let mut tree = make_3_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Key(vec![3]), QueryItem::Key(vec![7])]; + let query_items = vec![QueryItem::Key(vec![3]), QueryItem::Key(vec![7])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1299,16 +1308,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![3], vec![3]), (vec![7], vec![7])], + vec![(vec![3], vec![3]), (vec![7], vec![7])] ); } @@ -1317,13 +1326,13 @@ mod test { let mut tree = make_3_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![ + let query_items = vec![ QueryItem::Key(vec![3]), QueryItem::Key(vec![5]), QueryItem::Key(vec![7]), ]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1369,16 +1378,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![3], vec![3]), (vec![5], vec![5]), (vec![7], vec![7])], + vec![(vec![3], vec![3]), (vec![5], vec![5]), (vec![7], vec![7])] ); } @@ -1387,9 +1396,9 @@ mod test { let mut tree = make_3_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Key(vec![8])]; + let query_items = vec![QueryItem::Key(vec![8])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1426,14 +1435,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![]); + compare_result_tuples_not_optional!(res.result_set, Vec::<(Vec, Vec)>::new()); } #[test] @@ -1441,9 +1450,9 @@ mod test { let mut tree = make_3_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Key(vec![6])]; + let query_items = vec![QueryItem::Key(vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1483,14 +1492,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![]); + compare_result_tuples_not_optional!(res.result_set, Vec::<(Vec, Vec)>::new()); } #[test] @@ -1570,14 +1579,14 @@ mod test { let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![ + let query_items = vec![ QueryItem::Key(vec![1]), QueryItem::Key(vec![2]), QueryItem::Key(vec![3]), QueryItem::Key(vec![4]), ]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1671,21 +1680,21 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![1], vec![1]), (vec![2], vec![2]), (vec![3], vec![3]), (vec![4], vec![4]), - ], + ] ); } @@ -1737,11 +1746,11 @@ mod test { let mut tree = make_tree_seq(10); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Range( + let query_items = vec![QueryItem::Range( vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1818,128 +1827,50 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), - ], + ] ); assert_eq!(res.limit, None); - // skip 1 element - let mut tree = make_tree_seq(10); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::Range( - vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional( - res.result_set, - vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], - ); - assert_eq!(res.limit, Some(0)); - - // skip 2 elements - let mut tree = make_tree_seq(10); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::Range( - vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - - // skip all elements - let mut tree = make_tree_seq(10); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::Range( - vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - // right to left test let mut tree = make_tree_seq(10); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Range( + let query_items = vec![QueryItem::Range( vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new_with_direction(false); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), - ], + ] ); } @@ -1948,11 +1879,11 @@ mod test { let mut tree = make_tree_seq(10); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeInclusive( + let query_items = vec![QueryItem::RangeInclusive( vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2030,120 +1961,39 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60]), - ], + ] ); assert_eq!(res.limit, None); - // skip 1 element - let mut tree = make_tree_seq(10); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeInclusive( - vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional( - res.result_set, - vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], - ); - assert_eq!(res.limit, Some(0)); - - // skip 2 elements - let mut tree = make_tree_seq(10); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeInclusive( - vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional( - res.result_set, - vec![(vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60])], - ); - assert_eq!(res.limit, Some(0)); - - // skip all elements - let mut tree = make_tree_seq(10); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeInclusive( - vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - // right_to_left proof let mut tree = make_tree_seq(10); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeInclusive( + let query_items = vec![QueryItem::RangeInclusive( vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query @@ -2151,42 +2001,14 @@ mod test { .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), - ], - ); - - let mut tree = make_tree_seq(10); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeInclusive( - vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), None, false) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) - .unwrap() - .unwrap(); - - compare_result_tuples_not_optional( - res.result_set, - vec![(vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60])], + ] ); - assert_eq!(res.limit, None); } #[test] @@ -2194,9 +2016,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; + let query_items = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2250,16 +2072,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])], + vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])] ); assert_eq!(res.limit, None); @@ -2267,15 +2089,15 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; + let query_items = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::Key(vec![5])]; + let equivalent_query_items = vec![QueryItem::Key(vec![5])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2285,33 +2107,33 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![5], vec![5])]); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; + let query_items = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), true) + .create_full_proof(query_items.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![ + let equivalent_query_items = vec![ QueryItem::Key(vec![5]), QueryItem::Key(vec![6]), QueryItem::Key(vec![7]), ]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2321,16 +2143,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![5], vec![5]), (vec![7], vec![7])], + vec![(vec![5], vec![5]), (vec![7], vec![7])] ); assert_eq!(res.limit, Some(0)); @@ -2338,15 +2160,15 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; + let query_items = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), true) + .create_full_proof(query_items.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; + let equivalent_query_items = vec![QueryItem::RangeFrom(vec![5]..)]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2356,95 +2178,26 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])], + vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])] ); assert_eq!(res.limit, Some(97)); - // skip 1 element - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![7], vec![7])]); - assert_eq!(res.limit, Some(0)); - - // skip 2 elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![8], vec![8])]); - assert_eq!(res.limit, Some(0)); - - // skip all elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - // right_to_left test let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; + let query_items = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -2453,44 +2206,17 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( - res.result_set, - vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])], - ); - - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; - let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), false) - .unwrap() - .expect("create_proof errored"); - - assert_eq!(absence, (true, false)); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(2), false, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![7], vec![7]), (vec![5], vec![5])], + vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])] ); - assert_eq!(res.limit, Some(0)); } #[test] @@ -2498,9 +2224,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; + let query_items = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2577,21 +2303,21 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![2], vec![2]), (vec![3], vec![3]), (vec![4], vec![4]), (vec![5], vec![5]), - ], + ] ); assert_eq!(res.limit, None); @@ -2599,15 +2325,15 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; + let query_items = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![2])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![2])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2617,29 +2343,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![2], vec![2])]); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; + let query_items = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), true) + .create_full_proof(query_items.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![3])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![3])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2649,16 +2375,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![2], vec![2]), (vec![3], vec![3])], + vec![(vec![2], vec![2]), (vec![3], vec![3])] ); assert_eq!(res.limit, Some(0)); @@ -2666,15 +2392,15 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; + let query_items = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), true) + .create_full_proof(query_items.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeTo(..vec![6])]; + let equivalent_query_items = vec![QueryItem::RangeTo(..vec![6])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2684,131 +2410,62 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![2], vec![2]), (vec![3], vec![3]), (vec![4], vec![4]), (vec![5], vec![5]), - ], + ] ); assert_eq!(res.limit, Some(96)); - // skip 1 element + // right_to_left proof let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + let query_items = vec![QueryItem::RangeTo(..vec![6])]; + let (proof, absence, ..) = walker + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); + assert_eq!(absence, (false, true)); + let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![3], vec![3])]); - assert_eq!(res.limit, Some(0)); - - // skip 2 elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); - assert_eq!(res.limit, Some(0)); - - // skip all elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - - // right_to_left proof - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; - let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, false) - .unwrap() - .expect("create_proof errored"); - - assert_eq!(absence, (false, true)); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![5], vec![5]), (vec![4], vec![4]), (vec![3], vec![3]), (vec![2], vec![2]), - ], + ] ); let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; + let query_items = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), false) + .create_full_proof(query_items.as_slice(), Some(2), false) .unwrap() .expect("create_proof errored"); @@ -2817,16 +2474,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(2), false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![5], vec![5]), (vec![4], vec![4])], + vec![(vec![5], vec![5]), (vec![4], vec![4])] ); assert_eq!(res.limit, Some(0)); } @@ -2836,9 +2493,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2915,21 +2572,21 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![2], vec![2]), (vec![3], vec![3]), (vec![4], vec![4]), (vec![5], vec![5]), - ], + ] ); assert_eq!(res.limit, None); @@ -2937,15 +2594,15 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![2])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![2])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2955,29 +2612,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![2], vec![2])]); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), true) + .create_full_proof(query_items.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![3])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![3])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2987,16 +2644,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![2], vec![2]), (vec![3], vec![3])], + vec![(vec![2], vec![2]), (vec![3], vec![3])] ); assert_eq!(res.limit, Some(0)); @@ -3004,15 +2661,15 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), true) + .create_full_proof(query_items.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3022,265 +2679,196 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![2], vec![2]), (vec![3], vec![3]), (vec![4], vec![4]), (vec![5], vec![5]), - ], + ] ); assert_eq!(res.limit, Some(96)); - // skip 1 element + // right_to_left proof let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + let query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let (proof, absence, ..) = walker + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![3], vec![3])]); - assert_eq!(res.limit, Some(0)); - - // skip 2 elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); + assert_eq!(absence, (false, true)); let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); - assert_eq!(res.limit, Some(0)); + compare_result_tuples_not_optional!( + res.result_set, + vec![ + (vec![5], vec![5]), + (vec![4], vec![4]), + (vec![3], vec![3]), + (vec![2], vec![2]), + ] + ); - // skip all elements let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + let query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let (proof, absence, ..) 
= walker + .create_full_proof(query_items.as_slice(), Some(1), false) .unwrap() .expect("create_proof errored"); + assert_eq!(absence, (false, false)); + let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .verify_proof(bytes.as_slice(), Some(1), false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![5], vec![5])]); + assert_eq!(res.limit, Some(0)); + } - // right_to_left proof + #[test] + fn range_after_proof() { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let query_items = vec![RangeAfter(vec![3]..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, false) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); + let mut iter = proof.iter(); + assert_eq!( + iter.next(), + Some(&Op::Push(Node::Hash([ + 121, 235, 207, 195, 143, 58, 159, 120, 166, 33, 151, 45, 178, 124, 91, 233, 201, 4, + 241, 127, 41, 198, 197, 228, 19, 190, 36, 173, 183, 73, 104, 30 + ]))) + ); + assert_eq!( + iter.next(), + Some(&Op::Push(Node::KVDigest( + vec![3], + [ + 210, 173, 26, 11, 185, 253, 244, 69, 11, 216, 113, 81, 192, 139, 153, 104, 205, + 4, 107, 218, 102, 84, 170, 189, 186, 36, 48, 176, 169, 129, 231, 144 + ] + ))) + ); + assert_eq!(iter.next(), Some(&Op::Parent)); + assert_eq!( + iter.next(), + Some(&Op::Push(Node::KVValueHash( + vec![4], + vec![4], + [ + 198, 129, 51, 156, 134, 199, 7, 21, 172, 89, 146, 71, 4, 16, 82, 205, 89, 51, + 227, 215, 139, 195, 237, 202, 159, 191, 209, 172, 156, 38, 239, 192 + ] + ))) + ); + assert_eq!(iter.next(), Some(&Op::Child)); + assert_eq!( + iter.next(), + Some(&Op::Push(Node::KVValueHash( + vec![5], + vec![5], + [ + 116, 30, 0, 135, 25, 118, 86, 14, 12, 107, 215, 214, 133, 122, 48, 45, 180, 21, + 158, 223, 88, 148, 181, 149, 189, 65, 121, 19, 81, 118, 11, 106 + ] + ))) + ); + assert_eq!(iter.next(), Some(&Op::Parent)); + assert_eq!( + iter.next(), + Some(&Op::Push(Node::KVValueHash( + vec![7], + vec![7], + [ + 63, 193, 78, 215, 236, 222, 32, 58, 144, 66, 94, 225, 145, 233, 219, 89, 102, + 51, 109, 115, 127, 3, 152, 236, 147, 183, 100, 81, 123, 109, 244, 0 + ] + ))) + ); + assert_eq!( + iter.next(), + Some(&Op::Push(Node::KVValueHash( + vec![8], + vec![8], + [ + 205, 24, 196, 78, 21, 130, 132, 58, 44, 29, 21, 175, 68, 254, 158, 189, 49, + 158, 250, 151, 137, 22, 160, 107, 216, 238, 129, 230, 199, 251, 197, 51 + ] + ))) + ); + assert_eq!(iter.next(), Some(&Op::Parent)); + assert_eq!(iter.next(), Some(&Op::Child)); + assert!(iter.next().is_none()); assert_eq!(absence, (false, true)); let mut 
bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query - .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ - (vec![5], vec![5]), (vec![4], vec![4]), - (vec![3], vec![3]), - (vec![2], vec![2]), - ], + (vec![5], vec![5]), + (vec![7], vec![7]), + (vec![8], vec![8]), + ] ); - - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; - let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), false) - .unwrap() - .expect("create_proof errored"); - - assert_eq!(absence, (false, false)); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), false, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); - assert_eq!(res.limit, Some(0)); - } - - #[test] - fn range_after_proof() { - // let mut tree = make_6_node_tree(); - // let mut walker = RefWalker::new(&mut tree, PanicSource {}); - // - // let queryitems = vec![RangeAfter(vec![3]..)]; - // let (proof, absence, ..) 
= walker - // .create_full_proof(queryitems.as_slice(), None, true) - // .unwrap() - // .expect("create_proof errored"); - // - // let mut iter = proof.iter(); - // assert_eq!( - // iter.next(), - // Some(&Op::Push(Node::Hash([ - // 121, 235, 207, 195, 143, 58, 159, 120, 166, 33, 151, 45, 178, 124, - // 91, 233, 201, 4, 241, 127, 41, 198, 197, 228, 19, 190, 36, - // 173, 183, 73, 104, 30 ]))) - // ); - // assert_eq!( - // iter.next(), - // Some(&Op::Push(Node::KVDigest( - // vec![3], - // [ - // 210, 173, 26, 11, 185, 253, 244, 69, 11, 216, 113, 81, 192, 139, - // 153, 104, 205, 4, 107, 218, 102, 84, 170, 189, 186, 36, - // 48, 176, 169, 129, 231, 144 ] - // ))) - // ); - // assert_eq!(iter.next(), Some(&Op::Parent)); - // assert_eq!( - // iter.next(), - // Some(&Op::Push(Node::KVValueHash( - // vec![4], - // vec![4], - // [ - // 198, 129, 51, 156, 134, 199, 7, 21, 172, 89, 146, 71, 4, 16, 82, - // 205, 89, 51, 227, 215, 139, 195, 237, 202, 159, 191, 209, - // 172, 156, 38, 239, 192 ] - // ))) - // ); - // assert_eq!(iter.next(), Some(&Op::Child)); - // assert_eq!( - // iter.next(), - // Some(&Op::Push(Node::KVValueHash( - // vec![5], - // vec![5], - // [ - // 116, 30, 0, 135, 25, 118, 86, 14, 12, 107, 215, 214, 133, 122, - // 48, 45, 180, 21, 158, 223, 88, 148, 181, 149, 189, 65, - // 121, 19, 81, 118, 11, 106 ] - // ))) - // ); - // assert_eq!(iter.next(), Some(&Op::Parent)); - // assert_eq!( - // iter.next(), - // Some(&Op::Push(Node::KVValueHash( - // vec![7], - // vec![7], - // [ - // 63, 193, 78, 215, 236, 222, 32, 58, 144, 66, 94, 225, 145, 233, - // 219, 89, 102, 51, 109, 115, 127, 3, 152, 236, 147, 183, - // 100, 81, 123, 109, 244, 0 ] - // ))) - // ); - // assert_eq!( - // iter.next(), - // Some(&Op::Push(Node::KVValueHash( - // vec![8], - // vec![8], - // [ - // 205, 24, 196, 78, 21, 130, 132, 58, 44, 29, 21, 175, 68, 254, - // 158, 189, 49, 158, 250, 151, 137, 22, 160, 107, 216, 238, - // 129, 230, 199, 251, 197, 51 ] - // ))) - // ); - // 
assert_eq!(iter.next(), Some(&Op::Parent)); - // assert_eq!(iter.next(), Some(&Op::Child)); - // assert!(iter.next().is_none()); - // assert_eq!(absence, (false, true)); - // - // let mut bytes = vec![]; - // encode_into(proof.iter(), &mut bytes); - // let mut query = Query::new(); - // for item in queryitems { - // query.insert_item(item); - // } - // let res = query - // .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) - // .unwrap() - // .unwrap(); - // compare_result_tuples_not_optional( - // res.result_set, - // vec![ - // (vec![4], vec![4]), - // (vec![5], vec![5]), - // (vec![7], vec![7]), - // (vec![8], vec![8]), - // ], - // ); - // assert_eq!(res.limit, None); + assert_eq!(res.limit, None); // Limit result set to 1 item let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; + let query_items = vec![QueryItem::RangeAfter(vec![3]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3290,29 +2878,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; + let query_items = vec![QueryItem::RangeAfter(vec![3]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), true) + .create_full_proof(query_items.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3322,16 +2910,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![4], vec![4]), (vec![5], vec![5])], + vec![(vec![4], vec![4]), (vec![5], vec![5])] ); assert_eq!(res.limit, Some(0)); @@ -3339,15 +2927,15 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; + let query_items = vec![QueryItem::RangeAfter(vec![3]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), true) + .create_full_proof(query_items.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; + let equivalent_query_items = vec![QueryItem::RangeAfter(vec![3]..)]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3357,100 +2945,31 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8]), - ], + ] ); assert_eq!(res.limit, Some(96)); - // skip 1 element - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![5], vec![5])]); - assert_eq!(res.limit, Some(0)); - - // skip 2 elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![7], vec![7])]); - assert_eq!(res.limit, Some(0)); - - // skip all elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - // right_to_left proof let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![RangeAfter(vec![3]..)]; + let query_items = vec![RangeAfter(vec![3]..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -3459,29 +2978,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5]), (vec![4], vec![4]), - ], + ] ); let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![RangeAfter(vec![3]..)]; + let query_items = vec![RangeAfter(vec![3]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(3), false) + .create_full_proof(query_items.as_slice(), Some(3), false) .unwrap() .expect("create_proof errored"); @@ -3490,16 +3009,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(3), false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])], + vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])] ); assert_eq!(res.limit, Some(0)); } @@ -3509,9 +3028,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3578,202 +3097,133 @@ mod test { assert_eq!(iter.next(), Some(&Op::Parent)); assert_eq!(iter.next(), Some(&Op::Child)); assert!(iter.next().is_none()); - assert_eq!(absence, (false, false)); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional( - res.result_set, - vec![(vec![4], vec![4]), (vec![5], vec![5])], - ); - assert_eq!(res.limit, None); - - // Limit result set to 1 item - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; - let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; - let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) - .unwrap() - .expect("create_proof errored"); - - assert_eq!(proof, equivalent_proof); - assert_eq!(absence, equivalent_absence); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); - assert_eq!(res.limit, Some(0)); - - // Limit result set to 2 items - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; - let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), true) - .unwrap() - .expect("create_proof errored"); - - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; - let (equivalent_proof, equivalent_absence, ..) = walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) - .unwrap() - .expect("create_proof errored"); - - assert_eq!(proof, equivalent_proof); - assert_eq!(absence, equivalent_absence); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional( - res.result_set, - vec![(vec![4], vec![4]), (vec![5], vec![5])], - ); - assert_eq!(res.limit, Some(0)); - - // Limit result set to 100 items - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; - let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(100), true) - .unwrap() - .expect("create_proof errored"); - - let equivalent_queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; - let (equivalent_proof, equivalent_absence, ..) = walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) - .unwrap() - .expect("create_proof errored"); - - assert_eq!(proof, equivalent_proof); - assert_eq!(absence, equivalent_absence); + assert_eq!(absence, (false, false)); let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query - .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![4], vec![4]), (vec![5], vec![5])], + vec![(vec![4], vec![4]), (vec![5], vec![5])] ); - assert_eq!(res.limit, Some(98)); + assert_eq!(res.limit, None); - // skip 1 element + // Limit result set to 1 item let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + let query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let (proof, absence, ..) = walker + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; + let (equivalent_proof, equivalent_absence, ..) 
= walker + .create_full_proof(equivalent_query_items.as_slice(), None, true) + .unwrap() + .expect("create_proof errored"); + + assert_eq!(proof, equivalent_proof); + assert_eq!(absence, equivalent_absence); + let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![5], vec![5])]); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - // skip 2 elements + // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + let query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let (proof, absence, ..) = walker + .create_full_proof(query_items.as_slice(), Some(2), true) + .unwrap() + .expect("create_proof errored"); + + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; + let (equivalent_proof, equivalent_absence, ..) 
= walker + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); + assert_eq!(proof, equivalent_proof); + assert_eq!(absence, equivalent_absence); + let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![4], vec![4]), (vec![5], vec![5])] + ); + assert_eq!(res.limit, Some(0)); - // skip all elements + // Limit result set to 100 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + let query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let (proof, absence, ..) = walker + .create_full_proof(query_items.as_slice(), Some(100), true) + .unwrap() + .expect("create_proof errored"); + + let equivalent_query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let (equivalent_proof, equivalent_absence, ..) 
= walker + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); + assert_eq!(proof, equivalent_proof); + assert_eq!(absence, equivalent_absence); + let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![4], vec![4]), (vec![5], vec![5])] + ); + assert_eq!(res.limit, Some(98)); // right_to_left let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -3782,24 +3232,24 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![5], vec![5]), (vec![4], vec![4])], + vec![(vec![5], vec![5]), (vec![4], vec![4])] ); let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(300), false) + .create_full_proof(query_items.as_slice(), Some(300), false) .unwrap() .expect("create_proof errored"); @@ -3808,15 +3258,18 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(300), false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); - assert_eq!(res.limit, Some(299)); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![5], vec![5]), (vec![4], vec![4])] + ); + assert_eq!(res.limit, Some(298)); } #[test] @@ -3824,9 +3277,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; + let query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, 
absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3897,16 +3350,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])], + vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])] ); assert_eq!(res.limit, None); @@ -3914,15 +3367,15 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; + let query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3932,29 +3385,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; + let query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), true) + .create_full_proof(query_items.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3964,16 +3417,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![4], vec![4]), (vec![5], vec![5])], + vec![(vec![4], vec![4]), (vec![5], vec![5])] ); assert_eq!(res.limit, Some(0)); @@ -3981,15 +3434,15 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; + let query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), true) + .create_full_proof(query_items.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3999,121 +3452,45 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])], + vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])] ); assert_eq!(res.limit, Some(97)); - // skip 1 element - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![5], vec![5])]); - assert_eq!(res.limit, Some(0)); - - // skip 2 elements + // right_to_left proof let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + let query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; + let (proof, absence, ..) 
= walker + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![7], vec![7])]); - assert_eq!(res.limit, Some(0)); - - // skip all elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); + assert_eq!(absence, (false, false)); let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - - // right_to_left proof - // let mut tree = make_6_node_tree(); - // let mut walker = RefWalker::new(&mut tree, PanicSource {}); - // - // let queryitems = - // vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; - // let (proof, absence, ..) 
= walker - // .create_full_proof(queryitems.as_slice(), None, None, false) - // .unwrap() - // .expect("create_proof errored"); - // - // assert_eq!(absence, (false, false)); - // - // let mut bytes = vec![]; - // encode_into(proof.iter(), &mut bytes); - // let mut query = Query::new(); - // for item in queryitems { - // query.insert_item(item); - // } - // let res = verify_query( - // bytes.as_slice(), - // &query, - // None, - // None, - // false, - // tree.hash().unwrap(), - // ) - // .unwrap() - // .unwrap(); - // compare_result_tuples( - // res.result_set, - // vec![(vec![7], vec![7]), (vec![5], vec![5]), (vec![4], vec![4])], - // ); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![7], vec![7]), (vec![5], vec![5]), (vec![4], vec![4])] + ); } #[test] @@ -4121,9 +3498,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFull(..)]; + let query_items = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4206,14 +3583,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![2], vec![2]), @@ -4222,7 +3599,7 @@ mod test { (vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8]), - ], + ] ); assert_eq!(res.limit, None); @@ -4230,15 +3607,15 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFull(..)]; + let query_items = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![2])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![2])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4248,29 +3625,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![2], vec![2])]); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFull(..)]; + let query_items = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), true) + .create_full_proof(query_items.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![3])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![3])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4280,16 +3657,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![2], vec![2]), (vec![3], vec![3])], + vec![(vec![2], vec![2]), (vec![3], vec![3])] ); assert_eq!(res.limit, Some(0)); @@ -4297,15 +3674,15 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFull(..)]; + let query_items = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), true) + .create_full_proof(query_items.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeFull(..)]; + let equivalent_query_items = vec![QueryItem::RangeFull(..)]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4315,14 +3692,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![2], vec![2]), @@ -4331,92 +3708,17 @@ mod test { (vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8]), - ], + ] ); assert_eq!(res.limit, Some(94)); - // skip 1 element - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFull(..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(3), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(3), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional( - res.result_set, - vec![(vec![3], vec![3]), (vec![4], vec![4]), (vec![5], vec![5])], - ); - assert_eq!(res.limit, Some(0)); - - // skip 2 elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFull(..)]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(2), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional( - res.result_set, - vec![(vec![4], vec![4]), (vec![5], vec![5])], - ); - assert_eq!(res.limit, Some(0)); - - // skip all elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFull(..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - // right_to_left proof let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFull(..)]; + let query_items = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -4425,14 +3727,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![8], vec![8]), @@ -4441,15 +3743,15 @@ mod test { (vec![4], vec![4]), (vec![3], vec![3]), (vec![2], vec![2]), - ], + ] ); let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFull(..)]; + let query_items = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), false) + .create_full_proof(query_items.as_slice(), Some(2), false) .unwrap() .expect("create_proof errored"); @@ -4458,16 +3760,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(2), false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![5], vec![5]), (vec![4], vec![4])], + vec![(vec![8], vec![8]), (vec![7], vec![7])] ); assert_eq!(res.limit, Some(0)); } @@ -4477,9 +3779,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFrom(vec![2]..)]; + let query_items = vec![QueryItem::RangeFrom(vec![2]..)]; let (proof, _, limit) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) + 
.create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); @@ -4535,91 +3837,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = query - .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) - .unwrap() - .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![2], vec![2])]); - assert_eq!(res.limit, Some(0)); - } - - #[test] - fn proof_with_offset() { - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFrom(vec![2]..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut iter = proof.iter(); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVDigest( - vec![2], - [ - 183, 215, 112, 4, 15, 120, 14, 157, 239, 246, 188, 3, 138, 190, 166, 110, 16, - 139, 136, 208, 152, 209, 109, 36, 205, 116, 134, 235, 103, 16, 96, 178 - ] - ))) - ); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVDigest( - vec![3], - [ - 210, 173, 26, 11, 185, 253, 244, 69, 11, 216, 113, 81, 192, 139, 153, 104, 205, - 4, 107, 218, 102, 84, 170, 189, 186, 36, 48, 176, 169, 129, 231, 144 - ] - ))) - ); - assert_eq!(iter.next(), Some(&Op::Parent)); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVValueHash( - vec![4], - vec![4], - [ - 198, 129, 51, 156, 134, 199, 7, 21, 172, 89, 146, 71, 4, 16, 82, 205, 89, 51, - 227, 215, 139, 195, 237, 202, 159, 191, 209, 172, 156, 38, 239, 192 - ] - ))) - ); - assert_eq!(iter.next(), Some(&Op::Child)); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVHash([ - 61, 233, 169, 61, 231, 15, 78, 53, 219, 99, 131, 45, 44, 165, 68, 87, 7, 52, 238, - 68, 142, 211, 110, 161, 111, 220, 108, 11, 17, 31, 88, 197 - ]))) - ); - assert_eq!(iter.next(), Some(&Op::Parent)); - assert_eq!( - 
iter.next(), - Some(&Op::Push(Node::Hash([ - 133, 188, 175, 131, 60, 89, 221, 135, 133, 53, 205, 110, 58, 56, 128, 58, 1, 227, - 75, 122, 83, 20, 125, 44, 149, 44, 62, 130, 252, 134, 105, 200 - ]))) - ); - assert_eq!(iter.next(), Some(&Op::Child)); - assert!(iter.next().is_none()); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional(res.result_set, vec![(vec![4], vec![4])]); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); } @@ -4628,9 +3853,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFrom(vec![3]..)]; + let query_items = vec![QueryItem::RangeFrom(vec![3]..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -4709,14 +3934,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new_with_direction(false); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![8], vec![8]), @@ -4724,7 +3949,7 @@ mod test { (vec![5], vec![5]), (vec![4], vec![4]), (vec![3], vec![3]), - ], + ] ); } @@ -4733,11 +3958,11 @@ mod test { let mut tree = make_tree_seq(10); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Range( + let query_items = vec![QueryItem::Range( vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 6, 5], )]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4814,19 +4039,19 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), - ], + ] ); } @@ -4835,12 +4060,12 @@ mod test { let mut tree = make_tree_seq(10); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![ + let query_items = vec![ // 7 is not inclusive QueryItem::Range(vec![0, 0, 0, 0, 0, 0, 0, 5, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7]), ]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4916,16 +4141,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } let res = query .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) .unwrap() .unwrap(); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], + vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])] ); } @@ -4957,9 +4182,9 @@ mod test { .unwrap(); assert_eq!(res.result_set.len(), 1); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], + vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])] ); // 1..10 prove (2..=5, 7..10) 
subset (3..=4, 7..=8) @@ -4983,14 +4208,14 @@ mod test { .unwrap(); assert_eq!(res.result_set.len(), 4); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 3], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 4], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 8], vec![123; 60]), - ], + ] ); // 1..10 prove (2..=5, 6..10) subset (4..=8) @@ -5013,7 +4238,7 @@ mod test { .unwrap(); assert_eq!(res.result_set.len(), 5); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 4], vec![123; 60]), @@ -5021,7 +4246,7 @@ mod test { (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 8], vec![123; 60]), - ], + ] ); // 1..10 prove (1..=3, 2..=5) subset (1..=5) @@ -5044,7 +4269,7 @@ mod test { .unwrap(); assert_eq!(res.result_set.len(), 5); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 1], vec![123; 60]), @@ -5052,7 +4277,7 @@ mod test { (vec![0, 0, 0, 0, 0, 0, 0, 3], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 4], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), - ], + ] ); // 1..10 prove full (..) limit to 5, subset (1..=5) @@ -5074,7 +4299,7 @@ mod test { .unwrap(); assert_eq!(res.result_set.len(), 5); - compare_result_tuples_not_optional( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 1], vec![123; 60]), @@ -5082,36 +4307,7 @@ mod test { (vec![0, 0, 0, 0, 0, 0, 0, 3], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 4], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), - ], - ); - - // 1..10 prove full (..) limit to 5, subset (1..=5) - let mut query = Query::new(); - query.insert_range_from(vec![0, 0, 0, 0, 0, 0, 0, 1]..); - let (proof, ..) 
= walker - .create_full_proof(query.items.as_slice(), None, true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - - let mut query = Query::new(); - query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 1]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); - let res = query - .verify_proof(bytes.as_slice(), None, true, expected_hash) - .unwrap() - .unwrap(); - - assert_eq!(res.result_set.len(), 4); - compare_result_tuples_not_optional( - res.result_set, - vec![ - (vec![0, 0, 0, 0, 0, 0, 0, 2], vec![123; 60]), - (vec![0, 0, 0, 0, 0, 0, 0, 3], vec![123; 60]), - (vec![0, 0, 0, 0, 0, 0, 0, 4], vec![123; 60]), - (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), - ], + ] ); } @@ -5169,10 +4365,10 @@ mod test { #[test] fn query_from_vec() { - let queryitems = vec![QueryItem::Range( + let query_items = vec![QueryItem::Range( vec![0, 0, 0, 0, 0, 0, 0, 5, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )]; - let query = Query::from(queryitems); + let query = Query::from(query_items); let mut expected = Vec::new(); expected.push(QueryItem::Range( @@ -5203,8 +4399,8 @@ mod test { #[test] fn query_item_from_vec_u8() { - let queryitems: Vec = vec![42]; - let query = QueryItem::from(queryitems); + let query_items: Vec = vec![42]; + let query = QueryItem::from(query_items); let expected = QueryItem::Key(vec![42]); assert_eq!(query, expected); From fa325e8a5129e15335b90d7e30ce164507e5e3fb Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Sun, 7 Jul 2024 08:44:30 +0700 Subject: [PATCH 14/34] more work --- grovedb/src/element/helpers.rs | 6 + grovedb/src/element/mod.rs | 9 +- grovedb/src/element/query.rs | 10 +- grovedb/src/operations/proof/generate.rs | 20 +- grovedb/src/operations/proof/util.rs | 76 ++- grovedb/src/operations/proof/verify.rs | 250 +++++---- grovedb/src/query/mod.rs | 9 +- grovedb/src/query_result_type.rs | 17 +- grovedb/src/tests/query_tests.rs | 628 +++++++++++------------ merk/src/proofs/query/query_item/mod.rs | 9 +- 
merk/src/proofs/query/verify.rs | 2 + 11 files changed, 516 insertions(+), 520 deletions(-) diff --git a/grovedb/src/element/helpers.rs b/grovedb/src/element/helpers.rs index e7cb9df1..156dcbcc 100644 --- a/grovedb/src/element/helpers.rs +++ b/grovedb/src/element/helpers.rs @@ -133,6 +133,12 @@ impl Element { matches!(self, Element::SumTree(..)) } + #[cfg(any(feature = "full", feature = "verify"))] + /// Check if the element is a tree but not a sum tree + pub fn is_basic_tree(&self) -> bool { + matches!(self, Element::Tree(..)) + } + #[cfg(any(feature = "full", feature = "verify"))] /// Check if the element is a tree pub fn is_tree(&self) -> bool { diff --git a/grovedb/src/element/mod.rs b/grovedb/src/element/mod.rs index 27da770c..90bc34ec 100644 --- a/grovedb/src/element/mod.rs +++ b/grovedb/src/element/mod.rs @@ -59,6 +59,7 @@ use grovedb_merk::estimated_costs::{LAYER_COST_SIZE, SUM_LAYER_COST_SIZE}; #[cfg(feature = "full")] use grovedb_visualize::visualize_to_vec; +use crate::operations::proof::util::hex_to_ascii; #[cfg(any(feature = "full", feature = "verify"))] use crate::reference_path::ReferencePathType; @@ -174,14 +175,6 @@ impl fmt::Display for Element { } } -fn hex_to_ascii(hex_value: &[u8]) -> String { - if hex_value.len() == 1 && hex_value[0] < b"0"[0] { - hex::encode(&hex_value) - } else { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) - } -} - impl Element { pub fn type_str(&self) -> &str { match self { diff --git a/grovedb/src/element/query.rs b/grovedb/src/element/query.rs index eba7ea9a..6e6cad6e 100644 --- a/grovedb/src/element/query.rs +++ b/grovedb/src/element/query.rs @@ -45,7 +45,6 @@ use grovedb_path::SubtreePath; #[cfg(feature = "full")] use grovedb_storage::{rocksdb_storage::RocksDbStorage, RawIterator, StorageContext}; -use crate::query_result_type::Path; #[cfg(feature = "full")] use crate::{ element::helpers::raw_decode, @@ -59,6 +58,7 @@ use crate::{ util::{merk_optional_tx, 
merk_optional_tx_internal_error, storage_context_optional_tx}, Error, PathQuery, TransactionArg, }; +use crate::{operations::proof::util::hex_to_ascii, query_result_type::Path}; #[cfg(any(feature = "full", feature = "verify"))] use crate::{Element, SizedQuery}; @@ -246,14 +246,6 @@ where } } -fn hex_to_ascii(hex_value: &[u8]) -> String { - if hex_value.len() == 1 && hex_value[0] < b"0"[0] { - hex::encode(&hex_value) - } else { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) - } -} - impl Element { #[cfg(feature = "full")] /// Returns a vector of result elements based on given query diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index 3f6d320e..c04a0424 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -21,8 +21,10 @@ use grovedb_path::SubtreePath; use grovedb_storage::StorageContext; use crate::{ - query_result_type::QueryResultType, reference_path::path_from_reference_path_type, Element, - Error, GroveDb, PathQuery, + operations::proof::util::{element_hex_to_ascii, hex_to_ascii}, + query_result_type::QueryResultType, + reference_path::path_from_reference_path_type, + Element, Error, GroveDb, PathQuery, }; #[derive(Debug, Clone, Copy, Encode, Decode)] @@ -165,20 +167,6 @@ fn node_to_string(node: &Node) -> String { } } -fn element_hex_to_ascii(hex_value: &[u8]) -> String { - Element::deserialize(hex_value) - .map(|e| e.to_string()) - .unwrap_or_else(|_| hex::encode(hex_value)) -} - -fn hex_to_ascii(hex_value: &[u8]) -> String { - if hex_value.len() == 1 && hex_value[0] < b"0"[0] { - hex::encode(&hex_value) - } else { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) - } -} - impl GroveDb { /// Prove one or more path queries. 
/// If we have more than one path query, we merge into a single path query diff --git a/grovedb/src/operations/proof/util.rs b/grovedb/src/operations/proof/util.rs index fa751653..4d28974d 100644 --- a/grovedb/src/operations/proof/util.rs +++ b/grovedb/src/operations/proof/util.rs @@ -1,4 +1,4 @@ -use std::fmt; +use std::{fmt, fmt::Formatter}; use grovedb_merk::{ proofs::query::{Key, Path, ProvedKeyOptionalValue, ProvedKeyValue}, @@ -72,7 +72,7 @@ pub struct ProvedPathKeyValue { } #[cfg(any(feature = "full", feature = "verify"))] -impl fmt::Display for crate::operations::proof::util::ProvedPathKeyValue { +impl fmt::Display for ProvedPathKeyValue { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ProvedPathKeyValue {{\n")?; write!( @@ -136,29 +136,6 @@ impl TryFrom for ProvedPathKeyValue { } } -fn optional_element_hex_to_ascii(hex_value: Option<&Vec>) -> String { - match hex_value { - None => "None".to_string(), - Some(hex_value) => Element::deserialize(hex_value) - .map(|e| e.to_string()) - .unwrap_or_else(|_| hex::encode(hex_value)), - } -} - -fn element_hex_to_ascii(hex_value: &[u8]) -> String { - Element::deserialize(hex_value) - .map(|e| e.to_string()) - .unwrap_or_else(|_| hex::encode(hex_value)) -} - -fn hex_to_ascii(hex_value: &[u8]) -> String { - if hex_value.len() == 1 && hex_value[0] < b"0"[0] { - hex::encode(&hex_value) - } else { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) - } -} - impl ProvedPathKeyValue { // TODO: make path a reference /// Consumes the ProvedKeyValue and returns a ProvedPathKeyValue given a @@ -266,7 +243,7 @@ mod tests { ]; let proved_path_key_values = ProvedPathKeyOptionalValue::from_proved_key_values(path.clone(), proved_key_values); - assert_eq!(proved_path_key_values.len(), 3); + assert_eq!(proved_path_key_values.len(), 4); assert_eq!( proved_path_key_values[0], ProvedPathKeyOptionalValue { @@ -296,7 +273,7 @@ mod tests { ); assert_eq!( - proved_path_key_values[2], + 
proved_path_key_values[3], ProvedPathKeyOptionalValue { path, key: b"d".to_vec(), @@ -306,3 +283,48 @@ mod tests { ); } } + +pub fn hex_to_ascii(hex_value: &[u8]) -> String { + // Define the set of allowed characters + const ALLOWED_CHARS: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\ + abcdefghijklmnopqrstuvwxyz\ + 0123456789_-/\\[]@"; + + // Check if all characters in hex_value are allowed + if hex_value.iter().all(|&c| ALLOWED_CHARS.contains(&c)) { + // Try to convert to UTF-8 + String::from_utf8(hex_value.to_vec()) + .unwrap_or_else(|_| format!("0x{}", hex::encode(&hex_value))) + } else { + // Hex encode and prepend "0x" + format!("0x{}", hex::encode(&hex_value)) + } +} + +pub fn path_hex_to_ascii(path: &Path) -> String { + path.into_iter() + .map(|e| hex_to_ascii(e.as_slice())) + .collect::>() + .join("/") +} + +pub fn path_as_slices_hex_to_ascii(path: &[&[u8]]) -> String { + path.into_iter() + .map(|e| hex_to_ascii(*e)) + .collect::>() + .join("/") +} +pub fn optional_element_hex_to_ascii(hex_value: Option<&Vec>) -> String { + match hex_value { + None => "None".to_string(), + Some(hex_value) => Element::deserialize(hex_value) + .map(|e| e.to_string()) + .unwrap_or_else(|_| hex::encode(hex_value)), + } +} + +pub fn element_hex_to_ascii(hex_value: &[u8]) -> String { + Element::deserialize(hex_value) + .map(|e| e.to_string()) + .unwrap_or_else(|_| hex::encode(hex_value)) +} diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 883d7c53..00752e13 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -1,14 +1,21 @@ -use std::collections::BTreeSet; +use std::collections::{BTreeMap, BTreeSet}; use grovedb_merk::{ - proofs::{query::VerifyOptions, Query}, + proofs::{ + query::{PathKey, VerifyOptions}, + Query, + }, tree::{combine_hash, value_hash}, + CryptoHash, }; use crate::{ operations::proof::{ generate::{GroveDBProof, GroveDBProofV0, LayerProof}, - 
util::{ProvedPathKeyOptionalValue, ProvedPathKeyValues}, + util::{ + hex_to_ascii, path_as_slices_hex_to_ascii, path_hex_to_ascii, + ProvedPathKeyOptionalValue, ProvedPathKeyValues, + }, ProveOptions, }, query_result_type::PathKeyOptionalElementTrio, @@ -20,7 +27,21 @@ impl GroveDb { proof: &[u8], query: &PathQuery, options: VerifyOptions, - ) -> Result<([u8; 32], Vec), Error> { + ) -> Result<(CryptoHash, Vec), Error> { + if options.absence_proofs_for_non_existing_searched_keys { + // must have a limit + query.query.limit.ok_or(Error::NotSupported( + "limits must be set in verify_query_with_absence_proof".to_string(), + ))? as usize; + } + + // must have no offset + if query.query.offset.is_some() { + return Err(Error::NotSupported( + "offsets in path queries are not supported for proofs".to_string(), + )); + } + let config = bincode::config::standard() .with_big_endian() .with_no_limit(); @@ -36,7 +57,7 @@ impl GroveDb { pub fn verify_query_raw( proof: &[u8], query: &PathQuery, - ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { + ) -> Result<(CryptoHash, ProvedPathKeyValues), Error> { let config = bincode::config::standard() .with_big_endian() .with_no_limit(); @@ -50,6 +71,7 @@ impl GroveDb { VerifyOptions { absence_proofs_for_non_existing_searched_keys: false, verify_proof_succinctness: false, + include_empty_trees_in_result: true, }, )?; @@ -60,7 +82,7 @@ impl GroveDb { proof: &GroveDBProof, query: &PathQuery, options: VerifyOptions, - ) -> Result<([u8; 32], Vec), Error> { + ) -> Result<(CryptoHash, Vec), Error> { match proof { GroveDBProof::V0(proof_v0) => Self::verify_proof_internal_v0(proof_v0, query, options), } @@ -70,7 +92,7 @@ impl GroveDb { proof: &GroveDBProofV0, query: &PathQuery, options: VerifyOptions, - ) -> Result<([u8; 32], Vec), Error> { + ) -> Result<(CryptoHash, Vec), Error> { let mut result = Vec::new(); let mut limit = query.query.limit; let root_hash = Self::verify_layer_proof( @@ -82,6 +104,42 @@ impl GroveDb { &mut result, &options, 
)?; + + if options.absence_proofs_for_non_existing_searched_keys { + // must have a limit + let max_results = query.query.limit.ok_or(Error::NotSupported( + "limits must be set in verify_query_with_absence_proof".to_string(), + ))? as usize; + + let terminal_keys = query.terminal_keys(max_results)?; + + // convert the result set to a btree map + let mut result_set_as_map: BTreeMap> = result + .into_iter() + .map(|(path, key, element)| ((path, key), element)) + .collect(); + + println!( + "t{:?}, r{:?}", + terminal_keys + .iter() + .map(|(path, key)| (path_hex_to_ascii(path), hex_to_ascii(key))) + .collect::>(), + result_set_as_map + .iter() + .map(|((path, key), e)| ((path_hex_to_ascii(path), hex_to_ascii(key)), e)) + .collect::>() + ); + + result = terminal_keys + .into_iter() + .map(|terminal_key| { + let element = result_set_as_map.remove(&terminal_key).flatten(); + (terminal_key.0, terminal_key.1, element) + }) + .collect(); + } + Ok((root_hash, result)) } @@ -89,7 +147,7 @@ impl GroveDb { proof: &GroveDBProof, query: &PathQuery, options: VerifyOptions, - ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { + ) -> Result<(CryptoHash, ProvedPathKeyValues), Error> { match proof { GroveDBProof::V0(proof_v0) => { Self::verify_proof_raw_internal_v0(proof_v0, query, options) @@ -101,7 +159,7 @@ impl GroveDb { proof: &GroveDBProofV0, query: &PathQuery, options: VerifyOptions, - ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { + ) -> Result<(CryptoHash, ProvedPathKeyValues), Error> { let mut result = Vec::new(); let mut limit = query.query.limit; let root_hash = Self::verify_layer_proof( @@ -116,89 +174,6 @@ impl GroveDb { Ok((root_hash, result)) } - // fn verify_layer_proof( - // layer_proof: &LayerProof, - // prove_options: &ProveOptions, - // query: &PathQuery, - // current_path: &[&[u8]], - // result: &mut Vec, - // options: &VerifyOptions, - // ) -> Result<[u8; 32], Error> { - // let internal_query = - // query - // .query_items_at_path(current_path) - // 
.ok_or(Error::CorruptedPath(format!( - // "verify: path {} should be part of path_query {}", - // current_path - // .iter() - // .map(hex::encode) - // .collect::>() - // .join("/"), - // query - // )))?; - // - // let level_query = Query { - // items: internal_query.items.to_vec(), - // default_subquery_branch: - // internal_query.default_subquery_branch.into_owned(), - // conditional_subquery_branches: internal_query - // .conditional_subquery_branches - // .map(|a| a.into_owned()), - // left_to_right: internal_query.left_to_right, - // }; - // - // let (root_hash, merk_result) = execute_proof( - // &layer_proof.merk_proof, - // &level_query, - // Some(layer_proof.lower_layers.len() as u16), - // internal_query.left_to_right, - // ) - // .unwrap() - // .map_err(|e| { - // eprintln!("{e}"); - // Error::InvalidProof(format!("invalid proof verification parameters: - // {}", e)) })?; - // - // let mut verified_keys = BTreeSet::new(); - // - // for proved_key_value in merk_result.result_set { - // let mut path = current_path.to_vec(); - // let key = proved_key_value.key; - // let value = proved_key_value.value; - // path.push(&key); - // - // verified_keys.insert(key.clone()); - // - // if let Some(lower_layer) = layer_proof.lower_layers.get(&key) { - // let lower_hash = Self::verify_layer_proof( - // lower_layer, - // prove_options, - // query, - // &path, - // result, - // options, - // )?; - // if lower_hash != value_hash(&value).value { - // return Err(Error::InvalidProof("Mismatch in lower layer - // hash".into())); } - // } else { - // let element = Element::deserialize(&value)?; - // result.push(( - // path.iter().map(|p| p.to_vec()).collect(), - // key, - // Some(element), - // )); - // } - // } - // - // // if !is_subset { - // // // Verify completeness only if not doing subset verification - // // self.verify_completeness(&query_items, &merk_result.result_set, - // // current_path)?; } - // - // Ok(root_hash) - // } - fn verify_layer_proof( layer_proof: 
&LayerProof, prove_options: &ProveOptions, @@ -207,7 +182,7 @@ impl GroveDb { current_path: &[&[u8]], result: &mut Vec, options: &VerifyOptions, - ) -> Result<[u8; 32], Error> + ) -> Result where T: TryFrom, Error: From<>::Error>, @@ -247,7 +222,11 @@ impl GroveDb { Error::InvalidProof(format!("invalid proof verification parameters: {}", e)) })?; - println!("merk result is {}", merk_result); + println!( + "current path {} merk result is {}", + path_as_slices_hex_to_ascii(current_path), + merk_result + ); let mut verified_keys = BTreeSet::new(); @@ -261,13 +240,12 @@ impl GroveDb { if let Some(value_bytes) = &proved_key_value.value { let element = Element::deserialize(value_bytes)?; - path.push(key); - verified_keys.insert(key.clone()); if let Some(lower_layer) = layer_proof.lower_layers.get(key) { match element { Element::Tree(Some(_), _) | Element::SumTree(Some(_), ..) => { + path.push(key); let lower_hash = Self::verify_layer_proof( lower_layer, prove_options, @@ -303,20 +281,24 @@ impl GroveDb { } } } else if !in_path_proving { - let path_key_optional_value = - ProvedPathKeyOptionalValue::from_proved_key_value( - path.iter().map(|p| p.to_vec()).collect(), - proved_key_value, + if options.include_empty_trees_in_result + || !matches!(element, Element::Tree(None, _)) + { + let path_key_optional_value = + ProvedPathKeyOptionalValue::from_proved_key_value( + path.iter().map(|p| p.to_vec()).collect(), + proved_key_value, + ); + println!( + "pushing {} limit left after is {:?}", + &path_key_optional_value, limit_left ); - println!( - "pushing {} limit left after is {:?}", - &path_key_optional_value, limit_left - ); - result.push(path_key_optional_value.try_into()?); - - limit_left.as_mut().map(|limit| *limit -= 1); - if limit_left == &Some(0) { - break; + result.push(path_key_optional_value.try_into()?); + + limit_left.as_mut().map(|limit| *limit -= 1); + if limit_left == &Some(0) { + break; + } } } } @@ -458,13 +440,14 @@ impl GroveDb { pub fn verify_query( proof: 
&[u8], query: &PathQuery, - ) -> Result<([u8; 32], Vec), Error> { + ) -> Result<(CryptoHash, Vec), Error> { Self::verify_query_with_options( proof, query, VerifyOptions { absence_proofs_for_non_existing_searched_keys: false, verify_proof_succinctness: true, + include_empty_trees_in_result: false, }, ) } @@ -472,13 +455,14 @@ impl GroveDb { pub fn verify_subset_query( proof: &[u8], query: &PathQuery, - ) -> Result<([u8; 32], Vec), Error> { + ) -> Result<(CryptoHash, Vec), Error> { Self::verify_query_with_options( proof, query, VerifyOptions { absence_proofs_for_non_existing_searched_keys: false, verify_proof_succinctness: false, + include_empty_trees_in_result: false, }, ) } @@ -486,13 +470,14 @@ impl GroveDb { pub fn verify_query_with_absence_proof( proof: &[u8], query: &PathQuery, - ) -> Result<([u8; 32], Vec), Error> { + ) -> Result<(CryptoHash, Vec), Error> { Self::verify_query_with_options( proof, query, VerifyOptions { absence_proofs_for_non_existing_searched_keys: true, verify_proof_succinctness: true, + include_empty_trees_in_result: false, }, ) } @@ -500,14 +485,53 @@ impl GroveDb { pub fn verify_subset_query_with_absence_proof( proof: &[u8], query: &PathQuery, - ) -> Result<([u8; 32], Vec), Error> { + ) -> Result<(CryptoHash, Vec), Error> { Self::verify_query_with_options( proof, query, VerifyOptions { absence_proofs_for_non_existing_searched_keys: true, verify_proof_succinctness: false, + include_empty_trees_in_result: false, }, ) } + + /// Verify subset proof with a chain of path query functions. + /// After subset verification with the first path query, the result if + /// passed to the next path query generation function which generates a + /// new path query Apply the new path query, and pass the result to the + /// next ... This is useful for verifying proofs with multiple path + /// queries that depend on one another. 
+ pub fn verify_query_with_chained_path_queries( + proof: &[u8], + first_query: &PathQuery, + chained_path_queries: Vec, + ) -> Result<(CryptoHash, Vec>), Error> + where + C: Fn(Vec) -> Option, + { + let mut results = vec![]; + + let (last_root_hash, elements) = Self::verify_subset_query(proof, first_query)?; + results.push(elements); + + // we should iterate over each chained path queries + for path_query_generator in chained_path_queries { + let new_path_query = path_query_generator(results[results.len() - 1].clone()).ok_or( + Error::InvalidInput("one of the path query generators returns no path query"), + )?; + let (new_root_hash, new_elements) = Self::verify_subset_query(proof, &new_path_query)?; + if new_root_hash != last_root_hash { + return Err(Error::InvalidProof(format!( + "root hash for different path queries do no match, first is {}, this one is {}", + hex::encode(last_root_hash), + hex::encode(new_root_hash) + ))); + } + results.push(new_elements); + } + + Ok((last_root_hash, results)) + } } diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index 41438b44..10f0bdf5 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -9,6 +9,7 @@ use grovedb_merk::proofs::query::SubqueryBranch; use grovedb_merk::proofs::Query; use indexmap::IndexMap; +use crate::operations::proof::util::hex_to_ascii; #[cfg(any(feature = "full", feature = "verify"))] use crate::query_result_type::PathKey; #[cfg(any(feature = "full", feature = "verify"))] @@ -48,14 +49,6 @@ impl fmt::Display for PathQuery { } } -fn hex_to_ascii(hex_value: &[u8]) -> String { - if hex_value.len() == 1 && hex_value[0] < b"0"[0] { - hex::encode(&hex_value) - } else { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) - } -} - #[cfg(any(feature = "full", feature = "verify"))] #[derive(Debug, Clone)] /// Holds a query to apply to a tree and an optional limit/offset value. 
diff --git a/grovedb/src/query_result_type.rs b/grovedb/src/query_result_type.rs index 41bf47f8..12db23fc 100644 --- a/grovedb/src/query_result_type.rs +++ b/grovedb/src/query_result_type.rs @@ -9,7 +9,9 @@ use std::{ pub use grovedb_merk::proofs::query::{Key, Path, PathKey}; use crate::{ - operations::proof::util::{ProvedPathKeyOptionalValue, ProvedPathKeyValue}, + operations::proof::util::{ + hex_to_ascii, path_hex_to_ascii, ProvedPathKeyOptionalValue, ProvedPathKeyValue, + }, Element, Error, }; @@ -109,14 +111,6 @@ impl BTreeMapLevelResult { } } -fn hex_to_ascii(hex_value: &[u8]) -> String { - if hex_value.len() == 1 && hex_value[0] < b"0"[0] { - hex::encode(&hex_value) - } else { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) - } -} - impl BTreeMapLevelResult { pub fn len_of_values_at_path(&self, path: &[&[u8]]) -> u16 { let mut current = self; @@ -462,10 +456,7 @@ impl fmt::Display for QueryResultElement { write!( f, "PathKeyElementTrioResultItem(path: {}, key: {}, element: {})", - path.iter() - .map(|p| hex_to_ascii(p)) - .collect::>() - .join("/"), + path_hex_to_ascii(path), hex_to_ascii(key), element ) diff --git a/grovedb/src/tests/query_tests.rs b/grovedb/src/tests/query_tests.rs index 4aa58e00..8f838aa1 100644 --- a/grovedb/src/tests/query_tests.rs +++ b/grovedb/src/tests/query_tests.rs @@ -7,10 +7,11 @@ mod tests { use crate::{ batch::GroveDbOp, - query_result_type::QueryResultType, + query_result_type::{PathKeyOptionalElementTrio, QueryResultType}, reference_path::ReferencePathType, tests::{ - common::compare_result_sets, make_deep_tree, make_test_grovedb, TempGroveDb, TEST_LEAF, + common::compare_result_sets, make_deep_tree, make_test_grovedb, TempGroveDb, + ANOTHER_TEST_LEAF, TEST_LEAF, }, Element, GroveDb, PathQuery, SizedQuery, }; @@ -1826,6 +1827,15 @@ mod tests { let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); 
assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + + println!( + "{}", + result_set + .iter() + .map(|a| a.to_string()) + .collect::>() + .join(", ") + ); assert_eq!(result_set.len(), 5); // TODO: verify that the result set is exactly the same @@ -2321,7 +2331,6 @@ mod tests { let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - // first we prove non-verbose let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); @@ -2367,10 +2376,6 @@ mod tests { ) ); - // prove verbose - let verbose_proof = db.prove_query(&path_query, None).unwrap().unwrap(); - assert!(verbose_proof.len() > proof.len()); - // subset path query let mut query = Query::new(); query.insert_key(b"innertree".to_vec()); @@ -2379,8 +2384,7 @@ mod tests { query.set_subquery(subq); let subset_path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - let (hash, result_set) = - GroveDb::verify_subset_query(&verbose_proof, &subset_path_query).unwrap(); + let (hash, result_set) = GroveDb::verify_subset_query(&proof, &subset_path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 1); assert_eq!( @@ -2392,315 +2396,303 @@ mod tests { ) ); } - // #[test] - // fn test_chained_path_query_verification() { - // let db = make_deep_tree(); - // - // let mut query = Query::new(); - // query.insert_all(); - // let mut subq = Query::new(); - // subq.insert_all(); - // let mut subsubq = Query::new(); - // subsubq.insert_all(); - // - // subq.set_subquery(subsubq); - // query.set_subquery(subq); - // - // let path_query = PathQuery::new_unsized(vec![b"deep_leaf".to_vec()], - // query); - // - // // first prove non verbose - // let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - // let (hash, result_set) = GroveDb::verify_query(&proof, - // &path_query).unwrap(); 
assert_eq!(hash, - // db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 14); - // - // // prove verbose - // let verbose_proof = db.prove_query(&path_query, Some(ProveOptions { - // is_verbose: true, - // multilevel_results: false, - // })).unwrap().unwrap(); - // assert!(verbose_proof.len() > proof.len()); - // - // // init deeper_1 path query - // let mut query = Query::new(); - // query.insert_all(); - // - // let deeper_1_path_query = PathQuery::new_unsized( - // vec![ - // b"deep_leaf".to_vec(), - // b"deep_node_1".to_vec(), - // b"deeper_1".to_vec(), - // ], - // query, - // ); - // - // // define the path query generators - // let mut chained_path_queries = vec![]; - // chained_path_queries.push(|_elements: Vec| { - // let mut query = Query::new(); - // query.insert_all(); - // - // let deeper_2_path_query = PathQuery::new_unsized( - // vec![ - // b"deep_leaf".to_vec(), - // b"deep_node_1".to_vec(), - // b"deeper_2".to_vec(), - // ], - // query, - // ); - // Some(deeper_2_path_query) - // }); - // - // // verify the path query chain - // let (root_hash, results) = - // GroveDb::verify_query_with_chained_path_queries( &verbose_proof, - // &deeper_1_path_query, - // chained_path_queries, - // ) - // .unwrap(); - // assert_eq!(root_hash, db.root_hash(None).unwrap().unwrap()); - // assert_eq!(results.len(), 2); - // assert_eq!(results[0].len(), 3); - // assert_eq!( - // results[0][0], - // ( - // vec![ - // b"deep_leaf".to_vec(), - // b"deep_node_1".to_vec(), - // b"deeper_1".to_vec() - // ], - // b"key1".to_vec(), - // Some(Element::new_item(b"value1".to_vec())) - // ) - // ); - // assert_eq!( - // results[0][1], - // ( - // vec![ - // b"deep_leaf".to_vec(), - // b"deep_node_1".to_vec(), - // b"deeper_1".to_vec() - // ], - // b"key2".to_vec(), - // Some(Element::new_item(b"value2".to_vec())) - // ) - // ); - // assert_eq!( - // results[0][2], - // ( - // vec![ - // b"deep_leaf".to_vec(), - // b"deep_node_1".to_vec(), - // b"deeper_1".to_vec() 
- // ], - // b"key3".to_vec(), - // Some(Element::new_item(b"value3".to_vec())) - // ) - // ); - // - // assert_eq!(results[1].len(), 3); - // assert_eq!( - // results[1][0], - // ( - // vec![ - // b"deep_leaf".to_vec(), - // b"deep_node_1".to_vec(), - // b"deeper_2".to_vec() - // ], - // b"key4".to_vec(), - // Some(Element::new_item(b"value4".to_vec())) - // ) - // ); - // assert_eq!( - // results[1][1], - // ( - // vec![ - // b"deep_leaf".to_vec(), - // b"deep_node_1".to_vec(), - // b"deeper_2".to_vec() - // ], - // b"key5".to_vec(), - // Some(Element::new_item(b"value5".to_vec())) - // ) - // ); - // assert_eq!( - // results[1][2], - // ( - // vec![ - // b"deep_leaf".to_vec(), - // b"deep_node_1".to_vec(), - // b"deeper_2".to_vec() - // ], - // b"key6".to_vec(), - // Some(Element::new_item(b"value6".to_vec())) - // ) - // ); - // } - // - // #[test] - // fn test_query_b_depends_on_query_a() { - // // we have two trees - // // one with a mapping of id to name - // // another with a mapping of name to age - // // we want to get the age of every one after a certain id ordered by name - // let db = make_test_grovedb(); - // - // // TEST_LEAF contains the id to name mapping - // db.insert( - // [TEST_LEAF].as_ref(), - // &[1], - // Element::new_item(b"d".to_vec()), - // None, - // None, - // ) - // .unwrap() - // .expect("successful root tree leaf insert"); - // db.insert( - // [TEST_LEAF].as_ref(), - // &[2], - // Element::new_item(b"b".to_vec()), - // None, - // None, - // ) - // .unwrap() - // .expect("successful root tree leaf insert"); - // db.insert( - // [TEST_LEAF].as_ref(), - // &[3], - // Element::new_item(b"c".to_vec()), - // None, - // None, - // ) - // .unwrap() - // .expect("successful root tree leaf insert"); - // db.insert( - // [TEST_LEAF].as_ref(), - // &[4], - // Element::new_item(b"a".to_vec()), - // None, - // None, - // ) - // .unwrap() - // .expect("successful root tree leaf insert"); - // - // // ANOTHER_TEST_LEAF contains the name to age 
mapping - // db.insert( - // [ANOTHER_TEST_LEAF].as_ref(), - // b"a", - // Element::new_item(vec![10]), - // None, - // None, - // ) - // .unwrap() - // .expect("successful root tree leaf insert"); - // db.insert( - // [ANOTHER_TEST_LEAF].as_ref(), - // b"b", - // Element::new_item(vec![30]), - // None, - // None, - // ) - // .unwrap() - // .expect("successful root tree leaf insert"); - // db.insert( - // [ANOTHER_TEST_LEAF].as_ref(), - // b"c", - // Element::new_item(vec![12]), - // None, - // None, - // ) - // .unwrap() - // .expect("successful root tree leaf insert"); - // db.insert( - // [ANOTHER_TEST_LEAF].as_ref(), - // b"d", - // Element::new_item(vec![46]), - // None, - // None, - // ) - // .unwrap() - // .expect("successful root tree leaf insert"); - // - // // Query: return the age of everyone greater than id 2 ordered by name - // // id 2 - b - // // so we want to return the age for c and d = 12, 46 respectively - // // the proof generator knows that id 2 = b, but the verifier doesn't - // // hence we need to generate two proofs - // // prove that 2 - b then prove age after b - // // the verifier has to use the result of the first proof 2 - b - // // to generate the path query for the verification of the second proof - // - // // query name associated with id 2 - // let mut query = Query::new(); - // query.insert_key(vec![2]); - // let mut path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], - // query); - // - // // first we show that this returns the correct output - // let proof = db.prove_query(&path_query_one, None).unwrap().unwrap(); - // let (hash, result_set) = GroveDb::verify_query(&proof, - // &path_query_one).unwrap(); assert_eq!(hash, - // db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 1); - // assert_eq!(result_set[0].2, Some(Element::new_item(b"b".to_vec()))); - // - // // next query should return the age for elements above b - // let mut query = Query::new(); - // query.insert_range_after(b"b".to_vec()..); 
- // let path_query_two = - // PathQuery::new_unsized(vec![ANOTHER_TEST_LEAF.to_vec()], query); - // - // // show that we get the correct output - // let proof = db.prove_query(&path_query_two, None).unwrap().unwrap(); - // let (hash, result_set) = GroveDb::verify_query(&proof, - // &path_query_two).unwrap(); assert_eq!(hash, - // db.root_hash(None).unwrap().unwrap()); assert_eq!(result_set.len(), 2); - // assert_eq!(result_set[0].2, Some(Element::new_item(vec![12]))); - // assert_eq!(result_set[1].2, Some(Element::new_item(vec![46]))); - // - // // now we merge the path queries - // let mut merged_path_queries = PathQuery::merge(vec![&path_query_one, - // &path_query_two]).unwrap(); merged_path_queries.query.limit = Some(3); - // let proof = db.prove_query(&merged_path_queries, Some(ProveOptions { - // is_verbose: true, - // multilevel_results: false, - // })).unwrap().unwrap(); - // - // // verifier only has access to the statement age > 2 - // // need to first get the name associated with 2 from the proof - // // then use that to construct the next path query - // let mut chained_path_queries = vec![]; - // chained_path_queries.push(|prev_elements: - // Vec| { let mut query = Query::new(); - // let name_element = prev_elements[0].2.as_ref().unwrap(); - // if let Element::Item(name, ..) 
= name_element { - // query.insert_range_after(name.to_owned()..); - // Some(PathQuery::new( - // vec![ANOTHER_TEST_LEAF.to_vec()], - // SizedQuery::new(query, Some(2), None), - // )) - // } else { - // None - // } - // }); - // - // // add limit to path query one - // path_query_one.query.limit = Some(1); - // - // let (_, result_set) = GroveDb::verify_query_with_chained_path_queries( - // proof.as_slice(), - // &path_query_one, - // chained_path_queries, - // ) - // .unwrap(); - // assert_eq!(result_set.len(), 2); - // assert_eq!(result_set[0].len(), 1); - // assert_eq!(result_set[1].len(), 2); - // - // let age_result = result_set[1].clone(); - // assert_eq!(age_result[0].2, Some(Element::new_item(vec![12]))); - // assert_eq!(age_result[1].2, Some(Element::new_item(vec![46]))); - // } + #[test] + fn test_chained_path_query_verification() { + let db = make_deep_tree(); + + let mut query = Query::new(); + query.insert_all(); + let mut subq = Query::new(); + subq.insert_all(); + let mut subsubq = Query::new(); + subsubq.insert_all(); + + subq.set_subquery(subsubq); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![b"deep_leaf".to_vec()], query); + + // first prove non verbose + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 14); + + // init deeper_1 path query + let mut query = Query::new(); + query.insert_all(); + + let deeper_1_path_query = PathQuery::new_unsized( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_1".to_vec(), + ], + query, + ); + + // define the path query generators + let mut chained_path_queries = vec![]; + chained_path_queries.push(|_elements: Vec| { + let mut query = Query::new(); + query.insert_all(); + + let deeper_2_path_query = PathQuery::new_unsized( + vec![ + b"deep_leaf".to_vec(), + 
b"deep_node_1".to_vec(), + b"deeper_2".to_vec(), + ], + query, + ); + Some(deeper_2_path_query) + }); + + // verify the path query chain + let (root_hash, results) = GroveDb::verify_query_with_chained_path_queries( + &proof, + &deeper_1_path_query, + chained_path_queries, + ) + .unwrap(); + assert_eq!(root_hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(results.len(), 2); + assert_eq!(results[0].len(), 3); + assert_eq!( + results[0][0], + ( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_1".to_vec() + ], + b"key1".to_vec(), + Some(Element::new_item(b"value1".to_vec())) + ) + ); + assert_eq!( + results[0][1], + ( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_1".to_vec() + ], + b"key2".to_vec(), + Some(Element::new_item(b"value2".to_vec())) + ) + ); + assert_eq!( + results[0][2], + ( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_1".to_vec() + ], + b"key3".to_vec(), + Some(Element::new_item(b"value3".to_vec())) + ) + ); + + assert_eq!(results[1].len(), 3); + assert_eq!( + results[1][0], + ( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_2".to_vec() + ], + b"key4".to_vec(), + Some(Element::new_item(b"value4".to_vec())) + ) + ); + assert_eq!( + results[1][1], + ( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_2".to_vec() + ], + b"key5".to_vec(), + Some(Element::new_item(b"value5".to_vec())) + ) + ); + assert_eq!( + results[1][2], + ( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_2".to_vec() + ], + b"key6".to_vec(), + Some(Element::new_item(b"value6".to_vec())) + ) + ); + } + + #[test] + fn test_query_b_depends_on_query_a() { + // we have two trees + // one with a mapping of id to name + // another with a mapping of name to age + // we want to get the age of every one after a certain id ordered by name + let db = make_test_grovedb(); + + // TEST_LEAF contains the id to name mapping + db.insert( + [TEST_LEAF].as_ref(), 
+ &[1], + Element::new_item(b"d".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + db.insert( + [TEST_LEAF].as_ref(), + &[2], + Element::new_item(b"b".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + db.insert( + [TEST_LEAF].as_ref(), + &[3], + Element::new_item(b"c".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + db.insert( + [TEST_LEAF].as_ref(), + &[4], + Element::new_item(b"a".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + + // ANOTHER_TEST_LEAF contains the name to age mapping + db.insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"a", + Element::new_item(vec![10]), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + db.insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"b", + Element::new_item(vec![30]), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + db.insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"c", + Element::new_item(vec![12]), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + db.insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"d", + Element::new_item(vec![46]), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + + // Query: return the age of everyone greater than id 2 ordered by name + // id 2 - b + // so we want to return the age for c and d = 12, 46 respectively + // the proof generator knows that id 2 = b, but the verifier doesn't + // hence we need to generate two proofs + // prove that 2 - b then prove age after b + // the verifier has to use the result of the first proof 2 - b + // to generate the path query for the verification of the second proof + + // query name associated with id 2 + let mut query = Query::new(); + query.insert_key(vec![2]); + let mut path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + // first we show that this returns the 
correct output + let proof = db.prove_query(&path_query_one, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query(&proof, &path_query_one).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 1); + assert_eq!(result_set[0].2, Some(Element::new_item(b"b".to_vec()))); + + // next query should return the age for elements above b + let mut query = Query::new(); + query.insert_range_after(b"b".to_vec()..); + let path_query_two = PathQuery::new_unsized(vec![ANOTHER_TEST_LEAF.to_vec()], query); + + // show that we get the correct output + let proof = db.prove_query(&path_query_two, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query(&proof, &path_query_two).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 2); + assert_eq!(result_set[0].2, Some(Element::new_item(vec![12]))); + assert_eq!(result_set[1].2, Some(Element::new_item(vec![46]))); + + // now we merge the path queries + let mut merged_path_queries = + PathQuery::merge(vec![&path_query_one, &path_query_two]).unwrap(); + merged_path_queries.query.limit = Some(3); + let proof = db.prove_query(&merged_path_queries, None).unwrap().unwrap(); + + // verifier only has access to the statement age > 2 + // need to first get the name associated with 2 from the proof + // then use that to construct the next path query + let mut chained_path_queries = vec![]; + chained_path_queries.push(|prev_elements: Vec| { + let mut query = Query::new(); + let name_element = prev_elements[0].2.as_ref().unwrap(); + if let Element::Item(name, ..) 
= name_element { + query.insert_range_after(name.to_owned()..); + Some(PathQuery::new( + vec![ANOTHER_TEST_LEAF.to_vec()], + SizedQuery::new(query, Some(2), None), + )) + } else { + None + } + }); + + // add limit to path query one + path_query_one.query.limit = Some(1); + + let (_, result_set) = GroveDb::verify_query_with_chained_path_queries( + proof.as_slice(), + &path_query_one, + chained_path_queries, + ) + .unwrap(); + assert_eq!(result_set.len(), 2); + assert_eq!(result_set[0].len(), 1); + assert_eq!(result_set[1].len(), 2); + + let age_result = result_set[1].clone(); + assert_eq!(age_result[0].2, Some(Element::new_item(vec![12]))); + assert_eq!(age_result[1].2, Some(Element::new_item(vec![46]))); + } #[test] fn test_prove_absent_path_with_intermediate_emtpy_tree() { diff --git a/merk/src/proofs/query/query_item/mod.rs b/merk/src/proofs/query/query_item/mod.rs index 2b01b3b7..6269c239 100644 --- a/merk/src/proofs/query/query_item/mod.rs +++ b/merk/src/proofs/query/query_item/mod.rs @@ -17,6 +17,7 @@ use grovedb_storage::RawIterator; #[cfg(any(feature = "full", feature = "verify"))] use crate::error::Error; +use crate::proofs::hex_to_ascii; #[cfg(any(feature = "full", feature = "verify"))] /// A `QueryItem` represents a key or range of keys to be included in a proof. 
@@ -78,14 +79,6 @@ impl fmt::Display for QueryItem { } } -fn hex_to_ascii(hex_value: &[u8]) -> String { - if hex_value.len() == 1 && hex_value[0] < b"0"[0] { - hex::encode(&hex_value) - } else { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) - } -} - #[cfg(any(feature = "full", feature = "verify"))] impl Hash for QueryItem { fn hash(&self, state: &mut H) { diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs index 797f4518..20c67541 100644 --- a/merk/src/proofs/query/verify.rs +++ b/merk/src/proofs/query/verify.rs @@ -41,6 +41,7 @@ pub fn verify(bytes: &[u8], expected_hash: MerkHash) -> CostResult { pub struct VerifyOptions { pub absence_proofs_for_non_existing_searched_keys: bool, pub verify_proof_succinctness: bool, + pub include_empty_trees_in_result: bool, } impl Default for VerifyOptions { @@ -48,6 +49,7 @@ impl Default for VerifyOptions { VerifyOptions { absence_proofs_for_non_existing_searched_keys: true, verify_proof_succinctness: true, + include_empty_trees_in_result: false, } } } From f4f8d6a2d59ce2b58491a99613fcdbdc44cea3f1 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Sun, 7 Jul 2024 23:38:13 +0700 Subject: [PATCH 15/34] more work --- grovedb/src/operations/proof/generate.rs | 7 +- grovedb/src/operations/proof/verify.rs | 45 ++++++----- grovedb/src/query/mod.rs | 79 ++++++++++++++----- grovedb/src/query_result_type.rs | 4 +- grovedb/src/tests/query_tests.rs | 96 +++++++++++++++++++----- grovedb/src/tests/sum_tree_tests.rs | 28 ------- merk/src/proofs/query/mod.rs | 31 ++++++++ 7 files changed, 203 insertions(+), 87 deletions(-) diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index c04a0424..8ed00e4c 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -413,7 +413,7 @@ impl GroveDb { Ok(Element::Tree(Some(_), _)) | Ok(Element::SumTree(Some(_), ..)) if !done_with_results && 
query.has_subquery_on_key(key) => { - println!("found tree {}", hex_to_ascii(key)); + println!("found tree {}, query is {:?}", hex_to_ascii(key), query); // We only want to check in sub nodes for the proof if the tree has // elements let mut lower_path = path.clone(); @@ -442,6 +442,11 @@ impl GroveDb { Ok(Element::Tree(..)) | Ok(Element::SumTree(..)) if !done_with_results => { + println!( + "found tree {}, no subquery query is {:?}", + hex_to_ascii(key), + query + ); overall_limit.as_mut().map(|limit| *limit -= 1); has_a_result_at_level |= true; } diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 00752e13..0f1113f5 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -187,7 +187,6 @@ impl GroveDb { T: TryFrom, Error: From<>::Error>, { - let in_path_proving = current_path.len() < query.path.len(); let internal_query = query .query_items_at_path(current_path) @@ -201,6 +200,8 @@ impl GroveDb { query )))?; + let in_path = internal_query.in_path; + let level_query = Query { items: internal_query.items.to_vec(), default_subquery_branch: internal_query.default_subquery_branch.into_owned(), @@ -243,6 +244,7 @@ impl GroveDb { verified_keys.insert(key.clone()); if let Some(lower_layer) = layer_proof.lower_layers.get(key) { + println!("lower layer had key {}", hex_to_ascii(key)); match element { Element::Tree(Some(_), _) | Element::SumTree(Some(_), ..) 
=> { path.push(key); @@ -280,27 +282,32 @@ impl GroveDb { )); } } - } else if !in_path_proving { - if options.include_empty_trees_in_result - || !matches!(element, Element::Tree(None, _)) - { - let path_key_optional_value = - ProvedPathKeyOptionalValue::from_proved_key_value( - path.iter().map(|p| p.to_vec()).collect(), - proved_key_value, - ); - println!( - "pushing {} limit left after is {:?}", - &path_key_optional_value, limit_left + } else if options.include_empty_trees_in_result + || !matches!(element, Element::Tree(None, _)) + || !level_query.has_subquery_or_subquery_path_on_key(key, in_path) + { + let path_key_optional_value = + ProvedPathKeyOptionalValue::from_proved_key_value( + path.iter().map(|p| p.to_vec()).collect(), + proved_key_value, ); - result.push(path_key_optional_value.try_into()?); - - limit_left.as_mut().map(|limit| *limit -= 1); - if limit_left == &Some(0) { - break; - } + println!( + "pushing {} limit left after is {:?}", + &path_key_optional_value, limit_left + ); + result.push(path_key_optional_value.try_into()?); + + limit_left.as_mut().map(|limit| *limit -= 1); + if limit_left == &Some(0) { + break; } } + } else { + println!( + "we have subquery on key {}: {}", + hex_to_ascii(key), + level_query + ) } } } diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index 10f0bdf5..27c9906e 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -4,7 +4,7 @@ use std::{borrow::Cow, cmp::Ordering, fmt}; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_merk::proofs::query::query_item::QueryItem; -use grovedb_merk::proofs::query::SubqueryBranch; +use grovedb_merk::proofs::query::{Key, SubqueryBranch}; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_merk::proofs::Query; use indexmap::IndexMap; @@ -306,11 +306,21 @@ impl PathQuery { } else { None } + } else if path_after_top_removed.len() == 0 { + // we are at the terminal of the path + // we include subqueries as this can be useful in + // 
verification + + let key = + subquery_path[path_after_top_removed.len()].clone(); + if query.has_subquery_or_subquery_path_on_key(&key, false) { + Some(InternalCowItemsQuery::from_key_when_in_terminal_path(key, query)) + } else { + Some(InternalCowItemsQuery::from_key_when_at_terminal_path_with_no_subquery(key)) + } } else { - Some(InternalCowItemsQuery::from_items_when_in_path( - Cow::Owned(vec![QueryItem::Key( - subquery_path[path_after_top_removed.len()].clone(), - )]), + Some(InternalCowItemsQuery::from_key_when_in_path( + subquery_path[path_after_top_removed.len()].clone(), )) }; } @@ -320,9 +330,7 @@ impl PathQuery { return if let Some(subquery) = &subquery_branch.subquery { recursive_query_items(subquery, &path[1..]) } else { - Some(InternalCowItemsQuery::from_items_when_in_path(Cow::Owned( - vec![QueryItem::Key(key.to_vec())], - ))) + Some(InternalCowItemsQuery::from_query(query)) }; } } @@ -341,12 +349,21 @@ impl PathQuery { } else { None } + } else if path_after_top_removed.len() == 0 { + // we are at the terminal of the path + // we include subqueries as this can be useful in verification + let key = subquery_path[path_after_top_removed.len()].clone(); + if query.has_subquery_or_subquery_path_on_key(&key, false) { + Some(InternalCowItemsQuery::from_key_when_in_terminal_path( + key, query, + )) + } else { + Some(InternalCowItemsQuery::from_key_when_at_terminal_path_with_no_subquery(key)) + } } else { - Some(InternalCowItemsQuery::from_items_when_in_path(Cow::Owned( - vec![QueryItem::Key( - subquery_path[path_after_top_removed.len()].clone(), - )], - ))) + Some(InternalCowItemsQuery::from_key_when_in_path( + subquery_path[path_after_top_removed.len()].clone(), + )) }; } } else if path_after_top_removed @@ -372,9 +389,9 @@ impl PathQuery { match given_path_len.cmp(&self_path_len) { Ordering::Less => { if path.iter().zip(&self.path).all(|(a, b)| *a == b.as_slice()) { - Some(InternalCowItemsQuery::from_items_when_in_path(Cow::Owned( - 
vec![QueryItem::Key(self.path[given_path_len].clone())], - ))) + Some(InternalCowItemsQuery::from_key_when_in_path( + self.path[given_path_len].clone(), + )) } else { None } @@ -432,9 +449,35 @@ impl<'a> InternalCowItemsQuery<'a> { return false; } - pub fn from_items_when_in_path(items: Cow>) -> InternalCowItemsQuery { + pub fn from_key_when_in_terminal_path(key: Vec, query: &Query) -> InternalCowItemsQuery { + println!("from_key_when_in_terminal_path {}", query); + InternalCowItemsQuery { + items: Cow::Owned(vec![QueryItem::Key(key)]), + default_subquery_branch: Cow::Borrowed(&query.default_subquery_branch), + conditional_subquery_branches: query + .conditional_subquery_branches + .as_ref() + .map(|conditional_subquery_branches| Cow::Borrowed(conditional_subquery_branches)), + left_to_right: true, + in_path: false, + } + } + + pub fn from_key_when_at_terminal_path_with_no_subquery( + key: Vec, + ) -> InternalCowItemsQuery<'a> { + InternalCowItemsQuery { + items: Cow::Owned(vec![QueryItem::Key(key)]), + default_subquery_branch: Default::default(), + conditional_subquery_branches: None, + left_to_right: true, + in_path: false, + } + } + + pub fn from_key_when_in_path(key: Vec) -> InternalCowItemsQuery<'a> { InternalCowItemsQuery { - items, + items: Cow::Owned(vec![QueryItem::Key(key)]), default_subquery_branch: Default::default(), conditional_subquery_branches: None, left_to_right: true, diff --git a/grovedb/src/query_result_type.rs b/grovedb/src/query_result_type.rs index 12db23fc..356fe92e 100644 --- a/grovedb/src/query_result_type.rs +++ b/grovedb/src/query_result_type.rs @@ -41,7 +41,7 @@ impl fmt::Display for QueryResultType { } /// Query result elements -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Eq, PartialEq)] pub struct QueryResultElements { /// Elements pub elements: Vec, @@ -428,7 +428,7 @@ impl Default for QueryResultElements { } /// Query result element -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Eq, PartialEq)] pub enum QueryResultElement { 
/// Element result item ElementResultItem(Element), diff --git a/grovedb/src/tests/query_tests.rs b/grovedb/src/tests/query_tests.rs index 8f838aa1..7f1defb2 100644 --- a/grovedb/src/tests/query_tests.rs +++ b/grovedb/src/tests/query_tests.rs @@ -7,7 +7,10 @@ mod tests { use crate::{ batch::GroveDbOp, - query_result_type::{PathKeyOptionalElementTrio, QueryResultType}, + query_result_type::{ + PathKeyOptionalElementTrio, QueryResultElement::PathKeyElementTrioResultItem, + QueryResultElements, QueryResultType, + }, reference_path::ReferencePathType, tests::{ common::compare_result_sets, make_deep_tree, make_test_grovedb, TempGroveDb, @@ -1868,6 +1871,16 @@ mod tests { #[test] fn test_mixed_level_proofs_with_subquery_paths() { let db = make_test_grovedb(); + + // TEST_LEAF + // / | \ + // a b c + // / | \ / \ + // d e:2 f:3 g:4 d + // / / | \ + // d:6 i j k + // + db.insert( [TEST_LEAF].as_ref(), b"a", @@ -1980,34 +1993,41 @@ mod tests { ) .unwrap() .expect("successful subtree insert"); - - // if you don't have an item at the subquery path translation, you shouldn't be - // added to the result set. - let mut query = Query::new(); - query.insert_all(); - query.set_subquery_path(vec![b"d".to_vec()]); - - let path = vec![TEST_LEAF.to_vec()]; - - let path_query = PathQuery::new_unsized(path, query.clone()); - - // TODO: proofs seems to be more expressive than query_raw now + // // if you don't have an item at the subquery path translation, you shouldn't + // be // added to the result set. 
+ // let mut query = Query::new(); + // query.insert_all(); + // query.set_subquery_path(vec![b"d".to_vec()]); + // + // let path = vec![TEST_LEAF.to_vec()]; + // + // let path_query = PathQuery::new_unsized(path, query.clone()); + // // let (elements, _) = db // .query_raw( // &path_query, + // false, // true, + // false, // QueryResultType::QueryPathKeyElementTrioResultType, // None, // ) // .unwrap() // .expect("expected successful get_path_query"); // - // assert_eq!(elements.len(), 2); - - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 2); + // assert_eq!(elements, + // QueryResultElements::from_elements(vec![PathKeyElementTrioResultItem((vec![b" + // test_leaf".to_vec(), b"a".to_vec()], b"d".to_vec(), + // Element::Tree(Some(b"d".to_vec()), None) )), + // PathKeyElementTrioResultItem((vec![b"test_leaf".to_vec(), b"b".to_vec()], + // b"d".to_vec(), Element::Tree(Some(b"j".to_vec()), None) ))])); + // + // let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + // let (hash, result_set) = GroveDb::verify_query_raw(&proof, + // &path_query).unwrap(); assert_eq!(hash, + // db.root_hash(None).unwrap().unwrap()); println!("{}", + // result_set.iter().map(|a| a.to_string()).collect::>().join("| ")); + // assert_eq!(result_set.len(), 2); // apply path translation then query let mut query = Query::new(); @@ -2021,6 +2041,44 @@ mod tests { let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_raw( + &path_query, + false, + true, + false, + QueryResultType::QueryPathKeyElementTrioResultType, + None, + ) + .unwrap() + .expect("expected successful get_path_query"); + + assert_eq!( + elements, + QueryResultElements::from_elements(vec![ + PathKeyElementTrioResultItem(( + vec![b"test_leaf".to_vec(), b"a".to_vec(), b"d".to_vec()], + 
b"d".to_vec(), + Element::Item(vec![6], None) + )), + PathKeyElementTrioResultItem(( + vec![b"test_leaf".to_vec(), b"b".to_vec(), b"d".to_vec()], + b"i".to_vec(), + Element::Tree(None, None) + )), + PathKeyElementTrioResultItem(( + vec![b"test_leaf".to_vec(), b"b".to_vec(), b"d".to_vec()], + b"j".to_vec(), + Element::Tree(None, None) + )), + PathKeyElementTrioResultItem(( + vec![b"test_leaf".to_vec(), b"b".to_vec(), b"d".to_vec()], + b"k".to_vec(), + Element::Tree(None, None) + )) + ]) + ); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); diff --git a/grovedb/src/tests/sum_tree_tests.rs b/grovedb/src/tests/sum_tree_tests.rs index 652a7cad..8f28932f 100644 --- a/grovedb/src/tests/sum_tree_tests.rs +++ b/grovedb/src/tests/sum_tree_tests.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Sum tree tests use grovedb_merk::{ diff --git a/merk/src/proofs/query/mod.rs b/merk/src/proofs/query/mod.rs index 67105c31..49e36749 100644 --- a/merk/src/proofs/query/mod.rs +++ b/merk/src/proofs/query/mod.rs @@ -208,6 +208,37 @@ impl Query { } } + pub fn has_subquery_on_key(&self, key: &[u8], in_path: bool) -> bool { + if in_path || self.default_subquery_branch.subquery.is_some() { + return true; + } + if let Some(conditional_subquery_branches) = self.conditional_subquery_branches.as_ref() { + for (query_item, subquery) in conditional_subquery_branches { + if query_item.contains(key) { + return subquery.subquery.is_some(); + } + } + } + return false; + } + + pub fn has_subquery_or_subquery_path_on_key(&self, key: &[u8], in_path: bool) -> bool { + if in_path + || self.default_subquery_branch.subquery.is_some() + || self.default_subquery_branch.subquery_path.is_some() + { + return true; + } + if let Some(conditional_subquery_branches) = self.conditional_subquery_branches.as_ref() { + for query_item in conditional_subquery_branches.keys() { + if query_item.contains(key) { + return true; + } + } + } + return false; + } + /// Pushes terminal key paths and keys to `result`, no more than /// `max_results`. Returns the number of terminal keys added. 
/// From 8cb7e097e3520115b67f5c776a232ee48f909aec Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Sun, 7 Jul 2024 23:52:55 +0700 Subject: [PATCH 16/34] more work --- grovedb/src/operations/proof/util.rs | 2 ++ grovedb/src/operations/proof/verify.rs | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/grovedb/src/operations/proof/util.rs b/grovedb/src/operations/proof/util.rs index 4d28974d..2995300e 100644 --- a/grovedb/src/operations/proof/util.rs +++ b/grovedb/src/operations/proof/util.rs @@ -16,6 +16,8 @@ pub type ProvedKeyOptionalValues = Vec; #[cfg(any(feature = "full", feature = "verify"))] pub type ProvedPathKeyValues = Vec; + + #[cfg(any(feature = "full", feature = "verify"))] pub type ProvedPathKeyOptionalValues = Vec; diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 0f1113f5..c94382e7 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -282,9 +282,9 @@ impl GroveDb { )); } } - } else if options.include_empty_trees_in_result + } else if !in_path && (options.include_empty_trees_in_result || !matches!(element, Element::Tree(None, _)) - || !level_query.has_subquery_or_subquery_path_on_key(key, in_path) + || !level_query.has_subquery_or_subquery_path_on_key(key, in_path)) { let path_key_optional_value = ProvedPathKeyOptionalValue::from_proved_key_value( From d77f86526bdb6536d6f10f013919c5ba3150e4f2 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 01:49:39 +0700 Subject: [PATCH 17/34] more work --- grovedb/src/operations/proof/generate.rs | 10 +- grovedb/src/operations/proof/util.rs | 2 - grovedb/src/operations/proof/verify.rs | 28 +- grovedb/src/query/mod.rs | 334 +++++++++++++++++------ grovedb/src/tests/query_tests.rs | 7 +- 5 files changed, 274 insertions(+), 107 deletions(-) diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index 8ed00e4c..abea5164 100644 --- 
a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -234,6 +234,13 @@ impl GroveDb { .wrap_with_cost(cost); } + if path_query.query.limit == Some(0) { + return Err(Error::InvalidQuery( + "proved path queries can not be for limit 0", + )) + .wrap_with_cost(cost); + } + // we want to query raw because we want the references to not be resolved at // this point @@ -411,7 +418,8 @@ impl GroveDb { has_a_result_at_level |= true; } Ok(Element::Tree(Some(_), _)) | Ok(Element::SumTree(Some(_), ..)) - if !done_with_results && query.has_subquery_on_key(key) => + if !done_with_results + && query.has_subquery_or_matching_in_path_on_key(key) => { println!("found tree {}, query is {:?}", hex_to_ascii(key), query); // We only want to check in sub nodes for the proof if the tree has diff --git a/grovedb/src/operations/proof/util.rs b/grovedb/src/operations/proof/util.rs index 2995300e..4d28974d 100644 --- a/grovedb/src/operations/proof/util.rs +++ b/grovedb/src/operations/proof/util.rs @@ -16,8 +16,6 @@ pub type ProvedKeyOptionalValues = Vec; #[cfg(any(feature = "full", feature = "verify"))] pub type ProvedPathKeyValues = Vec; - - #[cfg(any(feature = "full", feature = "verify"))] pub type ProvedPathKeyOptionalValues = Vec; diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index c94382e7..141c0150 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -13,7 +13,7 @@ use crate::{ operations::proof::{ generate::{GroveDBProof, GroveDBProofV0, LayerProof}, util::{ - hex_to_ascii, path_as_slices_hex_to_ascii, path_hex_to_ascii, + element_hex_to_ascii, hex_to_ascii, path_as_slices_hex_to_ascii, path_hex_to_ascii, ProvedPathKeyOptionalValue, ProvedPathKeyValues, }, ProveOptions, @@ -200,15 +200,10 @@ impl GroveDb { query )))?; - let in_path = internal_query.in_path; - let level_query = Query { items: internal_query.items.to_vec(), - default_subquery_branch: 
internal_query.default_subquery_branch.into_owned(), - conditional_subquery_branches: internal_query - .conditional_subquery_branches - .map(|a| a.into_owned()), left_to_right: internal_query.left_to_right, + ..Default::default() }; let (root_hash, merk_result) = level_query @@ -282,9 +277,9 @@ impl GroveDb { )); } } - } else if !in_path && (options.include_empty_trees_in_result - || !matches!(element, Element::Tree(None, _)) - || !level_query.has_subquery_or_subquery_path_on_key(key, in_path)) + } else if !internal_query.has_subquery_on_key(key) + && (options.include_empty_trees_in_result + || !matches!(element, Element::Tree(None, _))) { let path_key_optional_value = ProvedPathKeyOptionalValue::from_proved_key_value( @@ -301,13 +296,14 @@ impl GroveDb { if limit_left == &Some(0) { break; } + } else { + println!( + "we have subquery on key {} with value {}: {}", + hex_to_ascii(key), + element, + level_query + ) } - } else { - println!( - "we have subquery on key {}: {}", - hex_to_ascii(key), - level_query - ) } } } diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index 27c9906e..fcb0863f 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -1,6 +1,10 @@ //! 
Queries -use std::{borrow::Cow, cmp::Ordering, fmt}; +use std::{ + borrow::{Cow, Cow::Borrowed}, + cmp::Ordering, + fmt, +}; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_merk::proofs::query::query_item::QueryItem; @@ -306,21 +310,9 @@ impl PathQuery { } else { None } - } else if path_after_top_removed.len() == 0 { - // we are at the terminal of the path - // we include subqueries as this can be useful in - // verification - - let key = - subquery_path[path_after_top_removed.len()].clone(); - if query.has_subquery_or_subquery_path_on_key(&key, false) { - Some(InternalCowItemsQuery::from_key_when_in_terminal_path(key, query)) - } else { - Some(InternalCowItemsQuery::from_key_when_at_terminal_path_with_no_subquery(key)) - } } else { Some(InternalCowItemsQuery::from_key_when_in_path( - subquery_path[path_after_top_removed.len()].clone(), + &subquery_path[path_after_top_removed.len()], )) }; } @@ -343,26 +335,21 @@ impl PathQuery { .zip(subquery_path) .all(|(a, b)| *a == b.as_slice()) { + // The paths are equal for example if we had a sub path of + // path : 1 / 2 + // subquery : All items + + // If we are asking what is the subquery when we are at 1 / 2 + // we should get return if path_after_top_removed.len() == subquery_path.len() { if let Some(subquery) = &query.default_subquery_branch.subquery { Some(InternalCowItemsQuery::from_query(subquery)) } else { None } - } else if path_after_top_removed.len() == 0 { - // we are at the terminal of the path - // we include subqueries as this can be useful in verification - let key = subquery_path[path_after_top_removed.len()].clone(); - if query.has_subquery_or_subquery_path_on_key(&key, false) { - Some(InternalCowItemsQuery::from_key_when_in_terminal_path( - key, query, - )) - } else { - Some(InternalCowItemsQuery::from_key_when_at_terminal_path_with_no_subquery(key)) - } } else { Some(InternalCowItemsQuery::from_key_when_in_path( - subquery_path[path_after_top_removed.len()].clone(), + 
&subquery_path[path_after_top_removed.len()], )) }; } @@ -373,7 +360,10 @@ impl PathQuery { .all(|(a, b)| *a == b.as_slice()) { if let Some(subquery) = &query.default_subquery_branch.subquery { - return recursive_query_items(subquery, &path[subquery_path.len()..]); + return recursive_query_items( + subquery, + &path_after_top_removed[subquery_path.len()..], + ); } } } else if let Some(subquery) = &query.default_subquery_branch.subquery { @@ -390,7 +380,7 @@ impl PathQuery { Ordering::Less => { if path.iter().zip(&self.path).all(|(a, b)| *a == b.as_slice()) { Some(InternalCowItemsQuery::from_key_when_in_path( - self.path[given_path_len].clone(), + &self.path[given_path_len], )) } else { None @@ -413,75 +403,67 @@ impl PathQuery { } } +#[cfg(any(feature = "full", feature = "verify"))] +#[derive(Debug, Clone, PartialEq)] +pub(crate) enum HasSubquery<'a> { + NoSubquery, + Always, + Conditionally(Cow<'a, IndexMap>), +} + +impl<'a> HasSubquery<'a> { + /// Checks to see if we have a subquery on a specific key + pub fn has_subquery_on_key(&self, key: &[u8]) -> bool { + match self { + HasSubquery::NoSubquery => false, + HasSubquery::Conditionally(conditionally) => conditionally + .keys() + .any(|query_item| query_item.contains(key)), + HasSubquery::Always => true, + } + } +} + /// This represents a query where the items might be borrowed, it is used to get /// subquery information #[cfg(any(feature = "full", feature = "verify"))] -#[derive(Debug, Default, Clone, PartialEq)] +#[derive(Debug, Clone, PartialEq)] pub(crate) struct InternalCowItemsQuery<'a> { /// Items pub items: Cow<'a, Vec>, /// Default subquery branch - pub default_subquery_branch: Cow<'a, SubqueryBranch>, - /// Conditional subquery branches - pub conditional_subquery_branches: Option>>, + pub has_subquery: HasSubquery<'a>, /// Left to right? 
pub left_to_right: bool, /// In the path of the path_query, or in a subquery path - pub in_path: bool, + pub in_path: Option>, } impl<'a> InternalCowItemsQuery<'a> { /// Checks to see if we have a subquery on a specific key pub fn has_subquery_on_key(&self, key: &[u8]) -> bool { - if self.in_path - || self.default_subquery_branch.subquery.is_some() - || self.default_subquery_branch.subquery_path.is_some() - { - return true; - } - if let Some(conditional_subquery_branches) = self.conditional_subquery_branches.as_ref() { - for query_item in conditional_subquery_branches.keys() { - if query_item.contains(key) { - return true; - } - } - } - return false; - } - - pub fn from_key_when_in_terminal_path(key: Vec, query: &Query) -> InternalCowItemsQuery { - println!("from_key_when_in_terminal_path {}", query); - InternalCowItemsQuery { - items: Cow::Owned(vec![QueryItem::Key(key)]), - default_subquery_branch: Cow::Borrowed(&query.default_subquery_branch), - conditional_subquery_branches: query - .conditional_subquery_branches - .as_ref() - .map(|conditional_subquery_branches| Cow::Borrowed(conditional_subquery_branches)), - left_to_right: true, - in_path: false, - } + self.has_subquery.has_subquery_on_key(key) } - pub fn from_key_when_at_terminal_path_with_no_subquery( - key: Vec, - ) -> InternalCowItemsQuery<'a> { - InternalCowItemsQuery { - items: Cow::Owned(vec![QueryItem::Key(key)]), - default_subquery_branch: Default::default(), - conditional_subquery_branches: None, - left_to_right: true, - in_path: false, + /// Checks to see if we have a subquery on a specific key + pub fn has_subquery_or_matching_in_path_on_key(&self, key: &[u8]) -> bool { + if self.has_subquery.has_subquery_on_key(key) { + return true; + } else { + if let Some(path) = self.in_path.as_ref() { + path.as_slice() == key + } else { + false + } } } - pub fn from_key_when_in_path(key: Vec) -> InternalCowItemsQuery<'a> { + pub fn from_key_when_in_path(key: &'a Vec) -> InternalCowItemsQuery<'a> { 
InternalCowItemsQuery { - items: Cow::Owned(vec![QueryItem::Key(key)]), - default_subquery_branch: Default::default(), - conditional_subquery_branches: None, + items: Cow::Owned(vec![QueryItem::Key(key.clone())]), + has_subquery: HasSubquery::NoSubquery, left_to_right: true, - in_path: true, + in_path: Some(Borrowed(key)), } } @@ -490,15 +472,20 @@ impl<'a> InternalCowItemsQuery<'a> { } pub fn from_query(query: &Query) -> InternalCowItemsQuery { + let has_subquery = if query.default_subquery_branch.subquery.is_some() + || query.default_subquery_branch.subquery_path.is_some() + { + HasSubquery::Always + } else if let Some(conditional) = query.conditional_subquery_branches.as_ref() { + HasSubquery::Conditionally(Cow::Borrowed(conditional)) + } else { + HasSubquery::NoSubquery + }; InternalCowItemsQuery { items: Cow::Borrowed(&query.items), - default_subquery_branch: Cow::Borrowed(&query.default_subquery_branch), - conditional_subquery_branches: query - .conditional_subquery_branches - .as_ref() - .map(|conditional_subquery_branches| Cow::Borrowed(conditional_subquery_branches)), + has_subquery, left_to_right: query.left_to_right, - in_path: false, + in_path: None, } } } @@ -506,14 +493,18 @@ impl<'a> InternalCowItemsQuery<'a> { #[cfg(feature = "full")] #[cfg(test)] mod tests { - use std::ops::RangeFull; + use std::{borrow::Cow, ops::RangeFull}; - use grovedb_merk::proofs::{query::query_item::QueryItem, Query}; + use grovedb_merk::proofs::{ + query::{query_item::QueryItem, SubqueryBranch}, + Query, + }; use crate::{ + query::{HasSubquery, HasSubquery::NoSubquery, InternalCowItemsQuery}, query_result_type::QueryResultType, tests::{common::compare_result_tuples, make_deep_tree, TEST_LEAF}, - Element, GroveDb, PathQuery, + Element, GroveDb, PathQuery, SizedQuery, }; #[test] @@ -1102,4 +1093,179 @@ mod tests { .expect("should execute proof"); assert_eq!(result_set.len(), 4); } + + #[test] + fn test_identities_contract_keys_proof() { + // Constructing the keys and paths + 
let root_path_key_1 = b"root_path_key_1".to_vec(); + let root_path_key_2 = b"root_path_key_2".to_vec(); + let root_item_key = b"root_item_key".to_vec(); + let subquery_path_key_1 = b"subquery_path_key_1".to_vec(); + let subquery_path_key_2 = b"subquery_path_key_2".to_vec(); + let subquery_item_key = b"subquery_item_key".to_vec(); + let inner_subquery_path_key = b"inner_subquery_path_key".to_vec(); + + // Constructing the subquery + let subquery = Query { + items: vec![QueryItem::Key(subquery_item_key.clone())], + default_subquery_branch: SubqueryBranch { + subquery_path: Some(vec![inner_subquery_path_key.clone()]), + subquery: None, + }, + left_to_right: true, + conditional_subquery_branches: None, + }; + + // Constructing the PathQuery + let path_query = PathQuery { + path: vec![root_path_key_1.clone(), root_path_key_2.clone()], + query: SizedQuery { + query: Query { + items: vec![QueryItem::Key(root_item_key.clone())], + default_subquery_branch: SubqueryBranch { + subquery_path: Some(vec![ + subquery_path_key_1.clone(), + subquery_path_key_2.clone(), + ]), + subquery: Some(Box::new(subquery)), + }, + left_to_right: true, + conditional_subquery_branches: None, + }, + limit: Some(2), + offset: None, + }, + }; + + { + let path = vec![root_path_key_1.as_slice()]; + let first = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + first, + InternalCowItemsQuery { + items: Cow::Owned(vec![QueryItem::Key(root_path_key_2.clone())]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: Some(Cow::Borrowed(&root_path_key_2)), + } + ); + } + + { + let path = vec![root_path_key_1.as_slice(), root_path_key_2.as_slice()]; + + let second = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + second, + InternalCowItemsQuery { + items: Cow::Owned(vec![QueryItem::Key(root_item_key.clone())]), + has_subquery: HasSubquery::Always, /* This is correct because there's a + * subquery 
for one item */ + left_to_right: true, + in_path: None, + } + ); + } + + { + let path = vec![ + root_path_key_1.as_slice(), + root_path_key_2.as_slice(), + root_item_key.as_slice(), + ]; + + let third = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + third, + InternalCowItemsQuery { + items: Cow::Owned(vec![QueryItem::Key(subquery_path_key_1.clone())]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: Some(Cow::Borrowed(&subquery_path_key_1)) + } + ); + } + + { + let path = vec![ + root_path_key_1.as_slice(), + root_path_key_2.as_slice(), + root_item_key.as_slice(), + subquery_path_key_1.as_slice(), + ]; + + let fourth = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + fourth, + InternalCowItemsQuery { + items: Cow::Owned(vec![QueryItem::Key(subquery_path_key_2.clone())]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: Some(Cow::Borrowed(&subquery_path_key_2)) + } + ); + } + + { + let path = vec![ + root_path_key_1.as_slice(), + root_path_key_2.as_slice(), + root_item_key.as_slice(), + subquery_path_key_1.as_slice(), + subquery_path_key_2.as_slice(), + ]; + + let fifth = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + fifth, + InternalCowItemsQuery { + items: Cow::Owned(vec![QueryItem::Key(subquery_item_key.clone())]), + has_subquery: HasSubquery::Always, /* This means that we should be able to + * add items underneath */ + left_to_right: true, + in_path: None, + } + ); + } + + { + let path = vec![ + root_path_key_1.as_slice(), + root_path_key_2.as_slice(), + root_item_key.as_slice(), + subquery_path_key_1.as_slice(), + subquery_path_key_2.as_slice(), + subquery_item_key.as_slice(), + ]; + + let sixth = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + sixth, + InternalCowItemsQuery { + items: 
Cow::Owned(vec![QueryItem::Key(inner_subquery_path_key.clone())]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: Some(Cow::Borrowed(&inner_subquery_path_key)) + } + ); + } + } } diff --git a/grovedb/src/tests/query_tests.rs b/grovedb/src/tests/query_tests.rs index 7f1defb2..91d36e5b 100644 --- a/grovedb/src/tests/query_tests.rs +++ b/grovedb/src/tests/query_tests.rs @@ -2136,10 +2136,9 @@ mod tests { SizedQuery::new(query, Some(0), Some(0)), ); - let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); + db.prove_query(&path_query, None) + .unwrap() + .expect_err("expected error when trying to prove with limit 0"); } #[test] From 7beb692f6077aec05ab23d5499ee6ae429f2001b Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 02:11:46 +0700 Subject: [PATCH 18/34] all tests passing --- grovedb/src/element/helpers.rs | 10 ++++++++-- grovedb/src/element/query.rs | 2 +- grovedb/src/lib.rs | 8 ++++---- grovedb/src/operations/auxiliary.rs | 2 +- grovedb/src/operations/delete/mod.rs | 8 ++++---- grovedb/src/operations/get/query.rs | 2 +- grovedb/src/operations/insert/mod.rs | 4 ++-- grovedb/src/operations/proof/verify.rs | 7 ++++--- grovedb/src/tests/query_tests.rs | 17 +++++++++++++++++ 9 files changed, 42 insertions(+), 18 deletions(-) diff --git a/grovedb/src/element/helpers.rs b/grovedb/src/element/helpers.rs index 156dcbcc..222bafbd 100644 --- a/grovedb/src/element/helpers.rs +++ b/grovedb/src/element/helpers.rs @@ -141,7 +141,7 @@ impl Element { #[cfg(any(feature = "full", feature = "verify"))] /// Check if the element is a tree - pub fn is_tree(&self) -> bool { + pub fn is_any_tree(&self) -> bool { matches!(self, Element::SumTree(..) 
| Element::Tree(..)) } @@ -153,10 +153,16 @@ impl Element { #[cfg(any(feature = "full", feature = "verify"))] /// Check if the element is an item - pub fn is_item(&self) -> bool { + pub fn is_any_item(&self) -> bool { matches!(self, Element::Item(..) | Element::SumItem(..)) } + #[cfg(any(feature = "full", feature = "verify"))] + /// Check if the element is an item + pub fn is_basic_item(&self) -> bool { + matches!(self, Element::Item(..)) + } + #[cfg(any(feature = "full", feature = "verify"))] /// Check if the element is a sum item pub fn is_sum_item(&self) -> bool { diff --git a/grovedb/src/element/query.rs b/grovedb/src/element/query.rs index 6e6cad6e..46e2a9ab 100644 --- a/grovedb/src/element/query.rs +++ b/grovedb/src/element/query.rs @@ -449,7 +449,7 @@ impl Element { decrease_limit_on_range_with_no_sub_elements, .. } = query_options; - if element.is_tree() { + if element.is_any_tree() { let mut path_vec = path.to_vec(); let key = cost_return_on_error_no_add!( &cost, diff --git a/grovedb/src/lib.rs b/grovedb/src/lib.rs index c7550c5a..649ab11d 100644 --- a/grovedb/src/lib.rs +++ b/grovedb/src/lib.rs @@ -894,7 +894,7 @@ impl GroveDb { while let Some((key, element_value)) = element_iterator.next_kv().unwrap() { let element = raw_decode(&element_value)?; - if element.is_tree() { + if element.is_any_tree() { let (kv_value, element_value_hash) = merk .get_value_and_value_hash( &key, @@ -924,7 +924,7 @@ impl GroveDb { ); } issues.extend(self.verify_merk_and_submerks(inner_merk, &new_path_ref, batch)?); - } else if element.is_item() { + } else if element.is_any_item() { let (kv_value, element_value_hash) = merk .get_value_and_value_hash( &key, @@ -964,7 +964,7 @@ impl GroveDb { while let Some((key, element_value)) = element_iterator.next_kv().unwrap() { let element = raw_decode(&element_value)?; - if element.is_tree() { + if element.is_any_tree() { let (kv_value, element_value_hash) = merk .get_value_and_value_hash( &key, @@ -999,7 +999,7 @@ impl GroveDb { batch, 
transaction, )?); - } else if element.is_item() { + } else if element.is_any_item() { let (kv_value, element_value_hash) = merk .get_value_and_value_hash( &key, diff --git a/grovedb/src/operations/auxiliary.rs b/grovedb/src/operations/auxiliary.rs index 1b6b884d..6f9fd576 100644 --- a/grovedb/src/operations/auxiliary.rs +++ b/grovedb/src/operations/auxiliary.rs @@ -155,7 +155,7 @@ impl GroveDb { while let Some((key, value)) = cost_return_on_error!(&mut cost, raw_iter.next_element()) { - if value.is_tree() { + if value.is_any_tree() { let mut sub_path = q.clone(); sub_path.push(key.to_vec()); queue.push(sub_path.clone()); diff --git a/grovedb/src/operations/delete/mod.rs b/grovedb/src/operations/delete/mod.rs index 84d14652..d13fdd61 100644 --- a/grovedb/src/operations/delete/mod.rs +++ b/grovedb/src/operations/delete/mod.rs @@ -221,7 +221,7 @@ impl GroveDb { element_iterator.next_kv().unwrap_add_cost(&mut cost) { let element = raw_decode(&element_value).unwrap(); - if element.is_tree() { + if element.is_any_tree() { if options.allow_deleting_subtrees { cost_return_on_error!( &mut cost, @@ -284,7 +284,7 @@ impl GroveDb { { let element = raw_decode(&element_value).unwrap(); if options.allow_deleting_subtrees { - if element.is_tree() { + if element.is_any_tree() { cost_return_on_error!( &mut cost, self.delete( @@ -636,7 +636,7 @@ impl GroveDb { self.open_transactional_merk_at_path(path.clone(), transaction, Some(batch)) ); let uses_sum_tree = subtree_to_delete_from.is_sum_tree; - if element.is_tree() { + if element.is_any_tree() { let subtree_merk_path = path.derive_owned_with_child(key); let subtree_merk_path_ref = SubtreePath::from(&subtree_merk_path); @@ -800,7 +800,7 @@ impl GroveDb { self.open_non_transactional_merk_at_path(path.clone(), Some(batch)) ); let uses_sum_tree = subtree_to_delete_from.is_sum_tree; - if element.is_tree() { + if element.is_any_tree() { let subtree_merk_path = path.derive_owned_with_child(key); let subtree_of_tree_we_are_deleting = 
cost_return_on_error!( &mut cost, diff --git a/grovedb/src/operations/get/query.rs b/grovedb/src/operations/get/query.rs index b4e57c3b..6ba914ef 100644 --- a/grovedb/src/operations/get/query.rs +++ b/grovedb/src/operations/get/query.rs @@ -192,7 +192,7 @@ where { ) .unwrap_add_cost(cost)?; - if maybe_item.is_item() { + if maybe_item.is_any_item() { Ok(maybe_item) } else { Err(Error::InvalidQuery("the reference must result in an item")) diff --git a/grovedb/src/operations/insert/mod.rs b/grovedb/src/operations/insert/mod.rs index 513e2098..5670a939 100644 --- a/grovedb/src/operations/insert/mod.rs +++ b/grovedb/src/operations/insert/mod.rs @@ -239,7 +239,7 @@ impl GroveDb { Error::CorruptedData(String::from("unable to deserialize element")) }) ); - if element.is_tree() { + if element.is_any_tree() { return Err(Error::OverrideNotAllowed( "insertion not allowed to override tree", )) @@ -378,7 +378,7 @@ impl GroveDb { Error::CorruptedData(String::from("unable to deserialize element")) }) ); - if element.is_tree() { + if element.is_any_tree() { return Err(Error::OverrideNotAllowed( "insertion not allowed to override tree", )) diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 141c0150..01de1214 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -277,9 +277,10 @@ impl GroveDb { )); } } - } else if !internal_query.has_subquery_on_key(key) - && (options.include_empty_trees_in_result - || !matches!(element, Element::Tree(None, _))) + } else if element.is_any_item() + || !internal_query.has_subquery_or_matching_in_path_on_key(key) + && (options.include_empty_trees_in_result + || !matches!(element, Element::Tree(None, _))) { let path_key_optional_value = ProvedPathKeyOptionalValue::from_proved_key_value( diff --git a/grovedb/src/tests/query_tests.rs b/grovedb/src/tests/query_tests.rs index 91d36e5b..a9503a5e 100644 --- a/grovedb/src/tests/query_tests.rs +++ 
b/grovedb/src/tests/query_tests.rs @@ -1567,6 +1567,15 @@ mod tests { #[test] fn test_mixed_level_proofs() { let db = make_test_grovedb(); + + // TEST_LEAF + // / | | \ + // key1 key2 : [1] key3 key4 : (Ref -> Key2) + // / | \ + // k1 k2 k3 + // / / / + // 2 3 4 + db.insert( [TEST_LEAF].as_ref(), b"key1", @@ -1652,6 +1661,14 @@ mod tests { let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + println!( + "{}", + result_set + .iter() + .map(|a| a.to_string()) + .collect::>() + .join(" | ") + ); assert_eq!(result_set.len(), 5); compare_result_sets(&elements, &result_set); From 14e3b425fc28da18b65675b531b82732d90a724a Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 04:36:02 +0700 Subject: [PATCH 19/34] trial --- grovedb/src/operations/proof/generate.rs | 15 ---- grovedb/src/query/mod.rs | 94 ++++++++++++++++++++++-- merk/src/proofs/query/mod.rs | 9 ++- 3 files changed, 96 insertions(+), 22 deletions(-) diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index abea5164..d19a3061 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -320,21 +320,6 @@ impl GroveDb { self.open_non_transactional_merk_at_path(path.as_slice().into(), None) ); - // let mut items_to_prove: BTreeSet> = layer_precomputed_results - // .as_ref() - // .map_or(BTreeSet::new(), |map| { - // map.key_values.keys().cloned().collect() - // }); - // - // for query_item in query_at_path.as_slice() { - // match query_item { - // QueryItem::Key(key) => { - // items_to_prove.insert(key.clone()); - // } - // _ => {} - // } - // } - let limit = if path.len() < path_query.path.len() { // There is no need for a limit because we are only asking for a single item None diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index 
fcb0863f..fa54f971 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -311,8 +311,13 @@ impl PathQuery { None } } else { + let last_path_item = path.len() == subquery_path.len(); + let has_subquery = + query.default_subquery_branch.subquery.is_some(); Some(InternalCowItemsQuery::from_key_when_in_path( &subquery_path[path_after_top_removed.len()], + last_path_item, + has_subquery, )) }; } @@ -348,8 +353,12 @@ impl PathQuery { None } } else { + let last_path_item = path.len() == subquery_path.len(); + let has_subquery = query.default_subquery_branch.subquery.is_some(); Some(InternalCowItemsQuery::from_key_when_in_path( &subquery_path[path_after_top_removed.len()], + last_path_item, + has_subquery, )) }; } @@ -381,6 +390,8 @@ impl PathQuery { if path.iter().zip(&self.path).all(|(a, b)| *a == b.as_slice()) { Some(InternalCowItemsQuery::from_key_when_in_path( &self.path[given_path_len], + false, + true, )) } else { None @@ -458,12 +469,23 @@ impl<'a> InternalCowItemsQuery<'a> { } } - pub fn from_key_when_in_path(key: &'a Vec) -> InternalCowItemsQuery<'a> { + pub fn from_key_when_in_path( + key: &'a Vec, + subquery_is_last_path_item: bool, + subquery_has_inner_subquery: bool, + ) -> InternalCowItemsQuery<'a> { + // in this case there should be no in_path, because we are trying to get this + // level of items and nothing underneath + let in_path = if subquery_is_last_path_item && !subquery_has_inner_subquery { + None + } else { + Some(Borrowed(key)) + }; InternalCowItemsQuery { items: Cow::Owned(vec![QueryItem::Key(key.clone())]), has_subquery: HasSubquery::NoSubquery, left_to_right: true, - in_path: Some(Borrowed(key)), + in_path, } } @@ -501,7 +523,7 @@ mod tests { }; use crate::{ - query::{HasSubquery, HasSubquery::NoSubquery, InternalCowItemsQuery}, + query::{HasSubquery, InternalCowItemsQuery}, query_result_type::QueryResultType, tests::{common::compare_result_tuples, make_deep_tree, TEST_LEAF}, Element, GroveDb, PathQuery, SizedQuery, @@ -1095,7 
+1117,7 @@ mod tests { } #[test] - fn test_identities_contract_keys_proof() { + fn test_path_query_items_with_subquery_and_inner_subquery_path() { // Constructing the keys and paths let root_path_key_1 = b"root_path_key_1".to_vec(); let root_path_key_2 = b"root_path_key_2".to_vec(); @@ -1263,7 +1285,69 @@ mod tests { items: Cow::Owned(vec![QueryItem::Key(inner_subquery_path_key.clone())]), has_subquery: HasSubquery::NoSubquery, left_to_right: true, - in_path: Some(Cow::Borrowed(&inner_subquery_path_key)) + in_path: None, + } + ); + } + } + + #[test] + fn test_path_query_items_with_subquery_path() { + // Constructing the keys and paths + let root_path_key = b"higher".to_vec(); + let dash_key = b"dash".to_vec(); + let quantum_key = b"quantum".to_vec(); + + // Constructing the PathQuery + let path_query = PathQuery { + path: vec![root_path_key.clone()], + query: SizedQuery { + query: Query { + items: vec![QueryItem::RangeFull(RangeFull)], + default_subquery_branch: SubqueryBranch { + subquery_path: Some(vec![quantum_key.clone()]), + subquery: None, + }, + left_to_right: true, + conditional_subquery_branches: None, + }, + limit: Some(100), + offset: None, + }, + }; + + // Validating the PathQuery structure + { + let path = vec![root_path_key.as_slice()]; + let first = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + first, + InternalCowItemsQuery { + items: Cow::Owned(vec![QueryItem::RangeFull(RangeFull)]), + has_subquery: HasSubquery::Always, + left_to_right: true, + in_path: None, + } + ); + } + + { + let path = vec![root_path_key.as_slice(), dash_key.as_slice()]; + + let second = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + second, + InternalCowItemsQuery { + items: Cow::Owned(vec![QueryItem::Key(quantum_key.clone())]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: None, // There should be no path because we are at the end of the path } ); } 
diff --git a/merk/src/proofs/query/mod.rs b/merk/src/proofs/query/mod.rs index 49e36749..6ec23177 100644 --- a/merk/src/proofs/query/mod.rs +++ b/merk/src/proofs/query/mod.rs @@ -41,6 +41,7 @@ use {super::Op, std::collections::LinkedList}; use super::Node; #[cfg(any(feature = "full", feature = "verify"))] use crate::error::Error; +use crate::proofs::hex_to_ascii; #[cfg(feature = "full")] use crate::tree::kv::ValueDefinedCostType; #[cfg(feature = "full")] @@ -89,16 +90,20 @@ impl fmt::Display for SubqueryBranch { write!(f, "SubqueryBranch {{ ")?; if let Some(path) = &self.subquery_path { write!(f, "subquery_path: [")?; - for (i, element) in path.iter().enumerate() { + for (i, path_part) in path.iter().enumerate() { if i > 0 { write!(f, ", ")? } - write!(f, "{}", hex::encode(element))?; + write!(f, "{}", hex_to_ascii(path_part))?; } write!(f, "], ")?; + } else { + write!(f, "subquery_path: None ")?; } if let Some(subquery) = &self.subquery { write!(f, "subquery: {} ", subquery)?; + } else { + write!(f, "subquery: None ")?; } write!(f, "}}") } From ab8d1e25b713278496a6e1262c05bb766f7a09d3 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 05:13:09 +0700 Subject: [PATCH 20/34] more work --- grovedb/src/operations/proof/generate.rs | 2 +- grovedb/src/query/mod.rs | 90 +++++++++++++++++++++++- grovedb/src/tests/query_tests.rs | 4 +- 3 files changed, 92 insertions(+), 4 deletions(-) diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index d19a3061..73f8d89d 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -406,7 +406,7 @@ impl GroveDb { if !done_with_results && query.has_subquery_or_matching_in_path_on_key(key) => { - println!("found tree {}, query is {:?}", hex_to_ascii(key), query); + println!("found tree {}, query is {}", hex_to_ascii(key), query); // We only want to check in sub nodes for the proof if the tree has // elements let mut lower_path = 
path.clone(); diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index fa54f971..c395d7e3 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -312,8 +312,7 @@ impl PathQuery { } } else { let last_path_item = path.len() == subquery_path.len(); - let has_subquery = - query.default_subquery_branch.subquery.is_some(); + let has_subquery = subquery_branch.subquery.is_some(); Some(InternalCowItemsQuery::from_key_when_in_path( &subquery_path[path_after_top_removed.len()], last_path_item, @@ -422,6 +421,23 @@ pub(crate) enum HasSubquery<'a> { Conditionally(Cow<'a, IndexMap>), } +#[cfg(any(feature = "full", feature = "verify"))] +impl<'a> fmt::Display for HasSubquery<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + HasSubquery::NoSubquery => write!(f, "NoSubquery"), + HasSubquery::Always => write!(f, "Always"), + HasSubquery::Conditionally(map) => { + writeln!(f, "Conditionally {{")?; + for (query_item, subquery_branch) in map.iter() { + writeln!(f, " {query_item}: {subquery_branch},")?; + } + write!(f, "}}") + } + } + } +} + impl<'a> HasSubquery<'a> { /// Checks to see if we have a subquery on a specific key pub fn has_subquery_on_key(&self, key: &[u8]) -> bool { @@ -450,6 +466,25 @@ pub(crate) struct InternalCowItemsQuery<'a> { pub in_path: Option>, } +#[cfg(any(feature = "full", feature = "verify"))] +impl<'a> fmt::Display for InternalCowItemsQuery<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "InternalCowItemsQuery {{")?; + writeln!(f, " items: [")?; + for item in self.items.iter() { + writeln!(f, " {item},")?; + } + writeln!(f, " ]")?; + writeln!(f, " has_subquery: {}", self.has_subquery)?; + writeln!(f, " left_to_right: {}", self.left_to_right)?; + match &self.in_path { + Some(path) => writeln!(f, " in_path: Some({})", hex_to_ascii(&path)), + None => writeln!(f, " in_path: None"), + }?; + write!(f, "}}") + } +} + impl<'a> InternalCowItemsQuery<'a> { /// Checks to 
see if we have a subquery on a specific key pub fn has_subquery_on_key(&self, key: &[u8]) -> bool { @@ -521,6 +556,7 @@ mod tests { query::{query_item::QueryItem, SubqueryBranch}, Query, }; + use indexmap::IndexMap; use crate::{ query::{HasSubquery, InternalCowItemsQuery}, @@ -1352,4 +1388,54 @@ mod tests { ); } } + + #[test] + fn test_conditional_subquery_refusing_elements() { + let empty_vec: Vec = vec![]; + let zero_vec: Vec = vec![0]; + + let mut conditional_subquery_branches = IndexMap::new(); + conditional_subquery_branches.insert( + QueryItem::Key(b"".to_vec()), + SubqueryBranch { + subquery_path: Some(vec![zero_vec.clone()]), + subquery: Some(Query::new().into()), + }, + ); + + let path_query = PathQuery { + path: vec![TEST_LEAF.to_vec()], + query: SizedQuery { + query: Query { + items: vec![QueryItem::RangeFull(RangeFull)], + default_subquery_branch: SubqueryBranch { + subquery_path: Some(vec![zero_vec.clone()]), + subquery: None, + }, + left_to_right: true, + conditional_subquery_branches: Some(conditional_subquery_branches), + }, + limit: Some(100), + offset: None, + }, + }; + + { + let path = vec![TEST_LEAF, empty_vec.as_slice()]; + + let second = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + second, + InternalCowItemsQuery { + items: Cow::Owned(vec![QueryItem::Key(zero_vec.clone())]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: Some(Cow::Borrowed(&zero_vec)), + } + ); + } + } } diff --git a/grovedb/src/tests/query_tests.rs b/grovedb/src/tests/query_tests.rs index a9503a5e..2427abd3 100644 --- a/grovedb/src/tests/query_tests.rs +++ b/grovedb/src/tests/query_tests.rs @@ -561,9 +561,11 @@ mod tests { let subquery = Query::new(); + // This conditional subquery expresses that we do not want to get values in "" + // tree query.add_conditional_subquery( QueryItem::Key(b"".to_vec()), - Some(vec![b"\0".to_vec()]), + Some(vec![b"\0".to_vec()]), // We want to go into 0 but we don't 
want to get anything Some(subquery), ); From 8346e2679f9209400da31d07971ae07ddfdde9fa Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 07:45:24 +0700 Subject: [PATCH 21/34] complex conditional queries --- grovedb/src/query/mod.rs | 228 +++++++++++++++++++++++++++++++++++++-- 1 file changed, 222 insertions(+), 6 deletions(-) diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index c395d7e3..9be818e0 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -320,14 +320,24 @@ impl PathQuery { )) }; } + } else if path_after_top_removed + .iter() + .take(subquery_path.len()) + .zip(subquery_path) + .all(|(a, b)| *a == b.as_slice()) + { + if let Some(subquery) = &subquery_branch.subquery { + return recursive_query_items( + subquery, + &path_after_top_removed[subquery_path.len()..], + ); + } } + } else if let Some(subquery) = &subquery_branch.subquery { + return recursive_query_items(subquery, path_after_top_removed); } - return if let Some(subquery) = &subquery_branch.subquery { - recursive_query_items(subquery, &path[1..]) - } else { - Some(InternalCowItemsQuery::from_query(query)) - }; + return None; } } } @@ -559,7 +569,7 @@ mod tests { use indexmap::IndexMap; use crate::{ - query::{HasSubquery, InternalCowItemsQuery}, + query::{HasSubquery, HasSubquery::NoSubquery, InternalCowItemsQuery}, query_result_type::QueryResultType, tests::{common::compare_result_tuples, make_deep_tree, TEST_LEAF}, Element, GroveDb, PathQuery, SizedQuery, @@ -1438,4 +1448,210 @@ mod tests { ); } } + + #[test] + fn test_complex_path_query_with_conditional_subqueries() { + let identity_id = + hex::decode("8b8948a6801501bbe0431e3d994dcf71cf5a2a0939fe51b0e600076199aba4fb") + .unwrap(); + + let key_20 = vec![20u8]; + + let key_80 = vec![80u8]; + + let inner_conditional_subquery_branches = IndexMap::from([( + QueryItem::Key(vec![80]), + SubqueryBranch { + subquery_path: None, + subquery: Some(Box::new(Query { + items: 
vec![QueryItem::RangeFull(RangeFull)], + default_subquery_branch: SubqueryBranch { + subquery_path: None, + subquery: None, + }, + left_to_right: true, + conditional_subquery_branches: None, + })), + }, + )]); + + let conditional_subquery_branches = IndexMap::from([ + ( + QueryItem::Key(vec![]), + SubqueryBranch { + subquery_path: None, + subquery: Some(Box::new(Query { + items: vec![QueryItem::Key(identity_id.to_vec())], + default_subquery_branch: SubqueryBranch { + subquery_path: None, + subquery: None, + }, + left_to_right: true, + conditional_subquery_branches: None, + })), + }, + ), + ( + QueryItem::Key(vec![20]), + SubqueryBranch { + subquery_path: Some(vec![identity_id.to_vec()]), + subquery: Some(Box::new(Query { + items: vec![QueryItem::Key(vec![80]), QueryItem::Key(vec![0xc0])], + default_subquery_branch: SubqueryBranch { + subquery_path: None, + subquery: None, + }, + conditional_subquery_branches: Some( + inner_conditional_subquery_branches.clone(), + ), + left_to_right: true, + })), + }, + ), + ]); + + let path_query = PathQuery { + path: vec![], + query: SizedQuery { + query: Query { + items: vec![QueryItem::Key(vec![20]), QueryItem::Key(vec![96])], + default_subquery_branch: SubqueryBranch { + subquery_path: None, + subquery: None, + }, + conditional_subquery_branches: Some(conditional_subquery_branches.clone()), + left_to_right: true, + }, + limit: Some(100), + offset: None, + }, + }; + + // { + // let path = vec![]; + // let first = path_query + // .query_items_at_path(&path) + // .expect("expected query items"); + // + // assert_eq!( + // first, + // InternalCowItemsQuery { + // items: Cow::Owned(vec![ + // QueryItem::Key(vec![20]), + // QueryItem::Key(vec![96]), + // ]), + // has_subquery: + // HasSubquery::Conditionally(Cow::Borrowed(&conditional_subquery_branches)), + // left_to_right: true, + // in_path: None, + // } + // ); + // } + // + // { + // let path = vec![key_20.as_slice()]; + // let query = path_query + // .query_items_at_path(&path) 
+ // .expect("expected query items"); + // + // assert_eq!( + // query, + // InternalCowItemsQuery { + // items: Cow::Owned(vec![ + // QueryItem::Key(identity_id.clone()), + // ]), + // has_subquery: NoSubquery, + // left_to_right: true, + // in_path: Some(Cow::Borrowed(&identity_id)), + // } + // ); + // } + // + // { + // let path = vec![key_20.as_slice(), identity_id.as_slice()]; + // let query = path_query + // .query_items_at_path(&path) + // .expect("expected query items"); + // + // assert_eq!( + // query, + // InternalCowItemsQuery { + // items: Cow::Owned(vec![ + // QueryItem::Key(vec![80]), + // QueryItem::Key(vec![0xc0]), + // ]), + // has_subquery: + // HasSubquery::Conditionally(Cow::Borrowed(& + // inner_conditional_subquery_branches)), left_to_right: + // true, in_path: None, + // } + // ); + // } + + { + let path = vec![key_20.as_slice(), identity_id.as_slice(), key_80.as_slice()]; + let query = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + query, + InternalCowItemsQuery { + items: Cow::Owned(vec![QueryItem::RangeFull(RangeFull)]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: None, + } + ); + } + // { + // let path = vec![ + // vec![20], + // ]; + // + // let second = path_query + // .query + // .query + // .query_items_at_path(&path) + // .expect("expected query items"); + // + // assert_eq!( + // second, + // InternalCowItemsQuery { + // items: Cow::Owned(vec![ + // QueryItem::Key(vec![80]), + // QueryItem::Key(vec![0xc0]), + // ]), + // has_subquery: HasSubquery::Always, + // left_to_right: true, + // in_path: Some(Cow::Borrowed(&vec![20])), + // } + // ); + // } + // + // { + // let path = vec![ + // vec![20], + // vec![80], + // ]; + // + // let third = path_query + // .query + // .query + // .query_items_at_path(&path) + // .expect("expected query items"); + // + // assert_eq!( + // third, + // InternalCowItemsQuery { + // items: Cow::Owned(vec![ + // 
QueryItem::RangeFull, + // ]), + // has_subquery: HasSubquery::Always, + // left_to_right: true, + // in_path: Some(Cow::Borrowed(&vec![80])), + // } + // ); + // } + } } From 6f2932abcda40dd0aba7b311409b6a6e3f488bc8 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 08:41:43 +0700 Subject: [PATCH 22/34] cleanup --- grovedb/Cargo.toml | 1 + grovedb/src/lib.rs | 1 - grovedb/src/operations/mod.rs | 2 - grovedb/src/operations/proof/generate.rs | 471 ++-------- grovedb/src/operations/proof/util.rs | 2 +- grovedb/src/operations/proof/verify.rs | 98 +- grovedb/src/operations/proof_old/generate.rs | 938 ------------------- grovedb/src/operations/proof_old/mod.rs | 11 - grovedb/src/operations/proof_old/util.rs | 501 ---------- grovedb/src/operations/proof_old/verify.rs | 917 ------------------ grovedb/src/query/mod.rs | 69 +- grovedb/src/tests/common.rs | 33 +- grovedb/src/versioning.rs | 63 -- merk/Cargo.toml | 1 + merk/src/proofs/query/mod.rs | 48 +- merk/src/proofs/query/verify.rs | 20 +- 16 files changed, 223 insertions(+), 2953 deletions(-) delete mode 100644 grovedb/src/operations/proof_old/generate.rs delete mode 100644 grovedb/src/operations/proof_old/mod.rs delete mode 100644 grovedb/src/operations/proof_old/util.rs delete mode 100644 grovedb/src/operations/proof_old/verify.rs delete mode 100644 grovedb/src/versioning.rs diff --git a/grovedb/Cargo.toml b/grovedb/Cargo.toml index 10a1e0ce..78ca1302 100644 --- a/grovedb/Cargo.toml +++ b/grovedb/Cargo.toml @@ -46,6 +46,7 @@ harness = false [features] default = ["full"] +proof_debug = [] full = [ "grovedb-merk/full", "thiserror", diff --git a/grovedb/src/lib.rs b/grovedb/src/lib.rs index 649ab11d..d901e83c 100644 --- a/grovedb/src/lib.rs +++ b/grovedb/src/lib.rs @@ -164,7 +164,6 @@ pub mod replication; mod tests; #[cfg(feature = "full")] mod util; -mod versioning; #[cfg(feature = "full")] mod visualize; diff --git a/grovedb/src/operations/mod.rs b/grovedb/src/operations/mod.rs index 
49518b96..ba9b8599 100644 --- a/grovedb/src/operations/mod.rs +++ b/grovedb/src/operations/mod.rs @@ -10,8 +10,6 @@ pub(crate) mod get; pub mod insert; #[cfg(feature = "full")] pub(crate) mod is_empty_tree; -// #[cfg(any(feature = "full", feature = "verify"))] -// pub mod proof; #[cfg(any(feature = "full", feature = "verify"))] pub mod proof; diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index 73f8d89d..41a0dbe4 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -17,12 +17,12 @@ use grovedb_merk::{ tree::value_hash, Merk, ProofWithoutEncodingResult, }; -use grovedb_path::SubtreePath; use grovedb_storage::StorageContext; +#[cfg(feature = "proof_debug")] +use crate::query_result_type::QueryResultType; use crate::{ operations::proof::util::{element_hex_to_ascii, hex_to_ascii}, - query_result_type::QueryResultType, reference_path::path_from_reference_path_type, Element, Error, GroveDb, PathQuery, }; @@ -205,7 +205,10 @@ impl GroveDb { let mut cost = OperationCost::default(); let proof = cost_return_on_error!(&mut cost, self.prove_internal(path_query, prove_options)); - println!("constructed proof is {}", proof); + #[cfg(feature = "proof_debug")] + { + println!("constructed proof is {}", proof); + } let config = bincode::config::standard() .with_big_endian() .with_no_limit(); @@ -241,39 +244,42 @@ impl GroveDb { .wrap_with_cost(cost); } - // we want to query raw because we want the references to not be resolved at - // this point - - let values = cost_return_on_error!( - &mut cost, - self.query_raw( - path_query, - false, - prove_options.decrease_limit_on_empty_sub_query_result, - false, - QueryResultType::QueryPathKeyElementTrioResultType, - None + #[cfg(feature = "proof_debug")] + { + // we want to query raw because we want the references to not be resolved at + // this point + + let values = cost_return_on_error!( + &mut cost, + self.query_raw( + path_query, + 
false, + prove_options.decrease_limit_on_empty_sub_query_result, + false, + QueryResultType::QueryPathKeyElementTrioResultType, + None + ) ) - ) - .0; - - println!("values are {}", values); - - let precomputed_result_map = cost_return_on_error!( - &mut cost, - self.query_raw( - path_query, - false, - prove_options.decrease_limit_on_empty_sub_query_result, - false, - QueryResultType::QueryPathKeyElementTrioResultType, - None + .0; + + println!("values are {}", values); + + let precomputed_result_map = cost_return_on_error!( + &mut cost, + self.query_raw( + path_query, + false, + prove_options.decrease_limit_on_empty_sub_query_result, + false, + QueryResultType::QueryPathKeyElementTrioResultType, + None + ) ) - ) - .0 - .to_btree_map_level_results(); + .0 + .to_btree_map_level_results(); - println!("precomputed results are {}", precomputed_result_map); + println!("precomputed results are {}", precomputed_result_map); + } let mut limit = path_query.query.limit; @@ -332,19 +338,22 @@ impl GroveDb { self.generate_merk_proof(&subtree, &query.items, query.left_to_right, limit) ); - println!( - "generated merk proof at level path level [{}], limit is {:?}, {}", - path.iter() - .map(|a| hex_to_ascii(*a)) - .collect::>() - .join("/"), - overall_limit, - if query.left_to_right { - "left to right" - } else { - "right to left" - } - ); + #[cfg(feature = "proof_debug")] + { + println!( + "generated merk proof at level path level [{}], limit is {:?}, {}", + path.iter() + .map(|a| hex_to_ascii(*a)) + .collect::>() + .join("/"), + overall_limit, + if query.left_to_right { + "left to right" + } else { + "right to left" + } + ); + } let mut lower_layers = BTreeMap::new(); @@ -397,7 +406,10 @@ impl GroveDb { has_a_result_at_level |= true; } Ok(Element::Item(..)) if !done_with_results => { - println!("found {}", hex_to_ascii(key)); + #[cfg(feature = "proof_debug")] + { + println!("found {}", hex_to_ascii(key)); + } *node = Node::KV(key.to_owned(), value.to_owned()); 
overall_limit.as_mut().map(|limit| *limit -= 1); has_a_result_at_level |= true; @@ -406,7 +418,14 @@ impl GroveDb { if !done_with_results && query.has_subquery_or_matching_in_path_on_key(key) => { - println!("found tree {}, query is {}", hex_to_ascii(key), query); + #[cfg(feature = "proof_debug")] + { + println!( + "found tree {}, query is {}", + hex_to_ascii(key), + query + ); + } // We only want to check in sub nodes for the proof if the tree has // elements let mut lower_path = path.clone(); @@ -435,11 +454,14 @@ impl GroveDb { Ok(Element::Tree(..)) | Ok(Element::SumTree(..)) if !done_with_results => { - println!( - "found tree {}, no subquery query is {:?}", - hex_to_ascii(key), - query - ); + #[cfg(feature = "proof_debug")] + { + println!( + "found tree {}, no subquery query is {:?}", + hex_to_ascii(key), + query + ); + } overall_limit.as_mut().map(|limit| *limit -= 1); has_a_result_at_level |= true; } @@ -462,13 +484,16 @@ impl GroveDb { && !done_with_results && prove_options.decrease_limit_on_empty_sub_query_result { - println!( - "no results at level {}", - path.iter() - .map(|a| hex_to_ascii(*a)) - .collect::>() - .join("/") - ); + #[cfg(feature = "proof_debug")] + { + println!( + "no results at level {}", + path.iter() + .map(|a| hex_to_ascii(*a)) + .collect::>() + .join("/") + ); + } overall_limit.as_mut().map(|limit| *limit -= 1); } @@ -509,330 +534,4 @@ impl GroveDb { )) }) } - - /// Converts Items to Node::KV from Node::KVValueHash - /// Converts References to Node::KVRefValueHash and sets the value to the - /// referenced element - fn post_process_merk_proof>( - &self, - path: &SubtreePath, - has_any_subquery: bool, - proof_result: &mut ProofWithoutEncodingResult, - ) -> CostResult<(Vec, u16), Error> { - let mut cost = OperationCost::default(); - let mut results_found = 0; - - let mut sub_level_keys = vec![]; - - for op in proof_result.proof.iter_mut() { - match op { - Op::Push(node) | Op::PushInverted(node) => match node { - Node::KV(key, value) | 
Node::KVValueHash(key, value, ..) => { - let elem = Element::deserialize(value); - match elem { - Ok(Element::Reference(reference_path, ..)) => { - let absolute_path = cost_return_on_error!( - &mut cost, - path_from_reference_path_type( - reference_path, - &path.to_vec(), - Some(key.as_slice()) - ) - .wrap_with_cost(OperationCost::default()) - ); - - let referenced_elem = cost_return_on_error!( - &mut cost, - self.follow_reference( - absolute_path.as_slice().into(), - true, - None - ) - ); - - let serialized_referenced_elem = referenced_elem.serialize(); - if serialized_referenced_elem.is_err() { - return Err(Error::CorruptedData(String::from( - "unable to serialize element", - ))) - .wrap_with_cost(cost); - } - - *node = Node::KVRefValueHash( - key.to_owned(), - serialized_referenced_elem.expect("confirmed ok above"), - value_hash(value).unwrap_add_cost(&mut cost), - ); - results_found += 1; - } - Ok(Element::Item(..)) => { - println!("found {}", hex_to_ascii(key)); - *node = Node::KV(key.to_owned(), value.to_owned()); - results_found += 1; - } - Ok(Element::Tree(Some(_), _)) => { - println!("found tree {}", hex_to_ascii(key)); - // We only want to check in sub nodes for the proof if the tree has - // elements - sub_level_keys.push(key.clone()); - } - Ok(Element::SumTree(Some(_), ..)) => { - // We only want to check in sub nodes for the proof if the tree has - // elements - sub_level_keys.push(key.clone()); - if !has_any_subquery { - results_found += 1; // if there is no - // subquery we return - // Empty trees - } - } - Ok(Element::Tree(None, _)) | Ok(Element::SumTree(None, ..)) => { - if !has_any_subquery { - results_found += 1; // if there is no - // subquery we return - // Empty trees - } - } - _ => continue, - } - } - _ => continue, - }, - _ => continue, - } - } - - Ok((sub_level_keys, results_found)).wrap_with_cost(cost) - } } -// #[cfg(test)] -// mod tests { -// use grovedb_merk::{execute_proof, proofs::Query}; -// use grovedb_storage::StorageBatch; -// -// 
use crate::{ -// operations::proof::util::{ProofReader, ProofTokenType}, -// tests::{common::EMPTY_PATH, make_deep_tree, TEST_LEAF}, -// GroveDb, -// }; -// -// #[test] -// fn test_path_info_encoding_and_decoding() { -// let path = vec![b"a".as_slice(), b"b".as_slice(), b"c".as_slice()]; -// let mut proof_vector = vec![]; -// GroveDb::generate_and_store_path_proof(path.clone(), &mut -// proof_vector) .unwrap() -// .unwrap(); -// -// let mut proof_reader = ProofReader::new(proof_vector.as_slice()); -// let decoded_path = proof_reader.read_path_info().unwrap(); -// -// assert_eq!(path, decoded_path); -// } -// -// #[test] -// fn test_reading_of_verbose_proofs() { -// let db = make_deep_tree(); -// -// let path = vec![TEST_LEAF, b"innertree"]; -// let mut query = Query::new(); -// query.insert_all(); -// -// let batch = StorageBatch::new(); -// -// let merk = db -// .open_non_transactional_merk_at_path( -// [TEST_LEAF, b"innertree"].as_ref().into(), -// Some(&batch), -// ) -// .unwrap() -// .unwrap(); -// let expected_root_hash = merk.root_hash().unwrap(); -// -// let mut proof = vec![]; -// db.generate_and_store_merk_proof( -// &path.as_slice().into(), -// &merk, -// &query, -// None, -// ProofTokenType::Merk, -// &mut proof, -// true, -// b"innertree", -// ) -// .unwrap() -// .unwrap(); -// assert_ne!(proof.len(), 0); -// -// let mut proof_reader = ProofReader::new(&proof); -// let (proof_token_type, proof, key) = -// proof_reader.read_verbose_proof().unwrap(); -// -// assert_eq!(proof_token_type, ProofTokenType::Merk); -// assert_eq!(key, Some(b"innertree".to_vec())); -// -// let (root_hash, result_set) = execute_proof(&proof, &query, None, -// true) .unwrap() -// .unwrap(); -// assert_eq!(root_hash, expected_root_hash); -// assert_eq!(result_set.result_set.len(), 3); -// -// // what is the key is empty?? 
-// let merk = db -// .open_non_transactional_merk_at_path(EMPTY_PATH, Some(&batch)) -// .unwrap() -// .unwrap(); -// let expected_root_hash = merk.root_hash().unwrap(); -// -// let mut proof = vec![]; -// db.generate_and_store_merk_proof( -// &EMPTY_PATH, -// &merk, -// &query, -// None, -// ProofTokenType::Merk, -// &mut proof, -// true, -// &[], -// ) -// .unwrap() -// .unwrap(); -// assert_ne!(proof.len(), 0); -// -// let mut proof_reader = ProofReader::new(&proof); -// let (proof_token_type, proof, key) = -// proof_reader.read_verbose_proof().unwrap(); -// -// assert_eq!(proof_token_type, ProofTokenType::Merk); -// assert_eq!(key, Some(vec![])); -// -// let (root_hash, result_set) = execute_proof(&proof, &query, None, -// true) .unwrap() -// .unwrap(); -// assert_eq!(root_hash, expected_root_hash); -// assert_eq!(result_set.result_set.len(), 3); -// } -// -// #[test] -// fn test_reading_verbose_proof_at_key() { -// // going to generate an array of multiple proofs with different keys -// let db = make_deep_tree(); -// let mut proofs = vec![]; -// -// let mut query = Query::new(); -// query.insert_all(); -// -// // insert all under inner tree -// let path = vec![TEST_LEAF, b"innertree"]; -// -// let batch = StorageBatch::new(); -// -// let merk = db -// .open_non_transactional_merk_at_path(path.as_slice().into(), -// Some(&batch)) .unwrap() -// .unwrap(); -// let inner_tree_root_hash = merk.root_hash().unwrap(); -// db.generate_and_store_merk_proof( -// &path.as_slice().into(), -// &merk, -// &query, -// None, -// ProofTokenType::Merk, -// &mut proofs, -// true, -// path.iter().last().unwrap_or(&(&[][..])), -// ) -// .unwrap() -// .unwrap(); -// -// // insert all under innertree4 -// let path = vec![TEST_LEAF, b"innertree4"]; -// let merk = db -// .open_non_transactional_merk_at_path(path.as_slice().into(), -// Some(&batch)) .unwrap() -// .unwrap(); -// let inner_tree_4_root_hash = merk.root_hash().unwrap(); -// db.generate_and_store_merk_proof( -// 
&path.as_slice().into(), -// &merk, -// &query, -// None, -// ProofTokenType::Merk, -// &mut proofs, -// true, -// path.iter().last().unwrap_or(&(&[][..])), -// ) -// .unwrap() -// .unwrap(); -// -// // insert all for deeper_1 -// let path: Vec<&[u8]> = vec![b"deep_leaf", b"deep_node_1", -// b"deeper_1"]; let merk = db -// .open_non_transactional_merk_at_path(path.as_slice().into(), -// Some(&batch)) .unwrap() -// .unwrap(); -// let deeper_1_root_hash = merk.root_hash().unwrap(); -// db.generate_and_store_merk_proof( -// &path.as_slice().into(), -// &merk, -// &query, -// None, -// ProofTokenType::Merk, -// &mut proofs, -// true, -// path.iter().last().unwrap_or(&(&[][..])), -// ) -// .unwrap() -// .unwrap(); -// -// // read the proof at innertree -// let contextual_proof = proofs.clone(); -// let mut proof_reader = ProofReader::new(&contextual_proof); -// let (proof_token_type, proof) = proof_reader -// .read_verbose_proof_at_key(b"innertree") -// .unwrap(); -// -// assert_eq!(proof_token_type, ProofTokenType::Merk); -// -// let (root_hash, result_set) = execute_proof(&proof, &query, None, -// true) .unwrap() -// .unwrap(); -// assert_eq!(root_hash, inner_tree_root_hash); -// assert_eq!(result_set.result_set.len(), 3); -// -// // read the proof at innertree4 -// let contextual_proof = proofs.clone(); -// let mut proof_reader = ProofReader::new(&contextual_proof); -// let (proof_token_type, proof) = proof_reader -// .read_verbose_proof_at_key(b"innertree4") -// .unwrap(); -// -// assert_eq!(proof_token_type, ProofTokenType::Merk); -// -// let (root_hash, result_set) = execute_proof(&proof, &query, None, -// true) .unwrap() -// .unwrap(); -// assert_eq!(root_hash, inner_tree_4_root_hash); -// assert_eq!(result_set.result_set.len(), 2); -// -// // read the proof at deeper_1 -// let contextual_proof = proofs.clone(); -// let mut proof_reader = ProofReader::new(&contextual_proof); -// let (proof_token_type, proof) = -// 
proof_reader.read_verbose_proof_at_key(b"deeper_1").unwrap(); -// -// assert_eq!(proof_token_type, ProofTokenType::Merk); -// -// let (root_hash, result_set) = execute_proof(&proof, &query, None, -// true) .unwrap() -// .unwrap(); -// assert_eq!(root_hash, deeper_1_root_hash); -// assert_eq!(result_set.result_set.len(), 3); -// -// // read the proof at an invalid key -// let contextual_proof = proofs.clone(); -// let mut proof_reader = ProofReader::new(&contextual_proof); -// let reading_result = -// proof_reader.read_verbose_proof_at_key(b"unknown_key"); assert! -// (reading_result.is_err()) } -// } diff --git a/grovedb/src/operations/proof/util.rs b/grovedb/src/operations/proof/util.rs index 4d28974d..b3ceb604 100644 --- a/grovedb/src/operations/proof/util.rs +++ b/grovedb/src/operations/proof/util.rs @@ -1,4 +1,4 @@ -use std::{fmt, fmt::Formatter}; +use std::fmt; use grovedb_merk::{ proofs::query::{Key, Path, ProvedKeyOptionalValue, ProvedKeyValue}, diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 01de1214..64a7c2d5 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -9,13 +9,14 @@ use grovedb_merk::{ CryptoHash, }; +#[cfg(feature = "proof_debug")] +use crate::operations::proof::util::{ + hex_to_ascii, path_as_slices_hex_to_ascii, path_hex_to_ascii, +}; use crate::{ operations::proof::{ generate::{GroveDBProof, GroveDBProofV0, LayerProof}, - util::{ - element_hex_to_ascii, hex_to_ascii, path_as_slices_hex_to_ascii, path_hex_to_ascii, - ProvedPathKeyOptionalValue, ProvedPathKeyValues, - }, + util::{ProvedPathKeyOptionalValue, ProvedPathKeyValues}, ProveOptions, }, query_result_type::PathKeyOptionalElementTrio, @@ -118,18 +119,38 @@ impl GroveDb { .into_iter() .map(|(path, key, element)| ((path, key), element)) .collect(); - - println!( - "t{:?}, r{:?}", - terminal_keys - .iter() - .map(|(path, key)| (path_hex_to_ascii(path), hex_to_ascii(key))) - .collect::>(), - 
result_set_as_map - .iter() - .map(|((path, key), e)| ((path_hex_to_ascii(path), hex_to_ascii(key)), e)) - .collect::>() - ); + #[cfg(feature = "proof_debug")] + { + println!( + "terminal keys are [{}] \n result set is [{}]", + terminal_keys + .iter() + .map(|(path, key)| format!( + "path: {} key: {}", + path_hex_to_ascii(path), + hex_to_ascii(key) + )) + .collect::>() + .join(", "), + result_set_as_map + .iter() + .map(|((path, key), e)| { + let element_string = if let Some(e) = e { + e.to_string() + } else { + "None".to_string() + }; + format!( + "path: {} key: {} element: {}", + path_hex_to_ascii(path), + hex_to_ascii(key), + e + ) + }) + .collect::>() + .join(", ") + ); + } result = terminal_keys .into_iter() @@ -217,12 +238,14 @@ impl GroveDb { eprintln!("{e}"); Error::InvalidProof(format!("invalid proof verification parameters: {}", e)) })?; - - println!( - "current path {} merk result is {}", - path_as_slices_hex_to_ascii(current_path), - merk_result - ); + #[cfg(feature = "proof_debug")] + { + println!( + "current path {} \n merk result is {}", + path_as_slices_hex_to_ascii(current_path), + merk_result + ); + } let mut verified_keys = BTreeSet::new(); @@ -239,7 +262,10 @@ impl GroveDb { verified_keys.insert(key.clone()); if let Some(lower_layer) = layer_proof.lower_layers.get(key) { - println!("lower layer had key {}", hex_to_ascii(key)); + #[cfg(feature = "proof_debug")] + { + println!("lower layer had key {}", hex_to_ascii(key)); + } match element { Element::Tree(Some(_), _) | Element::SumTree(Some(_), ..) 
=> { path.push(key); @@ -287,10 +313,13 @@ impl GroveDb { path.iter().map(|p| p.to_vec()).collect(), proved_key_value, ); - println!( - "pushing {} limit left after is {:?}", - &path_key_optional_value, limit_left - ); + #[cfg(feature = "proof_debug")] + { + println!( + "pushing {} limit left after is {:?}", + &path_key_optional_value, limit_left + ); + } result.push(path_key_optional_value.try_into()?); limit_left.as_mut().map(|limit| *limit -= 1); @@ -298,12 +327,15 @@ impl GroveDb { break; } } else { - println!( - "we have subquery on key {} with value {}: {}", - hex_to_ascii(key), - element, - level_query - ) + #[cfg(feature = "proof_debug")] + { + println!( + "we have subquery on key {} with value {}: {}", + hex_to_ascii(key), + element, + level_query + ) + } } } } diff --git a/grovedb/src/operations/proof_old/generate.rs b/grovedb/src/operations/proof_old/generate.rs deleted file mode 100644 index f4974bd6..00000000 --- a/grovedb/src/operations/proof_old/generate.rs +++ /dev/null @@ -1,938 +0,0 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - -//! Generate proof operations - -// TODO: entire file is due for a refactor, need some kind of path generator -// that supports multiple implementations for verbose and non-verbose -// generation - -use std::collections::BTreeMap; -use grovedb_costs::{ - cost_return_on_error, cost_return_on_error_default, cost_return_on_error_no_add, CostResult, - CostsExt, OperationCost, -}; -use grovedb_merk::{ - proofs::{encode_into, Node, Op}, - tree::value_hash, - KVIterator, Merk, ProofWithoutEncodingResult, -}; -use grovedb_merk::proofs::query::{Key, Path}; -use grovedb_path::SubtreePath; -use grovedb_storage::StorageContext; - -use crate::{ - element::helpers::raw_decode, - operations::proof::util::{ - increase_limit_by, reduce_limit_by, write_slice_of_slice_to_slice, - write_slice_to_vec, write_to_vec, ProofTokenType, - }, - reference_path::path_from_reference_path_type, - versioning::{prepend_version_to_bytes, PROOF_VERSION}, - Element, Error, GroveDb, PathQuery, Query, -}; -use crate::query_result_type::{BTreeMapLevelResult, QueryResultType}; - -#[derive(Debug, Clone, Copy)] -pub struct ProveOptions { - pub is_verbose: bool, - pub multilevel_results: bool, -} - -impl Default for ProveOptions { - fn default() -> Self { - ProveOptions { - is_verbose: false, - multilevel_results: false, - } - } -} - -impl GroveDb { - /// Prove one or more path queries. - /// If we more than one path query, we merge into a single path query before - /// proving. 
- pub fn prove_query_many(&self, query: Vec<&PathQuery>, prove_options: Option) -> CostResult, Error> { - if query.len() > 1 { - let query = cost_return_on_error_default!(PathQuery::merge(query)); - self.prove_query(&query, prove_options) - } else { - self.prove_query(query[0], prove_options) - } - } - - /// Prove one or more path queries verbose. - /// If we more than one path query, we merge into a single path query before - /// proving verbose. - pub fn prove_verbose_many(&self, query: Vec<&PathQuery>, prove_options: Option) -> CostResult, Error> { - if query.len() > 1 { - let query = cost_return_on_error_default!(PathQuery::merge(query)); - self.prove_query(&query, prove_options) - } else { - self.prove_query(query[0], prove_options) - } - } - - /// Generate a minimalistic proof for a given path query - /// doesn't allow for subset verification - /// Proofs generated with this can only be verified by the path query used - /// to generate them. - pub fn prove_query(&self, query: &PathQuery, prove_options: Option) -> CostResult, Error> { - self.prove_internal(query, prove_options) - } - - /// Generates a verbose or non-verbose proof based on a bool - fn prove_internal(&self, path_query: &PathQuery, prove_options: Option) -> CostResult, Error> { - let ProveOptions { - is_verbose, multilevel_results - } = prove_options.unwrap_or_default(); - let mut cost = OperationCost::default(); - - if path_query.query.offset.is_some() && path_query.query.offset != Some(0) { - return Err(Error::InvalidQuery("proved path queries can not have offsets")).wrap_with_cost(cost); - } - - let mut proof_result = - cost_return_on_error_default!(prepend_version_to_bytes(vec![], PROOF_VERSION)); - - let path_slices = path_query.path.iter().map(|x| x.as_slice()).collect::>(); - - let subtree_exists = self - .check_subtree_exists_path_not_found(path_slices.as_slice().into(), None) - .unwrap_add_cost(&mut cost); - - // if the subtree at the given path doesn't exist, prove that this path - // 
doesn't point to a valid subtree - match subtree_exists { - Ok(_) => { - // subtree exists - // do nothing - } - Err(_) => { - cost_return_on_error!( - &mut cost, - self.generate_and_store_absent_path_proof( - &path_slices, - &mut proof_result, - is_verbose - ) - ); - // return the absence proof no need to continue proof generation - return Ok(proof_result).wrap_with_cost(cost); - } - } - - // if the subtree exists and the proof type is verbose we need to insert - // the path information to the proof - if is_verbose { - cost_return_on_error!( - &mut cost, - Self::generate_and_store_path_proof(path_slices.clone(), &mut proof_result) - ); - } - - let mut limit: Option = path_query.query.limit; - - let precomputed_result_map = if !multilevel_results || limit.is_none() { - None - } else { - let result = cost_return_on_error!( - &mut cost, - self.query(path_query, false, true, false, QueryResultType::QueryPathKeyElementTrioResultType, None)).0; - Some(result.to_btree_map_level_results()) - }; - - println!("precomputed results are {:?}", precomputed_result_map); - - cost_return_on_error!( - &mut cost, - self.prove_subqueries( - &mut proof_result, - path_slices.clone(), - path_query, - &mut limit, - true, - is_verbose, - &precomputed_result_map, - ) - ); - cost_return_on_error!( - &mut cost, - self.prove_path(&mut proof_result, path_slices, is_verbose) - ); - - Ok(proof_result).wrap_with_cost(cost) - } - - /// Perform a pre-order traversal of the tree based on the provided - /// subqueries - fn prove_subqueries( - &self, - proofs: &mut Vec, - path: Vec<&[u8]>, - query: &PathQuery, - current_limit: &mut Option, - is_first_call: bool, - is_verbose: bool, - precomputed_results: &Option - ) -> CostResult<(), Error> { - let mut cost = OperationCost::default(); - let mut to_add_to_result_set: u16 = 0; - - let subtree = cost_return_on_error!( - &mut cost, - self.open_non_transactional_merk_at_path(path.as_slice().into(), None) - ); - if !subtree.has_root_key() { - 
cost_return_on_error_no_add!( - &cost, - write_to_vec(proofs, &[ProofTokenType::EmptyTree.into()]) - ); - return Ok(()).wrap_with_cost(cost); - } - - let precomputed_items_count = precomputed_results.as_ref().map(|level_results| level_results.len_of_values_at_path(path.as_slice())); - - let reached_limit = current_limit.map_or(false, |limit| limit == 0); - if reached_limit { - if is_first_call { - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &path.as_slice().into(), - &subtree, - &query.query.query, - *current_limit, - ProofTokenType::SizedMerk, - proofs, - is_verbose, - path.iter().last().unwrap_or(&(&[][..])) - ) - ); - } - return Ok(()).wrap_with_cost(cost); - } - - let mut is_leaf_tree = true; - - let mut limit_inc = 0; - - let mut kv_iterator = KVIterator::new(subtree.storage.raw_iter(), &query.query.query) - .unwrap_add_cost(&mut cost); - - //let mut elements_to_prove = vec![]; - - while let Some((key, value_bytes)) = kv_iterator.next_kv().unwrap_add_cost(&mut cost) { - let mut encountered_absence = false; - - let element = cost_return_on_error_no_add!(&cost, raw_decode(&value_bytes)); - match element { - Element::Tree(root_key, _) | Element::SumTree(root_key, ..) 
=> { - let (mut subquery_path, subquery_value) = - Element::subquery_paths_and_value_for_sized_query(&query.query, &key); - - if subquery_value.is_none() && subquery_path.is_none() { - // this element should be added to the result set - // hence we have to update the limit and offset value - reduce_limit_by(current_limit, 1); - limit_inc += 1; - continue; - } - - if root_key.is_none() { - continue; - } - - // if the element is a non-empty tree then current tree is not a leaf tree - if is_leaf_tree { - let proof_token_type = if precomputed_items_count.is_some() { - ProofTokenType::SizedMerk - } else { - ProofTokenType::Merk - }; - is_leaf_tree = false; - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &path.as_slice().into(), - &subtree, - &query.query.query, - precomputed_items_count, - proof_token_type, - proofs, - is_verbose, - path.iter().last().unwrap_or(&Default::default()) - ) - ); - } - - let mut new_path = path.clone(); - new_path.push(key.as_ref()); - - let mut query = subquery_value; - - if query.is_some() { - if let Some(subquery_path) = &subquery_path { - for subkey in subquery_path.iter() { - let inner_subtree = cost_return_on_error!( - &mut cost, - self.open_non_transactional_merk_at_path( - new_path.as_slice().into(), - None, - ) - ); - - let mut key_as_query = Query::new(); - key_as_query.insert_key(subkey.clone()); - - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &new_path.as_slice().into(), - &inner_subtree, - &key_as_query, - None, - ProofTokenType::Merk, - proofs, - is_verbose, - new_path.iter().last().unwrap_or(&Default::default()) - ) - ); - - new_path.push(subkey); - - if self - .check_subtree_exists_path_not_found( - new_path.as_slice().into(), - None, - ) - .unwrap_add_cost(&mut cost) - .is_err() - { - encountered_absence = true; - break; - } - } - - if encountered_absence { - continue; - } - } - } else if let Some(subquery_path) = &mut subquery_path { - if subquery_path.is_empty() 
{ - // nothing to do on this path, since subquery path is empty - // and there is no consecutive subquery value - continue; - } - - let last_key = subquery_path.remove(subquery_path.len() - 1); - - for subkey in subquery_path.iter() { - let inner_subtree = cost_return_on_error!( - &mut cost, - self.open_non_transactional_merk_at_path( - new_path.as_slice().into(), - None - ) - ); - - let mut key_as_query = Query::new(); - key_as_query.insert_key(subkey.clone()); - - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &new_path.as_slice().into(), - &inner_subtree, - &key_as_query, - None, - ProofTokenType::Merk, - proofs, - is_verbose, - new_path.iter().last().unwrap_or(&Default::default()) - ) - ); - - new_path.push(subkey); - - // check if the new path points to a valid subtree - // if it does not, we should stop proof generation on this path - if self - .check_subtree_exists_path_not_found( - new_path.as_slice().into(), - None, - ) - .unwrap_add_cost(&mut cost) - .is_err() - { - encountered_absence = true; - break; - } - } - - if encountered_absence { - continue; - } - - let mut key_as_query = Query::new(); - key_as_query.insert_key(last_key); - query = Some(key_as_query); - } else { - return Err(Error::CorruptedCodeExecution("subquery_path must exist")) - .wrap_with_cost(cost); - } - - let new_path_owned = new_path.iter().map(|a| a.to_vec()).collect(); - - let new_path_query = PathQuery::new_unsized(new_path_owned, query.unwrap()); - - if self - .check_subtree_exists_path_not_found(new_path.as_slice().into(), None) - .unwrap_add_cost(&mut cost) - .is_err() - { - continue; - } - - cost_return_on_error!( - &mut cost, - self.prove_subqueries( - proofs, - new_path, - &new_path_query, - current_limit, - false, - is_verbose, - precomputed_results, - ) - ); - - if *current_limit == Some(0) { - break; - } - } - _ => { - to_add_to_result_set += 1; - } - } - } - - if is_leaf_tree { - // if no useful subtree, then we care about the result set of 
this subtree. - // apply the sized query - increase_limit_by(current_limit, limit_inc); - let limit_offset = cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &path.as_slice().into(), - &subtree, - &query.query.query, - *current_limit, - ProofTokenType::SizedMerk, - proofs, - is_verbose, - path.iter().last().unwrap_or(&Default::default()) - ) - ); - - // update limit - *current_limit = limit_offset; - } else { - reduce_limit_by(current_limit, to_add_to_result_set); - } - - Ok(()).wrap_with_cost(cost) - } - - /// Given a path, construct and append a set of proofs that shows there is - /// a valid path from the root of the db to that point. - fn prove_path( - &self, - proof_result: &mut Vec, - path_slices: Vec<&[u8]>, - is_verbose: bool, - ) -> CostResult<(), Error> { - let mut cost = OperationCost::default(); - - // generate proof to show that the path leads up to the root - let mut split_path = path_slices.split_last(); - while let Some((key, path_slice)) = split_path { - let subtree = cost_return_on_error!( - &mut cost, - self.open_non_transactional_merk_at_path(path_slice.into(), None) - ); - let mut query = Query::new(); - query.insert_key(key.to_vec()); - - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &path_slice.into(), - &subtree, - &query, - None, - ProofTokenType::Merk, - proof_result, - is_verbose, - path_slice.iter().last().unwrap_or(&Default::default()) - ) - ); - split_path = path_slice.split_last(); - } - Ok(()).wrap_with_cost(cost) - } - - /// Generates query proof given a subtree and appends the result to a proof - /// list - fn generate_and_store_merk_proof<'a, S, B>( - &self, - path: &SubtreePath, - subtree: &'a Merk, - query: &Query, - limit: Option, - proof_token_type: ProofTokenType, - proofs: &mut Vec, - is_verbose: bool, - key: &[u8], - ) -> CostResult, Error> - where - S: StorageContext<'a> + 'a, - B: AsRef<[u8]>, - { - if proof_token_type != ProofTokenType::Merk && proof_token_type != 
ProofTokenType::SizedMerk - { - return Err(Error::InvalidInput( - "expect proof type for merk proof generation to be sized or merk proof type", - )) - .wrap_with_cost(Default::default()); - } - println!("generate_and_store_merk_proof path {:?} query {:?} limit_offset {:?} proof_token_type {}", path.to_vec().into_iter().map(hex::encode).collect::>().join("/"), query, limit, proof_token_type); - - let mut cost = OperationCost::default(); - - // if the subtree is empty, return the EmptyTree proof op - if !subtree.has_root_key() { - cost_return_on_error_no_add!( - &cost, - write_to_vec(proofs, &[ProofTokenType::EmptyTree.into()]) - ); - return Ok(limit).wrap_with_cost(cost); - } - - let mut proof_result = cost_return_on_error_no_add!( - &cost, - subtree - .prove_without_encoding(query.clone(), limit) - .unwrap() - .map_err(|_e| Error::InternalError("failed to generate proof")) - ); - - cost_return_on_error!(&mut cost, self.post_process_proof(path, &mut proof_result)); - - let mut proof_bytes = Vec::with_capacity(128); - encode_into(proof_result.proof.iter(), &mut proof_bytes); - - cost_return_on_error_no_add!(&cost, write_to_vec(proofs, &[proof_token_type.into()])); - - // if is verbose, write the key - if is_verbose { - cost_return_on_error_no_add!(&cost, write_slice_to_vec(proofs, key)); - } - - // write the merk proof - cost_return_on_error_no_add!(&cost, write_slice_to_vec(proofs, &proof_bytes)); - - Ok(proof_result.limit).wrap_with_cost(cost) - } - - /// Serializes a path and add it to the proof vector - fn generate_and_store_path_proof( - path: Vec<&[u8]>, - proofs: &mut Vec, - ) -> CostResult<(), Error> { - let cost = OperationCost::default(); - - cost_return_on_error_no_add!( - &cost, - write_to_vec(proofs, &[ProofTokenType::PathInfo.into()]) - ); - - cost_return_on_error_no_add!(&cost, write_slice_of_slice_to_slice(proofs, &path)); - - Ok(()).wrap_with_cost(cost) - } - - fn generate_and_store_absent_path_proof( - &self, - path_slices: &[&[u8]], - proof_result: 
&mut Vec, - is_verbose: bool, - ) -> CostResult<(), Error> { - let mut cost = OperationCost::default(); - - cost_return_on_error_no_add!( - &cost, - write_to_vec(proof_result, &[ProofTokenType::AbsentPath.into()]) - ); - let mut current_path: Vec<&[u8]> = vec![]; - - let mut split_path = path_slices.split_first(); - while let Some((key, path_slice)) = split_path { - let subtree = self - .open_non_transactional_merk_at_path(current_path.as_slice().into(), None) - .unwrap_add_cost(&mut cost); - - let Ok(subtree) = subtree else { - break; - }; - - let has_item = Element::get(&subtree, key, true).unwrap_add_cost(&mut cost); - - let mut next_key_query = Query::new(); - next_key_query.insert_key(key.to_vec()); - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - ¤t_path.as_slice().into(), - &subtree, - &next_key_query, - None, - ProofTokenType::Merk, - proof_result, - is_verbose, - current_path.iter().last().unwrap_or(&(&[][..])) - ) - ); - - current_path.push(key); - - if has_item.is_err() || path_slice.is_empty() { - // reached last key - break; - } - - split_path = path_slice.split_first(); - } - - Ok(()).wrap_with_cost(cost) - } - - /// Converts Items to Node::KV from Node::KVValueHash - /// Converts References to Node::KVRefValueHash and sets the value to the - /// referenced element - fn post_process_proof>( - &self, - path: &SubtreePath, - proof_result: &mut ProofWithoutEncodingResult, - ) -> CostResult<(), Error> { - let mut cost = OperationCost::default(); - - for op in proof_result.proof.iter_mut() { - match op { - Op::Push(node) | Op::PushInverted(node) => match node { - Node::KV(key, value) | Node::KVValueHash(key, value, ..) 
=> { - let elem = Element::deserialize(value); - match elem { - Ok(Element::Reference(reference_path, ..)) => { - let absolute_path = cost_return_on_error!( - &mut cost, - path_from_reference_path_type( - reference_path, - &path.to_vec(), - Some(key.as_slice()) - ) - .wrap_with_cost(OperationCost::default()) - ); - - let referenced_elem = cost_return_on_error!( - &mut cost, - self.follow_reference( - absolute_path.as_slice().into(), - true, - None - ) - ); - - let serialized_referenced_elem = referenced_elem.serialize(); - if serialized_referenced_elem.is_err() { - return Err(Error::CorruptedData(String::from( - "unable to serialize element", - ))) - .wrap_with_cost(cost); - } - - *node = Node::KVRefValueHash( - key.to_owned(), - serialized_referenced_elem.expect("confirmed ok above"), - value_hash(value).unwrap_add_cost(&mut cost), - ) - } - Ok(Element::Item(..)) => { - *node = Node::KV(key.to_owned(), value.to_owned()) - } - _ => continue, - } - } - _ => continue, - }, - _ => continue, - } - } - Ok(()).wrap_with_cost(cost) - } -} - -#[cfg(test)] -mod tests { - use grovedb_merk::{execute_proof, proofs::Query}; - use grovedb_storage::StorageBatch; - - use crate::{ - operations::proof::util::{ProofReader, ProofTokenType}, - tests::{common::EMPTY_PATH, make_deep_tree, TEST_LEAF}, - GroveDb, - }; - - #[test] - fn test_path_info_encoding_and_decoding() { - let path = vec![b"a".as_slice(), b"b".as_slice(), b"c".as_slice()]; - let mut proof_vector = vec![]; - GroveDb::generate_and_store_path_proof(path.clone(), &mut proof_vector) - .unwrap() - .unwrap(); - - let mut proof_reader = ProofReader::new(proof_vector.as_slice()); - let decoded_path = proof_reader.read_path_info().unwrap(); - - assert_eq!(path, decoded_path); - } - - #[test] - fn test_reading_of_verbose_proofs() { - let db = make_deep_tree(); - - let path = vec![TEST_LEAF, b"innertree"]; - let mut query = Query::new(); - query.insert_all(); - - let batch = StorageBatch::new(); - - let merk = db - 
.open_non_transactional_merk_at_path( - [TEST_LEAF, b"innertree"].as_ref().into(), - Some(&batch), - ) - .unwrap() - .unwrap(); - let expected_root_hash = merk.root_hash().unwrap(); - - let mut proof = vec![]; - db.generate_and_store_merk_proof( - &path.as_slice().into(), - &merk, - &query, - None, - ProofTokenType::Merk, - &mut proof, - true, - b"innertree", - ) - .unwrap() - .unwrap(); - assert_ne!(proof.len(), 0); - - let mut proof_reader = ProofReader::new(&proof); - let (proof_token_type, proof, key) = proof_reader.read_verbose_proof().unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - assert_eq!(key, Some(b"innertree".to_vec())); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, expected_root_hash); - assert_eq!(result_set.result_set.len(), 3); - - // what is the key is empty?? - let merk = db - .open_non_transactional_merk_at_path(EMPTY_PATH, Some(&batch)) - .unwrap() - .unwrap(); - let expected_root_hash = merk.root_hash().unwrap(); - - let mut proof = vec![]; - db.generate_and_store_merk_proof( - &EMPTY_PATH, - &merk, - &query, - None, - ProofTokenType::Merk, - &mut proof, - true, - &[], - ) - .unwrap() - .unwrap(); - assert_ne!(proof.len(), 0); - - let mut proof_reader = ProofReader::new(&proof); - let (proof_token_type, proof, key) = proof_reader.read_verbose_proof().unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - assert_eq!(key, Some(vec![])); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, expected_root_hash); - assert_eq!(result_set.result_set.len(), 3); - } - - #[test] - fn test_reading_verbose_proof_at_key() { - // going to generate an array of multiple proofs with different keys - let db = make_deep_tree(); - let mut proofs = vec![]; - - let mut query = Query::new(); - query.insert_all(); - - // insert all under inner tree - let path = vec![TEST_LEAF, 
b"innertree"]; - - let batch = StorageBatch::new(); - - let merk = db - .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) - .unwrap() - .unwrap(); - let inner_tree_root_hash = merk.root_hash().unwrap(); - db.generate_and_store_merk_proof( - &path.as_slice().into(), - &merk, - &query, - None, - ProofTokenType::Merk, - &mut proofs, - true, - path.iter().last().unwrap_or(&(&[][..])), - ) - .unwrap() - .unwrap(); - - // insert all under innertree4 - let path = vec![TEST_LEAF, b"innertree4"]; - let merk = db - .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) - .unwrap() - .unwrap(); - let inner_tree_4_root_hash = merk.root_hash().unwrap(); - db.generate_and_store_merk_proof( - &path.as_slice().into(), - &merk, - &query, - None, - ProofTokenType::Merk, - &mut proofs, - true, - path.iter().last().unwrap_or(&(&[][..])), - ) - .unwrap() - .unwrap(); - - // insert all for deeper_1 - let path: Vec<&[u8]> = vec![b"deep_leaf", b"deep_node_1", b"deeper_1"]; - let merk = db - .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) - .unwrap() - .unwrap(); - let deeper_1_root_hash = merk.root_hash().unwrap(); - db.generate_and_store_merk_proof( - &path.as_slice().into(), - &merk, - &query, - None, - ProofTokenType::Merk, - &mut proofs, - true, - path.iter().last().unwrap_or(&(&[][..])), - ) - .unwrap() - .unwrap(); - - // read the proof at innertree - let contextual_proof = proofs.clone(); - let mut proof_reader = ProofReader::new(&contextual_proof); - let (proof_token_type, proof) = proof_reader - .read_verbose_proof_at_key(b"innertree") - .unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, inner_tree_root_hash); - assert_eq!(result_set.result_set.len(), 3); - - // read the proof at innertree4 - let contextual_proof = proofs.clone(); - let mut proof_reader = 
ProofReader::new(&contextual_proof); - let (proof_token_type, proof) = proof_reader - .read_verbose_proof_at_key(b"innertree4") - .unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, inner_tree_4_root_hash); - assert_eq!(result_set.result_set.len(), 2); - - // read the proof at deeper_1 - let contextual_proof = proofs.clone(); - let mut proof_reader = ProofReader::new(&contextual_proof); - let (proof_token_type, proof) = - proof_reader.read_verbose_proof_at_key(b"deeper_1").unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, deeper_1_root_hash); - assert_eq!(result_set.result_set.len(), 3); - - // read the proof at an invalid key - let contextual_proof = proofs.clone(); - let mut proof_reader = ProofReader::new(&contextual_proof); - let reading_result = proof_reader.read_verbose_proof_at_key(b"unknown_key"); - assert!(reading_result.is_err()) - } -} diff --git a/grovedb/src/operations/proof_old/mod.rs b/grovedb/src/operations/proof_old/mod.rs deleted file mode 100644 index f505b3eb..00000000 --- a/grovedb/src/operations/proof_old/mod.rs +++ /dev/null @@ -1,11 +0,0 @@ -//! 
Proof operations - -// #[cfg(feature = "full")] -// mod generate; -#[cfg(any(feature = "full", feature = "verify"))] -pub mod util; -#[cfg(any(feature = "full", feature = "verify"))] -pub mod verify; - -// #[cfg(feature = "full")] -// pub use generate::ProveOptions; diff --git a/grovedb/src/operations/proof_old/util.rs b/grovedb/src/operations/proof_old/util.rs deleted file mode 100644 index c3749a16..00000000 --- a/grovedb/src/operations/proof_old/util.rs +++ /dev/null @@ -1,501 +0,0 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. 
- -use std::fmt; -#[cfg(any(feature = "full", feature = "verify"))] -use std::io::Read; -#[cfg(feature = "full")] -use std::io::Write; - -use grovedb_merk::{ - proofs::query::{Key, Path, ProvedKeyValue}, - CryptoHash, -}; -#[cfg(any(feature = "full", feature = "verify"))] -use integer_encoding::{VarInt, VarIntReader}; - -#[cfg(any(feature = "full", feature = "verify"))] -use crate::Error; -use crate::operations::proof::verify::ProvedKeyValues; - -#[cfg(any(feature = "full", feature = "verify"))] -pub const EMPTY_TREE_HASH: [u8; 32] = [0; 32]; - -pub type ProofTokenInfo = (ProofTokenType, Vec, Option>); - -#[cfg(any(feature = "full", feature = "verify"))] -#[derive(Debug, PartialEq, Eq)] -/// Proof type -// TODO: there might be a better name for this -pub enum ProofTokenType { - Merk, - SizedMerk, - EmptyTree, - AbsentPath, - PathInfo, - Invalid, -} - -#[cfg(any(feature = "full", feature = "verify"))] -impl fmt::Display for ProofTokenType { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let variant_str = match self { - ProofTokenType::Merk => "Merk", - ProofTokenType::SizedMerk => "SizedMerk", - ProofTokenType::EmptyTree => "EmptyTree", - ProofTokenType::AbsentPath => "AbsentPath", - ProofTokenType::PathInfo => "PathInfo", - ProofTokenType::Invalid => "Invalid", - }; - write!(f, "{}", variant_str) - } -} - -#[cfg(any(feature = "full", feature = "verify"))] -impl From for u8 { - fn from(proof_token_type: ProofTokenType) -> Self { - match proof_token_type { - ProofTokenType::Merk => 0x01, - ProofTokenType::SizedMerk => 0x02, - ProofTokenType::EmptyTree => 0x04, - ProofTokenType::AbsentPath => 0x05, - ProofTokenType::PathInfo => 0x06, - ProofTokenType::Invalid => 0x10, - } - } -} - -#[cfg(any(feature = "full", feature = "verify"))] -impl From for ProofTokenType { - fn from(val: u8) -> Self { - match val { - 0x01 => ProofTokenType::Merk, - 0x02 => ProofTokenType::SizedMerk, - 0x04 => ProofTokenType::EmptyTree, - 0x05 => ProofTokenType::AbsentPath, - 0x06 
=> ProofTokenType::PathInfo, - _ => ProofTokenType::Invalid, - } - } -} - -#[cfg(any(feature = "full", feature = "verify"))] -impl ProofTokenType { - pub fn u8_to_display(val: u8) -> String { - match val { - 0x01 => "merk".to_string(), - 0x02 => "sized merk".to_string(), - 0x04 => "empty tree".to_string(), - 0x05 => "absent path".to_string(), - 0x06 => "path info".to_string(), - v => format!("invalid proof token {}", v), - } - } -} - -#[cfg(any(feature = "full", feature = "verify"))] -#[derive(Debug)] -// TODO: possibility for a proof writer?? -/// Proof reader -pub struct ProofReader<'a> { - proof_data: &'a [u8], - is_verbose: bool, -} - -#[cfg(any(feature = "full", feature = "verify"))] -impl<'a> ProofReader<'a> { - /// New proof reader - pub fn new(proof_data: &'a [u8]) -> Self { - Self { - proof_data, - is_verbose: false, - } - } - - /// New proof reader with verbose_status - pub fn new_with_verbose_status(proof_data: &'a [u8], is_verbose: bool) -> Self { - Self { - proof_data, - is_verbose, - } - } - - /// For non verbose proof read the immediate next proof, for verbose proof - /// read the first proof that matches a given key - pub fn read_next_proof(&mut self, key: &[u8]) -> Result<(ProofTokenType, Vec), Error> { - if self.is_verbose { - self.read_verbose_proof_at_key(key) - } else { - let (proof_token_type, proof, _) = self.read_proof_with_optional_type(None)?; - Ok((proof_token_type, proof)) - } - } - - /// Read the next proof, return the proof type - pub fn read_proof(&mut self) -> Result { - if self.is_verbose { - self.read_verbose_proof_with_optional_type(None) - } else { - self.read_proof_with_optional_type(None) - } - } - - /// Read verbose proof - pub fn read_verbose_proof(&mut self) -> Result { - self.read_verbose_proof_with_optional_type(None) - } - - /// Reads data from proof into slice of specific size - fn read_into_slice(&mut self, buf: &mut [u8]) -> Result { - self.proof_data - .read(buf) - .map_err(|_| 
Error::CorruptedData(String::from("failed to read proof data"))) - } - - /// Read varint encoded length information from proof data - fn read_length_data(&mut self) -> Result { - self.proof_data - .read_varint() - .map_err(|_| Error::InvalidProof("expected length data".to_string())) - } - - /// Read proof with optional type - pub fn read_proof_with_optional_type( - &mut self, - expected_data_type_option: Option, - ) -> Result { - let (proof_token_type, proof, _) = - self.read_proof_internal_with_optional_type(expected_data_type_option, false)?; - Ok((proof_token_type, proof, None)) - } - - /// Read verbose proof with optional type - pub fn read_verbose_proof_with_optional_type( - &mut self, - expected_data_type_option: Option, - ) -> Result { - let (proof_token_type, proof, key) = - self.read_proof_internal_with_optional_type(expected_data_type_option, true)?; - Ok(( - proof_token_type, - proof, - Some(key.ok_or(Error::InvalidProof( - "key must exist for verbose merk proofs".to_string(), - ))?), - )) - } - - /// Read verbose proof at key - /// Returns an error if it can't find a proof for that key - pub fn read_verbose_proof_at_key( - &mut self, - expected_key: &[u8], - ) -> Result<(ProofTokenType, Vec), Error> { - let (proof_token_type, proof, _) = loop { - let (proof_token_type, proof, key) = self.read_verbose_proof()?; - let key = key.expect("read_verbose_proof enforces that this exists"); - if key.as_slice() == expected_key { - break (proof_token_type, proof, key); - } - }; - - Ok((proof_token_type, proof)) - } - - /// Read proof with optional type - pub fn read_proof_internal_with_optional_type( - &mut self, - expected_data_type_option: Option, - is_verbose: bool, - ) -> Result { - let mut data_type = [0; 1]; - self.read_into_slice(&mut data_type)?; - - if let Some(expected_data_type) = expected_data_type_option { - if data_type[0] != expected_data_type { - return Err(Error::InvalidProof(format!( - "wrong data_type, expected {}, got {}", - expected_data_type, 
data_type[0] - ))); - } - } - - let proof_token_type: ProofTokenType = data_type[0].into(); - - if proof_token_type == ProofTokenType::EmptyTree - || proof_token_type == ProofTokenType::AbsentPath - { - return Ok((proof_token_type, vec![], None)); - } - - let (proof, key) = if proof_token_type == ProofTokenType::Merk - || proof_token_type == ProofTokenType::SizedMerk - { - // if verbose we need to read the key first - let key = if is_verbose { - let key_length = self.read_length_data()?; - - let mut key = vec![0; key_length]; - self.read_into_slice(&mut key)?; - - Some(key) - } else { - None - }; - - let proof_length = self.read_length_data()?; - - let mut proof = vec![0; proof_length]; - self.read_into_slice(&mut proof)?; - - (proof, key) - } else { - return Err(Error::InvalidProof( - "expected merk or sized merk proof".to_string(), - )); - }; - - Ok((proof_token_type, proof, key)) - } - - /// Reads path information from the proof vector - pub fn read_path_info(&mut self) -> Result>, Error> { - let mut data_type = [0; 1]; - self.read_into_slice(&mut data_type)?; - - if data_type != [Into::::into(ProofTokenType::PathInfo)] { - return Err(Error::InvalidProof(format!( - "wrong data_type, expected path_info, got {}", - ProofTokenType::u8_to_display(data_type[0]) - ))); - } - - let mut path = vec![]; - let path_slice_len = self.read_length_data()?; - - for _ in 0..path_slice_len { - let path_len = self.read_length_data()?; - let mut path_value = vec![0; path_len]; - self.read_into_slice(&mut path_value)?; - path.push(path_value); - } - - Ok(path) - } -} - -#[cfg(feature = "full")] -/// Write to vec -// TODO: this can error out handle the error -pub fn write_to_vec(dest: &mut W, value: &[u8]) -> Result<(), Error> { - dest.write_all(value) - .map_err(|_e| Error::InternalError("failed to write to vector")) -} - -#[cfg(feature = "full")] -/// Write a slice to the vector, first write the length of the slice -pub fn write_slice_to_vec(dest: &mut W, value: &[u8]) -> 
Result<(), Error> { - write_to_vec(dest, value.len().encode_var_vec().as_slice())?; - write_to_vec(dest, value)?; - Ok(()) -} - -#[cfg(feature = "full")] -/// Write a slice of a slice to a flat vector:w -pub fn write_slice_of_slice_to_slice(dest: &mut W, value: &[&[u8]]) -> Result<(), Error> { - // write the number of slices we are about to write - write_to_vec(dest, value.len().encode_var_vec().as_slice())?; - for inner_slice in value { - write_slice_to_vec(dest, inner_slice)?; - } - Ok(()) -} - -#[cfg(any(feature = "full", feature = "verify"))] -pub fn reduce_limit_by( - limit: &mut Option, - n: u16, -) { - if let Some(limit_value) = *limit { - if limit_value > 0 { - if limit_value >= n { - *limit = Some(limit_value - n); - } else { - *limit = Some(0); - } - } - } -} - -pub fn increase_limit_by( - limit: &mut Option, - limit_inc: u16, -) { - if let Some(limit_value) = *limit { - *limit = Some(limit_value + limit_inc); - } -} - -/// Proved path-key-values -pub type ProvedPathKeyValues = Vec; - -/// Proved path-key-value -#[cfg(any(feature = "full", feature = "verify"))] -#[derive(Debug, PartialEq, Eq)] -pub struct ProvedPathKeyValue { - /// Path - pub path: Path, - /// Key - pub key: Key, - /// Value - pub value: Vec, - /// Proof - pub proof: CryptoHash, -} - -impl ProvedPathKeyValue { - // TODO: make path a reference - /// Consumes the ProvedKeyValue and returns a ProvedPathKeyValue given a - /// Path - pub fn from_proved_key_value(path: Path, proved_key_value: ProvedKeyValue) -> Self { - Self { - path, - key: proved_key_value.key, - value: proved_key_value.value, - proof: proved_key_value.proof, - } - } - - /// Transforms multiple ProvedKeyValues to their equivalent - /// ProvedPathKeyValue given a Path - pub fn from_proved_key_values(path: Path, proved_key_values: ProvedKeyValues) -> Vec { - proved_key_values - .into_iter() - .map(|pkv| Self::from_proved_key_value(path.clone(), pkv)) - .collect() - } -} - -#[cfg(test)] -mod tests { - use 
grovedb_merk::proofs::query::ProvedKeyValue; - - use crate::operations::proof::util::{ProofTokenType, ProvedPathKeyValue}; - - #[test] - fn test_proof_token_type_encoding() { - assert_eq!(0x01_u8, Into::::into(ProofTokenType::Merk)); - assert_eq!(0x02_u8, Into::::into(ProofTokenType::SizedMerk)); - assert_eq!(0x04_u8, Into::::into(ProofTokenType::EmptyTree)); - assert_eq!(0x05_u8, Into::::into(ProofTokenType::AbsentPath)); - assert_eq!(0x06_u8, Into::::into(ProofTokenType::PathInfo)); - assert_eq!(0x10_u8, Into::::into(ProofTokenType::Invalid)); - } - - #[test] - fn test_proof_token_type_decoding() { - assert_eq!(ProofTokenType::Merk, 0x01_u8.into()); - assert_eq!(ProofTokenType::SizedMerk, 0x02_u8.into()); - assert_eq!(ProofTokenType::EmptyTree, 0x04_u8.into()); - assert_eq!(ProofTokenType::AbsentPath, 0x05_u8.into()); - assert_eq!(ProofTokenType::PathInfo, 0x06_u8.into()); - assert_eq!(ProofTokenType::Invalid, 0x10_u8.into()); - } - - #[test] - fn test_proved_path_from_single_proved_key_value() { - let path = vec![b"1".to_vec(), b"2".to_vec()]; - let proved_key_value = ProvedKeyValue { - key: b"a".to_vec(), - value: vec![5, 6], - proof: [0; 32], - }; - let proved_path_key_value = - ProvedPathKeyValue::from_proved_key_value(path.clone(), proved_key_value); - assert_eq!( - proved_path_key_value, - ProvedPathKeyValue { - path, - key: b"a".to_vec(), - value: vec![5, 6], - proof: [0; 32] - } - ); - } - - #[test] - fn test_many_proved_path_from_many_proved_key_value() { - let path = vec![b"1".to_vec(), b"2".to_vec()]; - let proved_key_value_a = ProvedKeyValue { - key: b"a".to_vec(), - value: vec![5, 6], - proof: [0; 32], - }; - let proved_key_value_b = ProvedKeyValue { - key: b"b".to_vec(), - value: vec![5, 7], - proof: [1; 32], - }; - let proved_key_value_c = ProvedKeyValue { - key: b"c".to_vec(), - value: vec![6, 7], - proof: [2; 32], - }; - let proved_key_values = vec![proved_key_value_a, proved_key_value_b, proved_key_value_c]; - let proved_path_key_values = - 
ProvedPathKeyValue::from_proved_key_values(path.clone(), proved_key_values); - assert_eq!(proved_path_key_values.len(), 3); - assert_eq!( - proved_path_key_values[0], - ProvedPathKeyValue { - path: path.clone(), - key: b"a".to_vec(), - value: vec![5, 6], - proof: [0; 32] - } - ); - assert_eq!( - proved_path_key_values[1], - ProvedPathKeyValue { - path: path.clone(), - key: b"b".to_vec(), - value: vec![5, 7], - proof: [1; 32] - } - ); - assert_eq!( - proved_path_key_values[2], - ProvedPathKeyValue { - path, - key: b"c".to_vec(), - value: vec![6, 7], - proof: [2; 32] - } - ); - } -} diff --git a/grovedb/src/operations/proof_old/verify.rs b/grovedb/src/operations/proof_old/verify.rs deleted file mode 100644 index d26ac4ef..00000000 --- a/grovedb/src/operations/proof_old/verify.rs +++ /dev/null @@ -1,917 +0,0 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. 
- -//! Verify proof operations - -use std::{borrow::Cow, collections::BTreeMap}; - -use grovedb_merk::proofs::query::PathKey; -#[cfg(any(feature = "full", feature = "verify"))] -pub use grovedb_merk::proofs::query::{Path, ProvedKeyValue}; -#[cfg(any(feature = "full", feature = "verify"))] -use grovedb_merk::{ - proofs::Query, - tree::{combine_hash, value_hash as value_hash_fn}, - CryptoHash, -}; - -use crate::{ - operations::proof::util::{ - reduce_limit_by, ProvedPathKeyValue, ProvedPathKeyValues, - }, - query_result_type::PathKeyOptionalElementTrio, - versioning::read_and_consume_proof_version, - SizedQuery, -}; -#[cfg(any(feature = "full", feature = "verify"))] -use crate::{ - operations::proof::util::{ - ProofReader, ProofTokenType, ProofTokenType::AbsentPath, EMPTY_TREE_HASH, - }, - Element, Error, GroveDb, PathQuery, -}; - -#[cfg(any(feature = "full", feature = "verify"))] -pub type ProvedKeyValues = Vec; - -#[cfg(any(feature = "full", feature = "verify"))] -type EncounteredAbsence = bool; - -#[cfg(any(feature = "full", feature = "verify"))] -impl GroveDb { - /// Verify proof given a path query - /// Returns the root hash + deserialized elements - pub fn verify_query( - proof: &[u8], - query: &PathQuery, - ) -> Result<([u8; 32], Vec), Error> { - let (root_hash, proved_path_key_values) = Self::verify_query_raw(proof, query)?; - let path_key_optional_elements = proved_path_key_values - .into_iter() - .map(|pkv| pkv.try_into()) - .collect::, Error>>()?; - Ok((root_hash, path_key_optional_elements)) - } - - /// Verify proof for a given path query returns serialized elements - pub fn verify_query_raw( - proof: &[u8], - query: &PathQuery, - ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { - let mut verifier = ProofVerifier::new(query); - let hash = verifier.execute_proof(proof, query, false)?; - - Ok((hash, verifier.result_set)) - } - - /// Verify proof given multiple path queries. 
- /// If we have more than one path query we merge before performing - /// verification. - pub fn verify_query_many( - proof: &[u8], - query: Vec<&PathQuery>, - ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { - if query.len() > 1 { - let query = PathQuery::merge(query)?; - GroveDb::verify_query_raw(proof, &query) - } else { - GroveDb::verify_query_raw(proof, query[0]) - } - } - - /// Given a verbose proof, we can verify it with a subset path query. - /// Returning the root hash and the deserialized result set. - pub fn verify_subset_query( - proof: &[u8], - query: &PathQuery, - ) -> Result<([u8; 32], Vec), Error> { - let (root_hash, proved_path_key_values) = Self::verify_subset_query_raw(proof, query)?; - let path_key_optional_elements = proved_path_key_values - .into_iter() - .map(|pkv| pkv.try_into()) - .collect::, Error>>()?; - Ok((root_hash, path_key_optional_elements)) - } - - /// Given a verbose proof, we can verify it with a subset path query. - /// Returning the root hash and the serialized result set. 
- pub fn verify_subset_query_raw( - proof: &[u8], - query: &PathQuery, - ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { - let mut verifier = ProofVerifier::new(query); - let hash = verifier.execute_proof(proof, query, true)?; - Ok((hash, verifier.result_set)) - } - - /// Verify non subset query return the absence proof - /// Returns all possible keys within the Path Query with an optional Element - /// Value Element is set to None if absent - pub fn verify_query_with_absence_proof( - proof: &[u8], - query: &PathQuery, - ) -> Result<([u8; 32], Vec), Error> { - Self::verify_with_absence_proof(proof, query, Self::verify_query) - } - - /// Verify subset query return the absence proof - /// Returns all possible keys within the Path Query with an optional Element - /// Value Element is set to None if absent - pub fn verify_subset_query_with_absence_proof( - proof: &[u8], - query: &PathQuery, - ) -> Result<([u8; 32], Vec), Error> { - Self::verify_with_absence_proof(proof, query, Self::verify_subset_query) - } - - /// Verifies the proof and returns both elements in the result set and the - /// elements in query but not in state. - /// Note: This only works for certain path queries. - // TODO: We should not care about terminal keys, as theoretically they can be - // infinite we should perform the absence check solely on the proof and the - // given key, this is a temporary solution - fn verify_with_absence_proof( - proof: &[u8], - query: &PathQuery, - verification_fn: T, - ) -> Result<([u8; 32], Vec), Error> - where - T: Fn(&[u8], &PathQuery) -> Result<([u8; 32], Vec), Error>, - { - // must have a limit - let max_results = query.query.limit.ok_or(Error::NotSupported( - "limits must be set in verify_query_with_absence_proof".to_string(), - ))? 
as usize; - - // must have no offset - if query.query.offset.is_some() { - return Err(Error::NotSupported( - "offsets are not supported for verify_query_with_absence_proof".to_string(), - )); - } - - let terminal_keys = query.terminal_keys(max_results)?; - - // need to actually verify the query - let (root_hash, result_set) = verification_fn(proof, query)?; - - // convert the result set to a btree map - let mut result_set_as_map: BTreeMap> = result_set - .into_iter() - .map(|(path, key, element)| ((path, key), element)) - .collect(); - - let result_set_with_absence: Vec = terminal_keys - .into_iter() - .map(|terminal_key| { - let element = result_set_as_map.remove(&terminal_key).flatten(); - (terminal_key.0, terminal_key.1, element) - }) - .collect(); - - Ok((root_hash, result_set_with_absence)) - } - - /// Verify subset proof with a chain of path query functions. - /// After subset verification with the first path query, the result if - /// passed to the next path query generation function which generates a - /// new path query Apply the new path query, and pass the result to the - /// next ... This is useful for verifying proofs with multiple path - /// queries that depend on one another. 
- pub fn verify_query_with_chained_path_queries( - proof: &[u8], - first_query: &PathQuery, - chained_path_queries: Vec, - ) -> Result<(CryptoHash, Vec>), Error> - where - C: Fn(Vec) -> Option, - { - let mut results = vec![]; - - let (last_root_hash, elements) = Self::verify_subset_query(proof, first_query)?; - results.push(elements); - - // we should iterate over each chained path queries - for path_query_generator in chained_path_queries { - let new_path_query = path_query_generator(results[results.len() - 1].clone()).ok_or( - Error::InvalidInput("one of the path query generators returns no path query"), - )?; - let (new_root_hash, new_elements) = Self::verify_subset_query(proof, &new_path_query)?; - if new_root_hash != last_root_hash { - return Err(Error::InvalidProof(format!( - "root hash for different path queries do no match, first is {}, this one is {}", - hex::encode(last_root_hash), - hex::encode(new_root_hash) - ))); - } - results.push(new_elements); - } - - Ok((last_root_hash, results)) - } -} - -#[cfg(any(feature = "full", feature = "verify"))] -/// Proof verifier -struct ProofVerifier { - limit: Option, - result_set: ProvedPathKeyValues, -} - -#[cfg(any(feature = "full", feature = "verify"))] -impl ProofVerifier { - /// New query - pub fn new(query: &PathQuery) -> Self { - ProofVerifier { - limit: query.query.limit, - result_set: vec![], - } - } - - /// Execute proof - pub fn execute_proof( - &mut self, - proof: &[u8], - query: &PathQuery, - is_verbose: bool, - ) -> Result<[u8; 32], Error> { - let (_proof_version, proof) = read_and_consume_proof_version(proof)?; - let mut proof_reader = ProofReader::new_with_verbose_status(proof, is_verbose); - - let path_slices = query.path.iter().map(|x| x.as_slice()).collect::>(); - let mut query = Cow::Borrowed(query); - - // TODO: refactor and add better comments - // if verbose, the first thing we want to do is read the path info - if is_verbose { - let original_path = proof_reader.read_path_info()?; - - if 
original_path == path_slices { - // do nothing - } else if original_path.len() > path_slices.len() { - // TODO: can we relax this constraint - return Err(Error::InvalidProof( - "original path query path must not be greater than the subset path len" - .to_string(), - )); - } else { - let original_path_in_new_path = original_path - .iter() - .all(|key| path_slices.contains(&key.as_slice())); - - if !original_path_in_new_path { - return Err(Error::InvalidProof( - "the original path should be a subset of the subset path".to_string(), - )); - } else { - // We construct a new path query - let path_not_common = path_slices[original_path.len()..].to_vec(); - let mut path_iter = path_not_common.iter(); - - let mut new_query = Query::new(); - if path_iter.len() >= 1 { - new_query - .insert_key(path_iter.next().expect("confirmed has value").to_vec()); - } - - // need to add the first key to the query - new_query.set_subquery_path(path_iter.map(|a| a.to_vec()).collect()); - new_query.set_subquery(query.query.query.clone()); - - query = Cow::Owned(PathQuery::new( - original_path, - SizedQuery::new(new_query, query.query.limit, query.query.offset), - )); - } - } - } - - let (proof_token_type, proof, _) = proof_reader.read_proof()?; - - let root_hash = if proof_token_type == AbsentPath { - self.verify_absent_path(&mut proof_reader, path_slices)? - } else { - let path_owned = query.path.iter().map(|a| a.to_vec()).collect(); - let mut last_subtree_root_hash = self.execute_subquery_proof( - proof_token_type, - proof, - &mut proof_reader, - query.as_ref(), - path_owned, - )?; - - // validate the path elements are connected - self.verify_path_to_root( - query.as_ref(), - query.path.iter().map(|a| a.as_ref()).collect(), - &mut proof_reader, - &mut last_subtree_root_hash, - )? 
- }; - - Ok(root_hash) - } - - fn execute_subquery_proof( - &mut self, - proof_token_type: ProofTokenType, - proof: Vec, - proof_reader: &mut ProofReader, - query: &PathQuery, - path: Path, - ) -> Result<[u8; 32], Error> { - let last_root_hash: [u8; 32]; - - match proof_token_type { - ProofTokenType::SizedMerk => { - // verify proof with limit and offset values - let verification_result = self.execute_merk_proof( - ProofTokenType::SizedMerk, - &proof, - &query.query.query, - query.query.query.left_to_right, - path, - )?; - - last_root_hash = verification_result.0; - } - ProofTokenType::Merk => { - // for non leaf subtrees, we want to prove that all the queried keys - // have an accompanying proof as long as the limit is non zero - // and their child subtree is not empty - let (proof_root_hash, children) = self.execute_merk_proof( - ProofTokenType::Merk, - &proof, - &query.query.query, - query.query.query.left_to_right, - path, - )?; - - last_root_hash = proof_root_hash; - let children = children.ok_or(Error::InvalidProof( - "MERK_PROOF always returns a result set".to_string(), - ))?; - - for proved_path_key_value in children { - let ProvedPathKeyValue { - path, - key, - value: value_bytes, - proof: value_hash, - } = proved_path_key_value; - let child_element = Element::deserialize(value_bytes.as_slice())?; - match child_element { - Element::Tree(expected_root_key, _) - | Element::SumTree(expected_root_key, ..) 
=> { - let mut expected_combined_child_hash = value_hash; - let mut current_value_bytes = value_bytes; - - if self.limit == Some(0) { - // we are done verifying the subqueries - break; - } - - let (subquery_path, subquery_value) = - Element::subquery_paths_and_value_for_sized_query( - &query.query, - key.as_slice(), - ); - - if subquery_value.is_none() && subquery_path.is_none() { - // add this element to the result set - reduce_limit_by( - &mut self.limit, - 1, - ); - - self.result_set.push( - ProvedPathKeyValue::from_proved_key_value( - path, - ProvedKeyValue { - key, - value: current_value_bytes, - proof: value_hash, - }, - ), - ); - - continue; - } - - // What is the equivalent for an empty tree - if expected_root_key.is_none() { - // child node is empty, move on to next - continue; - } - - // update the path, we are about to perform a subquery call - let mut new_path = path.to_owned(); - new_path.push(key); - - if subquery_path.is_some() - && !subquery_path.as_ref().unwrap().is_empty() - { - if subquery_value.is_none() { - self.verify_subquery_path( - proof_reader, - ProofTokenType::SizedMerk, - &mut subquery_path.expect("confirmed it has a value above"), - &mut expected_combined_child_hash, - &mut current_value_bytes, - &mut new_path, - )?; - continue; - } else { - let (_, result_set_opt, encountered_absence) = self - .verify_subquery_path( - proof_reader, - ProofTokenType::Merk, - &mut subquery_path - .expect("confirmed it has a value above"), - &mut expected_combined_child_hash, - &mut current_value_bytes, - &mut new_path, - )?; - - if encountered_absence { - // we hit an absence proof while verifying the subquery path - continue; - } - - let subquery_path_result_set = result_set_opt; - if subquery_path_result_set.is_none() { - // this means a sized proof was generated for the subquery - // key - // which is invalid as there exists a subquery value - return Err(Error::InvalidProof( - "expected unsized proof for subquery path as subquery \ - value exists" - 
.to_string(), - )); - } - let subquery_path_result_set = - subquery_path_result_set.expect("confirmed exists above"); - - if subquery_path_result_set.is_empty() { - // we have a valid proof that shows the absence of the - // subquery path in the tree, hence the subquery value - // cannot be applied, move on to the next. - continue; - } - - Self::update_root_key_from_subquery_path_element( - &mut expected_combined_child_hash, - &mut current_value_bytes, - &subquery_path_result_set, - )?; - } - } - - let new_path_query = - PathQuery::new_unsized(vec![], subquery_value.unwrap()); - - let (child_proof_token_type, child_proof) = proof_reader - .read_next_proof(new_path.last().unwrap_or(&Default::default()))?; - - let child_hash = self.execute_subquery_proof( - child_proof_token_type, - child_proof, - proof_reader, - &new_path_query, - new_path, - )?; - - let combined_child_hash = combine_hash( - value_hash_fn(¤t_value_bytes).value(), - &child_hash, - ) - .value() - .to_owned(); - - if combined_child_hash != expected_combined_child_hash { - return Err(Error::InvalidProof(format!( - "child hash {} doesn't match the expected hash {}", - hex::encode(combined_child_hash), - hex::encode(expected_combined_child_hash) - ))); - } - } - _ => { - // encountered a non tree element, we can't apply a subquery to it - // add it to the result set. 
- if self.limit == Some(0) { - break; - } - - reduce_limit_by(&mut self.limit, 1); - - self.result_set - .push(ProvedPathKeyValue::from_proved_key_value( - path, - ProvedKeyValue { - key, - value: value_bytes, - proof: value_hash, - }, - )); - } - } - } - } - ProofTokenType::EmptyTree => { - last_root_hash = EMPTY_TREE_HASH; - } - t => { - // execute_subquery_proof only expects proofs for merk trees - // root proof is handled separately - return Err(Error::InvalidProof(format!( - "wrong proof type, expected sized merk, merk or empty tree but got {}", - t - ))); - } - } - Ok(last_root_hash) - } - - /// Deserialize subkey_element and update expected root hash and element - /// value - fn update_root_key_from_subquery_path_element( - expected_child_hash: &mut CryptoHash, - current_value_bytes: &mut Vec, - subquery_path_result_set: &[ProvedPathKeyValue], - ) -> Result<(), Error> { - let elem_value = &subquery_path_result_set[0].value; - let subquery_path_element = Element::deserialize(elem_value) - .map_err(|_| Error::CorruptedData("failed to deserialize element".to_string()))?; - match subquery_path_element { - Element::Tree(..) | Element::SumTree(..) 
=> { - *expected_child_hash = subquery_path_result_set[0].proof; - *current_value_bytes = subquery_path_result_set[0].value.to_owned(); - } - e => { - // the means that the subquery path pointed to a non tree - // element, this is not valid as you cannot apply the - // the subquery value to non tree items - return Err(Error::InvalidProof(format!( - "subquery path cannot point to non tree element, got {}", - e.type_str() - ))); - } - } - Ok(()) - } - - /// Checks that a valid proof showing the existence or absence of the - /// subquery path is present - fn verify_subquery_path( - &mut self, - proof_reader: &mut ProofReader, - expected_proof_token_type: ProofTokenType, - subquery_path: &mut Path, - expected_root_hash: &mut CryptoHash, - current_value_bytes: &mut Vec, - current_path: &mut Path, - ) -> Result<(CryptoHash, Option, EncounteredAbsence), Error> { - // the subquery path contains at least one item. - let last_key = subquery_path.remove(subquery_path.len() - 1); - - for subquery_key in subquery_path.iter() { - let (proof_token_type, subkey_proof) = - proof_reader.read_next_proof(current_path.last().unwrap_or(&Default::default()))?; - // intermediate proofs are all going to be unsized merk proofs - if proof_token_type != ProofTokenType::Merk { - return Err(Error::InvalidProof(format!( - "expected MERK proof type for intermediate subquery path keys, got {}", - proof_token_type - ))); - } - match proof_token_type { - ProofTokenType::Merk => { - let mut key_as_query = Query::new(); - key_as_query.insert_key(subquery_key.to_owned()); - current_path.push(subquery_key.to_owned()); - - let (proof_root_hash, result_set) = self.execute_merk_proof( - proof_token_type, - &subkey_proof, - &key_as_query, - key_as_query.left_to_right, - current_path.to_owned(), - )?; - - // should always be some as we force the proof type to be MERK - debug_assert!(result_set.is_some(), "{}", true); - - // result_set being empty means we could not find the given key in the subtree - // 
which essentially means an absence proof - if result_set - .as_ref() - .expect("result set should always be some for merk proof type") - .is_empty() - { - return Ok((proof_root_hash, None, true)); - } - - // verify that the elements in the subquery path are linked by root hashes. - let combined_child_hash = - combine_hash(value_hash_fn(current_value_bytes).value(), &proof_root_hash) - .value() - .to_owned(); - - if combined_child_hash != *expected_root_hash { - return Err(Error::InvalidProof(format!( - "child hash {} doesn't match the expected hash {}", - hex::encode(combined_child_hash), - hex::encode(expected_root_hash) - ))); - } - - // after confirming they are linked use the latest hash values for subsequent - // checks - Self::update_root_key_from_subquery_path_element( - expected_root_hash, - current_value_bytes, - &result_set.expect("confirmed is some"), - )?; - } - t => { - return Err(Error::InvalidProof(format!( - "expected merk of sized merk proof type for subquery path, got {}", - t - ))); - } - } - } - - let (proof_token_type, subkey_proof) = - proof_reader.read_next_proof(current_path.last().unwrap_or(&Default::default()))?; - if proof_token_type != expected_proof_token_type { - return Err(Error::InvalidProof(format!( - "unexpected proof type for subquery path, expected {}, got {}", - expected_proof_token_type, proof_token_type - ))); - } - - match proof_token_type { - ProofTokenType::Merk | ProofTokenType::SizedMerk => { - let mut key_as_query = Query::new(); - key_as_query.insert_key(last_key.to_owned()); - - let verification_result = self.execute_merk_proof( - proof_token_type, - &subkey_proof, - &key_as_query, - key_as_query.left_to_right, - current_path.to_owned(), - )?; - - current_path.push(last_key); - - Ok((verification_result.0, verification_result.1, false)) - } - t => Err(Error::InvalidProof(format!( - "expected merk or sized merk proof type for subquery path, got {}", - t - ))), - } - } - - fn verify_absent_path( - &mut self, - 
proof_reader: &mut ProofReader, - path_slices: Vec<&[u8]>, - ) -> Result<[u8; 32], Error> { - let mut root_key_hash = None; - let mut expected_child_hash = None; - let mut last_result_set: ProvedPathKeyValues = vec![]; - - for key in path_slices { - let (proof_token_type, merk_proof, _) = proof_reader.read_proof()?; - if proof_token_type == ProofTokenType::EmptyTree { - // when we encounter the empty tree op, we need to ensure - // that the expected tree hash is the combination of the - // Element_value_hash and the empty root hash [0; 32] - let combined_hash = combine_hash( - value_hash_fn(last_result_set[0].value.as_slice()).value(), - &[0; 32], - ) - .unwrap(); - if Some(combined_hash) != expected_child_hash { - return Err(Error::InvalidProof( - "proof invalid: could not verify empty subtree while generating absent \ - path proof" - .to_string(), - )); - } else { - last_result_set = vec![]; - break; - } - } else if proof_token_type != ProofTokenType::Merk { - return Err(Error::InvalidProof(format!( - "expected a merk proof for absent path, got {}", - proof_token_type - ))); - } - - let mut child_query = Query::new(); - child_query.insert_key(key.to_vec()); - - // TODO: don't pass empty vec - let proof_result = self.execute_merk_proof( - ProofTokenType::Merk, - &merk_proof, - &child_query, - true, - // cannot return a result set - Vec::new(), - )?; - - if let Some(expected_child_hash) = expected_child_hash { - let combined_hash = combine_hash( - value_hash_fn(last_result_set[0].value.as_slice()).value(), - &proof_result.0, - ) - .value() - .to_owned(); - if combined_hash != expected_child_hash { - return Err(Error::InvalidProof(format!( - "proof invalid: invalid parent, expected {}, got {}", - hex::encode(expected_child_hash), - hex::encode(combined_hash) - ))); - } - } else { - root_key_hash = Some(proof_result.0); - } - - last_result_set = proof_result - .1 - .expect("MERK_PROOF always returns a result set"); - if last_result_set.is_empty() { - // if result set 
is empty then we have reached the absence point, break - break; - } - - let elem = Element::deserialize(last_result_set[0].value.as_slice())?; - let child_hash = match elem { - Element::Tree(..) | Element::SumTree(..) => Ok(Some(last_result_set[0].proof)), - e => Err(Error::InvalidProof(format!( - "intermediate proofs should be for trees, got {}", - e.type_str() - ))), - }?; - expected_child_hash = child_hash; - } - - if last_result_set.is_empty() { - if let Some(hash) = root_key_hash { - Ok(hash) - } else { - Err(Error::InvalidProof( - "proof invalid: no non root tree found".to_string(), - )) - } - } else { - Err(Error::InvalidProof( - "proof invalid: path not absent".to_string(), - )) - } - } - - /// Verifies that the correct proof was provided to confirm the path in - /// query - fn verify_path_to_root( - &mut self, - query: &PathQuery, - path_slices: Vec<&[u8]>, - proof_reader: &mut ProofReader, - expected_root_hash: &mut [u8; 32], - ) -> Result<[u8; 32], Error> { - let mut split_path = path_slices.split_last(); - while let Some((key, path_slice)) = split_path { - // for every subtree, there should be a corresponding proof for the parent - // which should prove that this subtree is a child of the parent tree - let (proof_token_type, parent_merk_proof) = - proof_reader.read_next_proof(path_slice.last().unwrap_or(&Default::default()))?; - if proof_token_type != ProofTokenType::Merk { - return Err(Error::InvalidProof(format!( - "wrong data_type expected Merk Proof, got {}", - proof_token_type - ))); - } - - let mut parent_query = Query::new(); - parent_query.insert_key(key.to_vec()); - - let proof_result = self.execute_merk_proof( - ProofTokenType::Merk, - &parent_merk_proof, - &parent_query, - query.query.query.left_to_right, - // TODO: don't pass empty vec - Vec::new(), - )?; - - let result_set = proof_result - .1 - .expect("MERK_PROOF always returns a result set"); - if result_set.is_empty() || &result_set[0].key != key { - return Err(Error::InvalidProof( - 
"proof invalid: invalid parent".to_string(), - )); - } - - let elem = Element::deserialize(result_set[0].value.as_slice())?; - let child_hash = match elem { - Element::Tree(..) | Element::SumTree(..) => Ok(result_set[0].proof), - t => Err(Error::InvalidProof(format!( - "intermediate proofs should be for trees, got {}", - t.type_str() - ))), - }?; - - let combined_root_hash = combine_hash( - value_hash_fn(&result_set[0].value).value(), - expected_root_hash, - ) - .value() - .to_owned(); - if child_hash != combined_root_hash { - return Err(Error::InvalidProof(format!( - "Bad path: tree hash does not have expected hash, got {}, expected {}", - hex::encode(child_hash), - hex::encode(combined_root_hash) - ))); - } - - *expected_root_hash = proof_result.0; - - split_path = path_slice.split_last(); - } - - Ok(*expected_root_hash) - } - - /// Execute a merk proof, update the state when a sized proof is - /// encountered i.e. update the limit, offset and result set values - fn execute_merk_proof( - &mut self, - proof_token_type: ProofTokenType, - proof: &[u8], - query: &Query, - left_to_right: bool, - path: Path, - ) -> Result<(CryptoHash, Option), Error> { - let is_sized_proof = proof_token_type == ProofTokenType::SizedMerk; - let mut limit = None; - - if is_sized_proof { - limit = self.limit; - } - - let (hash, result) = - grovedb_merk::execute_proof(proof, query, limit, left_to_right) - .unwrap() - .map_err(|e| { - eprintln!("{e}"); - Error::InvalidProof("invalid proof verification parameters".to_string()) - })?; - - // convert the result set to proved_path_key_values - let proved_path_key_values = - ProvedPathKeyValue::from_proved_key_values(path, result.result_set); - - if is_sized_proof { - self.limit = result.limit; - self.result_set.extend(proved_path_key_values); - Ok((hash, None)) - } else { - Ok((hash, Some(proved_path_key_values))) - } - } -} diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index 9be818e0..49f88b0c 100644 --- 
a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -33,12 +33,6 @@ pub struct PathQuery { pub query: SizedQuery, } -/// Do we go from left to right -pub type LeftToRight = bool; - -/// Do we have subqueries -pub type HasSubqueries = bool; - #[cfg(any(feature = "full", feature = "verify"))] impl fmt::Display for PathQuery { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -282,13 +276,13 @@ impl PathQuery { } } - pub fn query_items_at_path<'a>(&'a self, path: &[&[u8]]) -> Option { + pub fn query_items_at_path<'a>(&'a self, path: &[&[u8]]) -> Option { fn recursive_query_items<'b>( query: &'b Query, path: &[&[u8]], - ) -> Option> { + ) -> Option> { if path.is_empty() { - return Some(InternalCowItemsQuery::from_query(query)); + return Some(SinglePathSubquery::from_query(query)); } let key = path[0]; @@ -306,14 +300,14 @@ impl PathQuery { { return if path_after_top_removed.len() == subquery_path.len() { if let Some(subquery) = &subquery_branch.subquery { - Some(InternalCowItemsQuery::from_query(subquery)) + Some(SinglePathSubquery::from_query(subquery)) } else { None } } else { let last_path_item = path.len() == subquery_path.len(); let has_subquery = subquery_branch.subquery.is_some(); - Some(InternalCowItemsQuery::from_key_when_in_path( + Some(SinglePathSubquery::from_key_when_in_path( &subquery_path[path_after_top_removed.len()], last_path_item, has_subquery, @@ -357,14 +351,14 @@ impl PathQuery { // we should get return if path_after_top_removed.len() == subquery_path.len() { if let Some(subquery) = &query.default_subquery_branch.subquery { - Some(InternalCowItemsQuery::from_query(subquery)) + Some(SinglePathSubquery::from_query(subquery)) } else { None } } else { let last_path_item = path.len() == subquery_path.len(); let has_subquery = query.default_subquery_branch.subquery.is_some(); - Some(InternalCowItemsQuery::from_key_when_in_path( + Some(SinglePathSubquery::from_key_when_in_path( &subquery_path[path_after_top_removed.len()], 
last_path_item, has_subquery, @@ -397,7 +391,7 @@ impl PathQuery { match given_path_len.cmp(&self_path_len) { Ordering::Less => { if path.iter().zip(&self.path).all(|(a, b)| *a == b.as_slice()) { - Some(InternalCowItemsQuery::from_key_when_in_path( + Some(SinglePathSubquery::from_key_when_in_path( &self.path[given_path_len], false, true, @@ -408,7 +402,7 @@ impl PathQuery { } Ordering::Equal => { if path.iter().zip(&self.path).all(|(a, b)| *a == b.as_slice()) { - Some(InternalCowItemsQuery::from_path_query(self)) + Some(SinglePathSubquery::from_path_query(self)) } else { None } @@ -425,7 +419,7 @@ impl PathQuery { #[cfg(any(feature = "full", feature = "verify"))] #[derive(Debug, Clone, PartialEq)] -pub(crate) enum HasSubquery<'a> { +pub enum HasSubquery<'a> { NoSubquery, Always, Conditionally(Cow<'a, IndexMap>), @@ -465,7 +459,7 @@ impl<'a> HasSubquery<'a> { /// subquery information #[cfg(any(feature = "full", feature = "verify"))] #[derive(Debug, Clone, PartialEq)] -pub(crate) struct InternalCowItemsQuery<'a> { +pub struct SinglePathSubquery<'a> { /// Items pub items: Cow<'a, Vec>, /// Default subquery branch @@ -477,7 +471,7 @@ pub(crate) struct InternalCowItemsQuery<'a> { } #[cfg(any(feature = "full", feature = "verify"))] -impl<'a> fmt::Display for InternalCowItemsQuery<'a> { +impl<'a> fmt::Display for SinglePathSubquery<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { writeln!(f, "InternalCowItemsQuery {{")?; writeln!(f, " items: [")?; @@ -495,12 +489,7 @@ impl<'a> fmt::Display for InternalCowItemsQuery<'a> { } } -impl<'a> InternalCowItemsQuery<'a> { - /// Checks to see if we have a subquery on a specific key - pub fn has_subquery_on_key(&self, key: &[u8]) -> bool { - self.has_subquery.has_subquery_on_key(key) - } - +impl<'a> SinglePathSubquery<'a> { /// Checks to see if we have a subquery on a specific key pub fn has_subquery_or_matching_in_path_on_key(&self, key: &[u8]) -> bool { if self.has_subquery.has_subquery_on_key(key) { @@ -518,7 
+507,7 @@ impl<'a> InternalCowItemsQuery<'a> { key: &'a Vec, subquery_is_last_path_item: bool, subquery_has_inner_subquery: bool, - ) -> InternalCowItemsQuery<'a> { + ) -> SinglePathSubquery<'a> { // in this case there should be no in_path, because we are trying to get this // level of items and nothing underneath let in_path = if subquery_is_last_path_item && !subquery_has_inner_subquery { @@ -526,7 +515,7 @@ impl<'a> InternalCowItemsQuery<'a> { } else { Some(Borrowed(key)) }; - InternalCowItemsQuery { + SinglePathSubquery { items: Cow::Owned(vec![QueryItem::Key(key.clone())]), has_subquery: HasSubquery::NoSubquery, left_to_right: true, @@ -534,11 +523,11 @@ impl<'a> InternalCowItemsQuery<'a> { } } - pub fn from_path_query(path_query: &PathQuery) -> InternalCowItemsQuery { + pub fn from_path_query(path_query: &PathQuery) -> SinglePathSubquery { Self::from_query(&path_query.query.query) } - pub fn from_query(query: &Query) -> InternalCowItemsQuery { + pub fn from_query(query: &Query) -> SinglePathSubquery { let has_subquery = if query.default_subquery_branch.subquery.is_some() || query.default_subquery_branch.subquery_path.is_some() { @@ -548,7 +537,7 @@ impl<'a> InternalCowItemsQuery<'a> { } else { HasSubquery::NoSubquery }; - InternalCowItemsQuery { + SinglePathSubquery { items: Cow::Borrowed(&query.items), has_subquery, left_to_right: query.left_to_right, @@ -569,7 +558,7 @@ mod tests { use indexmap::IndexMap; use crate::{ - query::{HasSubquery, HasSubquery::NoSubquery, InternalCowItemsQuery}, + query::{HasSubquery, SinglePathSubquery}, query_result_type::QueryResultType, tests::{common::compare_result_tuples, make_deep_tree, TEST_LEAF}, Element, GroveDb, PathQuery, SizedQuery, @@ -1213,7 +1202,7 @@ mod tests { assert_eq!( first, - InternalCowItemsQuery { + SinglePathSubquery { items: Cow::Owned(vec![QueryItem::Key(root_path_key_2.clone())]), has_subquery: HasSubquery::NoSubquery, left_to_right: true, @@ -1231,7 +1220,7 @@ mod tests { assert_eq!( second, - 
InternalCowItemsQuery { + SinglePathSubquery { items: Cow::Owned(vec![QueryItem::Key(root_item_key.clone())]), has_subquery: HasSubquery::Always, /* This is correct because there's a * subquery for one item */ @@ -1254,7 +1243,7 @@ mod tests { assert_eq!( third, - InternalCowItemsQuery { + SinglePathSubquery { items: Cow::Owned(vec![QueryItem::Key(subquery_path_key_1.clone())]), has_subquery: HasSubquery::NoSubquery, left_to_right: true, @@ -1277,7 +1266,7 @@ mod tests { assert_eq!( fourth, - InternalCowItemsQuery { + SinglePathSubquery { items: Cow::Owned(vec![QueryItem::Key(subquery_path_key_2.clone())]), has_subquery: HasSubquery::NoSubquery, left_to_right: true, @@ -1301,7 +1290,7 @@ mod tests { assert_eq!( fifth, - InternalCowItemsQuery { + SinglePathSubquery { items: Cow::Owned(vec![QueryItem::Key(subquery_item_key.clone())]), has_subquery: HasSubquery::Always, /* This means that we should be able to * add items underneath */ @@ -1327,7 +1316,7 @@ mod tests { assert_eq!( sixth, - InternalCowItemsQuery { + SinglePathSubquery { items: Cow::Owned(vec![QueryItem::Key(inner_subquery_path_key.clone())]), has_subquery: HasSubquery::NoSubquery, left_to_right: true, @@ -1371,7 +1360,7 @@ mod tests { assert_eq!( first, - InternalCowItemsQuery { + SinglePathSubquery { items: Cow::Owned(vec![QueryItem::RangeFull(RangeFull)]), has_subquery: HasSubquery::Always, left_to_right: true, @@ -1389,7 +1378,7 @@ mod tests { assert_eq!( second, - InternalCowItemsQuery { + SinglePathSubquery { items: Cow::Owned(vec![QueryItem::Key(quantum_key.clone())]), has_subquery: HasSubquery::NoSubquery, left_to_right: true, @@ -1439,7 +1428,7 @@ mod tests { assert_eq!( second, - InternalCowItemsQuery { + SinglePathSubquery { items: Cow::Owned(vec![QueryItem::Key(zero_vec.clone())]), has_subquery: HasSubquery::NoSubquery, left_to_right: true, @@ -1596,7 +1585,7 @@ mod tests { assert_eq!( query, - InternalCowItemsQuery { + SinglePathSubquery { items: 
Cow::Owned(vec![QueryItem::RangeFull(RangeFull)]), has_subquery: HasSubquery::NoSubquery, left_to_right: true, diff --git a/grovedb/src/tests/common.rs b/grovedb/src/tests/common.rs index 367cde5b..2fe8dfde 100644 --- a/grovedb/src/tests/common.rs +++ b/grovedb/src/tests/common.rs @@ -2,10 +2,7 @@ use grovedb_path::SubtreePath; -use crate::{ - operations::proof::util::{ProvedPathKeyOptionalValues, ProvedPathKeyValues}, - Element, Error, -}; +use crate::{operations::proof::util::ProvedPathKeyValues, Element, Error}; /// Compare result tuples pub fn compare_result_tuples( @@ -19,17 +16,6 @@ pub fn compare_result_tuples( } } -pub fn compare_optional_value_result_tuples( - result_set: ProvedPathKeyOptionalValues, - expected_result_set: Vec<(Vec, Option>)>, -) { - assert_eq!(expected_result_set.len(), result_set.len()); - for i in 0..expected_result_set.len() { - assert_eq!(expected_result_set[i].0, result_set[i].key); - assert_eq!(expected_result_set[i].1, result_set[i].value); - } -} - fn deserialize_and_extract_item_bytes(raw_bytes: &[u8]) -> Result, Error> { let elem = Element::deserialize(raw_bytes)?; match elem { @@ -48,21 +34,4 @@ pub fn compare_result_sets(elements: &Vec>, result_set: &ProvedPathKeyVa } } -/// Compare result sets -pub fn compare_optional_value_result_sets( - elements: &Vec>>, - result_set: &ProvedPathKeyOptionalValues, -) { - for i in 0..elements.len() { - assert_eq!( - result_set[i] - .value - .as_ref() - .map(|a| deserialize_and_extract_item_bytes(a) - .expect("expected to extract item bytes")), - elements[i] - ) - } -} - pub(crate) const EMPTY_PATH: SubtreePath<'static, [u8; 0]> = SubtreePath::empty(); diff --git a/grovedb/src/versioning.rs b/grovedb/src/versioning.rs deleted file mode 100644 index 5a724afc..00000000 --- a/grovedb/src/versioning.rs +++ /dev/null @@ -1,63 +0,0 @@ -use std::io::Cursor; - -use integer_encoding::{VarInt, VarIntReader}; - -use crate::Error; - -pub(crate) const PROOF_VERSION: u32 = 1; - -/// Reads a version number 
from the given byte slice using variable-length -/// encoding. Returns a Result containing the parsed u32 version number, or an -/// Error if the data is corrupted and could not be read. -pub fn read_proof_version(mut bytes: &[u8]) -> Result { - bytes - .read_varint() - .map_err(|_| Error::CorruptedData("could not read version info".to_string())) -} - -/// Reads a version number from the given byte slice using variable-length -/// encoding, and returns the version number as well as a slice of the remaining -/// bytes. -pub fn read_and_consume_proof_version(bytes: &[u8]) -> Result<(u32, &[u8]), Error> { - let mut cursor = Cursor::new(bytes); - let version_number = cursor - .read_varint() - .map_err(|_| Error::CorruptedData("sdfs".to_string()))?; - let version_length: usize = cursor.position() as usize; - Ok((version_number, &bytes[version_length..])) -} - -/// Encodes the given version number as variable-length bytes and adds it to the -/// beginning of the given Vec, returning the modified vector. 
-pub(crate) fn prepend_version_to_bytes(mut bytes: Vec, version: u32) -> Result, Error> { - let version_bytes = version.encode_var_vec(); - bytes.splice(..0, version_bytes); - Ok(bytes) -} - -#[cfg(test)] -mod tests { - - use crate::versioning::{ - prepend_version_to_bytes, read_and_consume_proof_version, read_proof_version, - }; - - #[test] - fn read_correct_version() { - let data = vec![1, 2, 3]; - let version = 500_u32; - - // prepend the version information to the data vector - let new_data = prepend_version_to_bytes(data, version).unwrap(); - assert_eq!(new_data, [244, 3, 1, 2, 3]); - - // show that read_version doesn't consume - assert_eq!(read_proof_version(new_data.as_slice()).unwrap(), 500); - assert_eq!(new_data, [244, 3, 1, 2, 3]); - - // show that we consume the version number and return the remaining vector - let (version_number, data_vec) = read_and_consume_proof_version(&new_data).unwrap(); - assert_eq!(version_number, 500_u32); - assert_eq!(data_vec, [1, 2, 3]); - } -} diff --git a/merk/Cargo.toml b/merk/Cargo.toml index d7864897..7b050c9c 100644 --- a/merk/Cargo.toml +++ b/merk/Cargo.toml @@ -55,6 +55,7 @@ optional = true [features] default = ["full"] +proof_debug = [] full = ["rand", "time", "hex", diff --git a/merk/src/proofs/query/mod.rs b/merk/src/proofs/query/mod.rs index 6ec23177..b38472a8 100644 --- a/merk/src/proofs/query/mod.rs +++ b/merk/src/proofs/query/mod.rs @@ -135,29 +135,6 @@ impl fmt::Display for Query { } } -macro_rules! 
compare_result_tuples_not_optional { - ($result_set:expr, $expected_result_set:expr) => { - assert_eq!( - $expected_result_set.len(), - $result_set.len(), - "Result set lengths do not match" - ); - for i in 0..$expected_result_set.len() { - assert_eq!( - $expected_result_set[i].0, $result_set[i].key, - "Key mismatch at index {}", - i - ); - assert_eq!( - &$expected_result_set[i].1, - $result_set[i].value.as_ref().expect("expected value"), - "Value mismatch at index {}", - i - ); - } - }; -} - #[cfg(any(feature = "full", feature = "verify"))] impl Query { /// Creates a new query which contains no items. @@ -839,12 +816,35 @@ where #[cfg(test)] mod test { + macro_rules! compare_result_tuples_not_optional { + ($result_set:expr, $expected_result_set:expr) => { + assert_eq!( + $expected_result_set.len(), + $result_set.len(), + "Result set lengths do not match" + ); + for i in 0..$expected_result_set.len() { + assert_eq!( + $expected_result_set[i].0, $result_set[i].key, + "Key mismatch at index {}", + i + ); + assert_eq!( + &$expected_result_set[i].1, + $result_set[i].value.as_ref().expect("expected value"), + "Value mismatch at index {}", + i + ); + } + }; + } + use super::{ super::{encoding::encode_into, *}, *, }; use crate::{ - proofs::query::{query_item::QueryItem::RangeAfter, verify, verify::ProvedKeyValue}, + proofs::query::{query_item::QueryItem::RangeAfter, verify}, test_utils::make_tree_seq, tree::{NoopCommit, PanicSource, RefWalker, TreeNode}, TreeFeatureType::BasicMerkNode, diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs index 20c67541..fae1994e 100644 --- a/merk/src/proofs/query/verify.rs +++ b/merk/src/proofs/query/verify.rs @@ -266,19 +266,31 @@ impl Query { match node { Node::KV(key, value) => { - println!("Processing KV node"); + #[cfg(feature = "proof_debug")] + { + println!("Processing KV node"); + } execute_node(key, Some(value), value_hash(value).unwrap())?; } Node::KVValueHash(key, value, value_hash) => { - 
println!("Processing KVValueHash node"); + #[cfg(feature = "proof_debug")] + { + println!("Processing KVValueHash node"); + } execute_node(key, Some(value), *value_hash)?; } Node::KVDigest(key, value_hash) => { - println!("Processing KVDigest node"); + #[cfg(feature = "proof_debug")] + { + println!("Processing KVDigest node"); + } execute_node(key, None, *value_hash)?; } Node::KVRefValueHash(key, value, value_hash) => { - println!("Processing KVRefValueHash node"); + #[cfg(feature = "proof_debug")] + { + println!("Processing KVRefValueHash node"); + } execute_node(key, Some(value), *value_hash)?; } Node::Hash(_) | Node::KVHash(_) | Node::KVValueHashFeatureType(..) => { From c0f6aad3c828685b1abec44d9886d94ea8de36cb Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 08:45:09 +0700 Subject: [PATCH 23/34] clippy fixes --- merk/src/merk/restore.rs | 16 ++++----- merk/src/proofs/chunk/util.rs | 18 +++++----- merk/src/proofs/mod.rs | 4 +-- merk/src/proofs/query/map.rs | 64 +++++++++++++++++------------------ merk/src/proofs/query/mod.rs | 8 ++--- 5 files changed, 55 insertions(+), 55 deletions(-) diff --git a/merk/src/merk/restore.rs b/merk/src/merk/restore.rs index 9e26b1af..ab4d77d3 100644 --- a/merk/src/merk/restore.rs +++ b/merk/src/merk/restore.rs @@ -711,7 +711,7 @@ mod tests { // apply second chunk let new_chunk_ids = restorer .process_chunk( - &traversal_instruction_as_vec_bytes(&vec![LEFT, LEFT]), + &traversal_instruction_as_vec_bytes(&[LEFT, LEFT]), chunk, ) .unwrap(); @@ -727,7 +727,7 @@ mod tests { let (chunk, _) = chunk_producer.chunk_with_index(2).unwrap(); // apply second chunk let chunk_process_result = restorer.process_chunk( - &traversal_instruction_as_vec_bytes(&vec![LEFT, LEFT]), + &traversal_instruction_as_vec_bytes(&[LEFT, LEFT]), chunk, ); assert!(chunk_process_result.is_err()); @@ -740,7 +740,7 @@ mod tests { // but let's apply it to the wrong place let (chunk, _) = chunk_producer.chunk_with_index(4).unwrap(); let 
chunk_process_result = restorer.process_chunk( - &traversal_instruction_as_vec_bytes(&vec![LEFT, RIGHT]), + &traversal_instruction_as_vec_bytes(&[LEFT, RIGHT]), chunk, ); assert!(chunk_process_result.is_err()); @@ -756,7 +756,7 @@ mod tests { // apply second chunk let new_chunk_ids = restorer .process_chunk( - &traversal_instruction_as_vec_bytes(&vec![RIGHT, RIGHT]), + &traversal_instruction_as_vec_bytes(&[RIGHT, RIGHT]), chunk, ) .unwrap(); @@ -773,7 +773,7 @@ mod tests { // apply second chunk let new_chunk_ids = restorer .process_chunk( - &traversal_instruction_as_vec_bytes(&vec![LEFT, RIGHT]), + &traversal_instruction_as_vec_bytes(&[LEFT, RIGHT]), chunk, ) .unwrap(); @@ -790,7 +790,7 @@ mod tests { // apply second chunk let new_chunk_ids = restorer .process_chunk( - &traversal_instruction_as_vec_bytes(&vec![RIGHT, LEFT]), + &traversal_instruction_as_vec_bytes(&[RIGHT, LEFT]), chunk, ) .unwrap(); @@ -1035,7 +1035,7 @@ mod tests { // first restore the first chunk let (chunk, next_chunk_index) = chunk_producer.chunk_with_index(1).unwrap(); let new_chunk_ids = restorer - .process_chunk(&traversal_instruction_as_vec_bytes(&vec![]), chunk) + .process_chunk(&traversal_instruction_as_vec_bytes(&[]), chunk) .expect("should process chunk"); assert_eq!(new_chunk_ids.len(), 4); assert_eq!(next_chunk_index, Some(2)); @@ -1273,7 +1273,7 @@ mod tests { // first restore the first chunk let (chunk, next_chunk_index) = chunk_producer.chunk_with_index(1).unwrap(); let new_chunk_ids = restorer - .process_chunk(&traversal_instruction_as_vec_bytes(&vec![]), chunk) + .process_chunk(&traversal_instruction_as_vec_bytes(&[]), chunk) .expect("should process chunk"); assert_eq!(new_chunk_ids.len(), 4); assert_eq!(next_chunk_index, Some(2)); diff --git a/merk/src/proofs/chunk/util.rs b/merk/src/proofs/chunk/util.rs index 39c513b7..fab2024a 100644 --- a/merk/src/proofs/chunk/util.rs +++ b/merk/src/proofs/chunk/util.rs @@ -573,11 +573,11 @@ mod test { #[test] fn 
test_traversal_instruction_as_string() { - assert_eq!(traversal_instruction_as_vec_bytes(&vec![]), vec![]); - assert_eq!(traversal_instruction_as_vec_bytes(&vec![LEFT]), vec![1u8]); - assert_eq!(traversal_instruction_as_vec_bytes(&vec![RIGHT]), vec![0u8]); + assert_eq!(traversal_instruction_as_vec_bytes(&[]), vec![]); + assert_eq!(traversal_instruction_as_vec_bytes(&[LEFT]), vec![1u8]); + assert_eq!(traversal_instruction_as_vec_bytes(&[RIGHT]), vec![0u8]); assert_eq!( - traversal_instruction_as_vec_bytes(&vec![RIGHT, LEFT, LEFT, RIGHT]), + traversal_instruction_as_vec_bytes(&[RIGHT, LEFT, LEFT, RIGHT]), vec![0u8, 1u8, 1u8, 0u8] ); } @@ -585,20 +585,20 @@ mod test { #[test] fn test_instruction_string_to_traversal_instruction() { assert_eq!( - vec_bytes_as_traversal_instruction(&vec![1u8]).unwrap(), + vec_bytes_as_traversal_instruction(&[1u8]).unwrap(), vec![LEFT] ); assert_eq!( - vec_bytes_as_traversal_instruction(&vec![0u8]).unwrap(), + vec_bytes_as_traversal_instruction(&[0u8]).unwrap(), vec![RIGHT] ); assert_eq!( - vec_bytes_as_traversal_instruction(&vec![0u8, 0u8, 1u8]).unwrap(), + vec_bytes_as_traversal_instruction(&[0u8, 0u8, 1u8]).unwrap(), vec![RIGHT, RIGHT, LEFT] ); - assert!(vec_bytes_as_traversal_instruction(&vec![0u8, 0u8, 2u8]).is_err()); + assert!(vec_bytes_as_traversal_instruction(&[0u8, 0u8, 2u8]).is_err()); assert_eq!( - vec_bytes_as_traversal_instruction(&vec![]).unwrap(), + vec_bytes_as_traversal_instruction(&[]).unwrap(), Vec::::new() ); } diff --git a/merk/src/proofs/mod.rs b/merk/src/proofs/mod.rs index bdf2cdb8..45f4b2e9 100644 --- a/merk/src/proofs/mod.rs +++ b/merk/src/proofs/mod.rs @@ -126,8 +126,8 @@ impl fmt::Display for Node { fn hex_to_ascii(hex_value: &[u8]) -> String { if hex_value.len() == 1 && hex_value[0] < b"0"[0] { - hex::encode(&hex_value) + hex::encode(hex_value) } else { - String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(&hex_value)) + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| 
hex::encode(hex_value)) } } diff --git a/merk/src/proofs/query/map.rs b/merk/src/proofs/query/map.rs index 0718285b..757403a2 100644 --- a/merk/src/proofs/query/map.rs +++ b/merk/src/proofs/query/map.rs @@ -236,6 +236,38 @@ impl<'a> Iterator for Range<'a> { } } +#[cfg(feature = "full")] +/// `BTreeMapExtras` provides extra functionality to work with `BTreeMap` that +/// either missed or unstable +/// NOTE: We can easily remove this when the following feature will be rolled +/// out into stable rust: https://github.com/rust-lang/rust/issues/62924 +trait BTreeMapExtras { + type K; + type V; + + /// Returns `None` if `BTreeMap` is empty otherwise the first key-value pair + /// in the map. The key in this pair is the minimum key in the map. + fn first_key_value(&self) -> Option<(&Self::K, &Self::V)>; + + /// Returns `None` if `BTreeMap` is empty otherwise the last key-value pair + /// in the map. The key in this pair is the maximum key in the map. + fn last_key_value(&self) -> Option<(&Self::K, &Self::V)>; +} + +#[cfg(feature = "full")] +impl BTreeMapExtras for BTreeMap { + type K = KK; + type V = VV; + + fn first_key_value(&self) -> Option<(&Self::K, &Self::V)> { + self.iter().next() + } + + fn last_key_value(&self) -> Option<(&Self::K, &Self::V)> { + self.iter().next_back() + } +} + #[cfg(feature = "full")] #[cfg(test)] mod tests { @@ -368,35 +400,3 @@ mod tests { assert_eq!(range.next().unwrap().unwrap(), (&[1][..], &[1][..])); } } - -#[cfg(feature = "full")] -/// `BTreeMapExtras` provides extra functionality to work with `BTreeMap` that -/// either missed or unstable -/// NOTE: We can easily remove this when the following feature will be rolled -/// out into stable rust: https://github.com/rust-lang/rust/issues/62924 -trait BTreeMapExtras { - type K; - type V; - - /// Returns `None` if `BTreeMap` is empty otherwise the first key-value pair - /// in the map. The key in this pair is the minimum key in the map. 
- fn first_key_value(&self) -> Option<(&Self::K, &Self::V)>; - - /// Returns `None` if `BTreeMap` is empty otherwise the last key-value pair - /// in the map. The key in this pair is the maximum key in the map. - fn last_key_value(&self) -> Option<(&Self::K, &Self::V)>; -} - -#[cfg(feature = "full")] -impl BTreeMapExtras for BTreeMap { - type K = KK; - type V = VV; - - fn first_key_value(&self) -> Option<(&Self::K, &Self::V)> { - self.iter().next() - } - - fn last_key_value(&self) -> Option<(&Self::K, &Self::V)> { - self.iter().next_back() - } -} diff --git a/merk/src/proofs/query/mod.rs b/merk/src/proofs/query/mod.rs index b38472a8..37388af4 100644 --- a/merk/src/proofs/query/mod.rs +++ b/merk/src/proofs/query/mod.rs @@ -201,7 +201,7 @@ impl Query { } } } - return false; + false } pub fn has_subquery_or_subquery_path_on_key(&self, key: &[u8], in_path: bool) -> bool { @@ -218,7 +218,7 @@ impl Query { } } } - return false; + false } /// Pushes terminal key paths and keys to `result`, no more than @@ -943,7 +943,7 @@ mod test { for (key, expected_value) in keys.iter().zip(expected_result.iter()) { assert_eq!( - values.get(key).map(|a| a.as_ref()).flatten(), + values.get(key).and_then(|a| a.as_ref()), expected_value.as_ref() ); } @@ -4420,7 +4420,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 5, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )); let query_vec: Vec = query.into(); - let expected = vec![QueryItem::Range( + let expected = [QueryItem::Range( vec![0, 0, 0, 0, 0, 0, 5, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )]; assert_eq!( From 33c733504dd0293a40420c89fc1fc66e4b2f65fc Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 08:45:24 +0700 Subject: [PATCH 24/34] fmt --- merk/src/merk/restore.rs | 32 ++++++++------------------------ 1 file changed, 8 insertions(+), 24 deletions(-) diff --git a/merk/src/merk/restore.rs b/merk/src/merk/restore.rs index ab4d77d3..c5ce1286 100644 --- a/merk/src/merk/restore.rs +++ b/merk/src/merk/restore.rs @@ -710,10 +710,7 @@ mod tests { 
let (chunk, _) = chunk_producer.chunk_with_index(2).unwrap(); // apply second chunk let new_chunk_ids = restorer - .process_chunk( - &traversal_instruction_as_vec_bytes(&[LEFT, LEFT]), - chunk, - ) + .process_chunk(&traversal_instruction_as_vec_bytes(&[LEFT, LEFT]), chunk) .unwrap(); assert_eq!(new_chunk_ids.len(), 0); // chunk_map should have 1 less element @@ -726,10 +723,8 @@ mod tests { // let's try to apply the second chunk again, should not work let (chunk, _) = chunk_producer.chunk_with_index(2).unwrap(); // apply second chunk - let chunk_process_result = restorer.process_chunk( - &traversal_instruction_as_vec_bytes(&[LEFT, LEFT]), - chunk, - ); + let chunk_process_result = + restorer.process_chunk(&traversal_instruction_as_vec_bytes(&[LEFT, LEFT]), chunk); assert!(chunk_process_result.is_err()); assert!(matches!( chunk_process_result, @@ -739,10 +734,8 @@ mod tests { // next let's get a random but expected chunk and work with that e.g. chunk 4 // but let's apply it to the wrong place let (chunk, _) = chunk_producer.chunk_with_index(4).unwrap(); - let chunk_process_result = restorer.process_chunk( - &traversal_instruction_as_vec_bytes(&[LEFT, RIGHT]), - chunk, - ); + let chunk_process_result = + restorer.process_chunk(&traversal_instruction_as_vec_bytes(&[LEFT, RIGHT]), chunk); assert!(chunk_process_result.is_err()); assert!(matches!( chunk_process_result, @@ -755,10 +748,7 @@ mod tests { let (chunk, _) = chunk_producer.chunk_with_index(5).unwrap(); // apply second chunk let new_chunk_ids = restorer - .process_chunk( - &traversal_instruction_as_vec_bytes(&[RIGHT, RIGHT]), - chunk, - ) + .process_chunk(&traversal_instruction_as_vec_bytes(&[RIGHT, RIGHT]), chunk) .unwrap(); assert_eq!(new_chunk_ids.len(), 0); // chunk_map should have 1 less element @@ -772,10 +762,7 @@ mod tests { let (chunk, _) = chunk_producer.chunk_with_index(3).unwrap(); // apply second chunk let new_chunk_ids = restorer - .process_chunk( - &traversal_instruction_as_vec_bytes(&[LEFT, 
RIGHT]), - chunk, - ) + .process_chunk(&traversal_instruction_as_vec_bytes(&[LEFT, RIGHT]), chunk) .unwrap(); assert_eq!(new_chunk_ids.len(), 0); // chunk_map should have 1 less element @@ -789,10 +776,7 @@ mod tests { let (chunk, _) = chunk_producer.chunk_with_index(4).unwrap(); // apply second chunk let new_chunk_ids = restorer - .process_chunk( - &traversal_instruction_as_vec_bytes(&[RIGHT, LEFT]), - chunk, - ) + .process_chunk(&traversal_instruction_as_vec_bytes(&[RIGHT, LEFT]), chunk) .unwrap(); assert_eq!(new_chunk_ids.len(), 0); // chunk_map should have 1 less element From 4f62af2ceb943e4d3508482fd10662e955ece6e9 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 08:49:03 +0700 Subject: [PATCH 25/34] small fix --- merk/benches/merk.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/merk/benches/merk.rs b/merk/benches/merk.rs index b0f9cca4..ff0fbaef 100644 --- a/merk/benches/merk.rs +++ b/merk/benches/merk.rs @@ -408,7 +408,7 @@ pub fn prove_1m_2k_rand(c: &mut Criterion) { b.iter_with_large_drop(|| { let keys = prove_keys_per_batch[i % n_batches].clone(); - merk.prove_unchecked(keys, None, None, true) + merk.prove_unchecked(keys, None, true) .unwrap() .expect("prove failed"); i += 1; From 9827e2b570772cdb5d8c5fffd3a1ff5104e5a811 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 08:50:42 +0700 Subject: [PATCH 26/34] fmt --- grovedb/src/operations/proof/verify.rs | 129 ------------------------- 1 file changed, 129 deletions(-) diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 64a7c2d5..039123ec 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -344,135 +344,6 @@ impl GroveDb { Ok(root_hash) } - // fn verify_completeness( - // &self, - // query_items: &[QueryItem], - // result_set: &[ProvedKeyValue], - // current_path: &[Vec], - // ) -> Result<(), Error> { - // let mut result_iter = 
result_set.iter().peekable(); - // - // for query_item in query_items { - // match query_item { - // QueryItem::Key(key) => { - // if !self.verify_key_completeness(key, &mut result_iter)? { - // return Err(Error::InvalidProof(format!( - // "Key {:?} is missing and its absence is not proven", - // hex::encode(key) - // ))); - // } - // }, - // QueryItem::Range(range) => { - // self.verify_range_completeness(range, &mut result_iter)?; - // }, - // QueryItem::RangeInclusive(range) => { - // self.verify_range_inclusive_completeness(range, &mut - // result_iter)?; }, - // // Add cases for other QueryItem variants as needed - // _ => return Err(Error::InvalidProof("Unsupported query item - // type".into())), } - // } - // - // // Ensure we've consumed all results - // if result_iter.peek().is_some() { - // return Err(Error::InvalidProof("Proof contains extra, unexpected - // results".into())); } - // - // Ok(()) - // } - // - // fn verify_key_completeness( - // &self, - // key: &[u8], - // result_iter: &mut std::iter::Peekable>, ) -> Result { - // if let Some(result) = result_iter.peek() { - // if result.key == key { - // result_iter.next(); // Consume the result - // Ok(true) - // } else if result.key > key { - // // The key is missing, but this is okay as long as we can prove - // its absence self.verify_key_absence(key, result) - // } else { - // // This shouldn't happen if the result set is properly ordered - // Err(Error::InvalidProof("Result set is not properly - // ordered".into())) } - // } else { - // // We've run out of results, need to prove absence - // Err(Error::InvalidProof("Ran out of results unexpectedly".into())) - // } - // } - // - // fn verify_range_completeness( - // &self, - // range: &Range>, - // result_iter: &mut std::iter::Peekable>, ) -> Result<(), Error> { - // let mut current = range.start.clone(); - // while current < range.end { - // if !self.verify_key_completeness(¤t, result_iter)? 
{ - // return Err(Error::InvalidProof(format!( - // "Key {:?} in range is missing and its absence is not proven", - // hex::encode(¤t) - // ))); - // } - // // Move to next key. This is a simplified approach and might need to - // be adjusted // based on your key structure. - // current = increment_key(¤t); - // } - // Ok(()) - // } - // - // fn verify_range_inclusive_completeness( - // &self, - // range: &RangeInclusive>, - // result_iter: &mut std::iter::Peekable>, ) -> Result<(), Error> { - // let mut current = range.start().clone(); - // while current <= *range.end() { - // if !self.verify_key_completeness(¤t, result_iter)? { - // return Err(Error::InvalidProof(format!( - // "Key {:?} in inclusive range is missing and its absence is - // not proven", hex::encode(¤t) - // ))); - // } - // // Move to next key. This is a simplified approach and might need to - // be adjusted // based on your key structure. - // current = increment_key(¤t); - // } - // Ok(()) - // } - // - // fn verify_key_absence( - // &self, - // key: &[u8], - // next_result: &ProvedKeyValue, - // ) -> Result { - // // This function should implement the logic to verify that a key's - // absence is proven // The exact implementation will depend on how your - // system proves absences // This might involve checking the hash of the - // next present key, verifying that // there's no possible key between - // the absent key and the next present key, etc. 
- // - // // For now, we'll just return Ok(false) as a placeholder - // Ok(false) - // } - // - // fn increment_key(key: &[u8]) -> Vec { - // // This is a very simplified key incrementing function - // // You might need a more sophisticated approach depending on your key - // structure let mut new_key = key.to_vec(); - // for byte in new_key.iter_mut().rev() { - // if *byte == 255 { - // *byte = 0; - // } else { - // *byte += 1; - // break; - // } - // } - // new_key - // } - pub fn verify_query( proof: &[u8], query: &PathQuery, From 1a28e7536ac725e2f8d0f55198b44fb204b5bed9 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 08:57:48 +0700 Subject: [PATCH 27/34] reactivated test --- grovedb/src/tests/query_tests.rs | 113 +++++++++++++++++-------------- 1 file changed, 64 insertions(+), 49 deletions(-) diff --git a/grovedb/src/tests/query_tests.rs b/grovedb/src/tests/query_tests.rs index 2427abd3..e7325a6a 100644 --- a/grovedb/src/tests/query_tests.rs +++ b/grovedb/src/tests/query_tests.rs @@ -1663,14 +1663,14 @@ mod tests { let proof = db.prove_query(&path_query, None).unwrap().unwrap(); let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - println!( - "{}", - result_set - .iter() - .map(|a| a.to_string()) - .collect::>() - .join(" | ") - ); + // println!( + // "{}", + // result_set + // .iter() + // .map(|a| a.to_string()) + // .collect::>() + // .join(" | ") + // ); assert_eq!(result_set.len(), 5); compare_result_sets(&elements, &result_set); @@ -1850,14 +1850,14 @@ mod tests { let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - println!( - "{}", - result_set - .iter() - .map(|a| a.to_string()) - .collect::>() - .join(", ") - ); + // println!( + // "{}", + // result_set + // .iter() + // .map(|a| a.to_string()) + // .collect::>() + // .join(", ") + // ); 
assert_eq!(result_set.len(), 5); // TODO: verify that the result set is exactly the same @@ -2014,39 +2014,54 @@ mod tests { .expect("successful subtree insert"); // // if you don't have an item at the subquery path translation, you shouldn't // be // added to the result set. - // let mut query = Query::new(); - // query.insert_all(); - // query.set_subquery_path(vec![b"d".to_vec()]); - // - // let path = vec![TEST_LEAF.to_vec()]; - // - // let path_query = PathQuery::new_unsized(path, query.clone()); - // - // let (elements, _) = db - // .query_raw( - // &path_query, - // false, - // true, - // false, - // QueryResultType::QueryPathKeyElementTrioResultType, - // None, - // ) - // .unwrap() - // .expect("expected successful get_path_query"); - // - // assert_eq!(elements, - // QueryResultElements::from_elements(vec![PathKeyElementTrioResultItem((vec![b" - // test_leaf".to_vec(), b"a".to_vec()], b"d".to_vec(), - // Element::Tree(Some(b"d".to_vec()), None) )), - // PathKeyElementTrioResultItem((vec![b"test_leaf".to_vec(), b"b".to_vec()], - // b"d".to_vec(), Element::Tree(Some(b"j".to_vec()), None) ))])); - // - // let proof = db.prove_query(&path_query, None).unwrap().unwrap(); - // let (hash, result_set) = GroveDb::verify_query_raw(&proof, - // &path_query).unwrap(); assert_eq!(hash, - // db.root_hash(None).unwrap().unwrap()); println!("{}", - // result_set.iter().map(|a| a.to_string()).collect::>().join("| ")); - // assert_eq!(result_set.len(), 2); + let mut query = Query::new(); + query.insert_all(); + query.set_subquery_path(vec![b"d".to_vec()]); + + let path = vec![TEST_LEAF.to_vec()]; + + let path_query = PathQuery::new_unsized(path, query.clone()); + + let (elements, _) = db + .query_raw( + &path_query, + false, + true, + false, + QueryResultType::QueryPathKeyElementTrioResultType, + None, + ) + .unwrap() + .expect("expected successful get_path_query"); + + assert_eq!( + elements, + QueryResultElements::from_elements(vec![ + PathKeyElementTrioResultItem(( + 
vec![b"test_leaf".to_vec(), b"a".to_vec()], + b"d".to_vec(), + Element::Tree(Some(b"d".to_vec()), None) + )), + PathKeyElementTrioResultItem(( + vec![b"test_leaf".to_vec(), b"b".to_vec()], + b"d".to_vec(), + Element::Tree(Some(b"j".to_vec()), None) + )) + ]) + ); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + // println!( + // "{}", + // result_set + // .iter() + // .map(|a| a.to_string()) + // .collect::>() + // .join("| ") + // ); + assert_eq!(result_set.len(), 2); // apply path translation then query let mut query = Query::new(); From c1442bb4952221c844dcc940b8ae868eccc8ae47 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 08:59:47 +0700 Subject: [PATCH 28/34] cleaned up merk --- merk/src/proofs/query/verify.rs | 42 +++++++++++++++++++-------------- merk/src/test_utils/mod.rs | 1 - 2 files changed, 24 insertions(+), 19 deletions(-) diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs index fae1994e..56902ce8 100644 --- a/merk/src/proofs/query/verify.rs +++ b/merk/src/proofs/query/verify.rs @@ -73,16 +73,19 @@ impl Query { limit: Option, left_to_right: bool, ) -> CostResult<(MerkHash, ProofVerificationResult), Error> { - println!( - "executing proof with limit {:?} going {} using query {}", - limit, - if left_to_right { - "left to right" - } else { - "right to left" - }, - self - ); + #[cfg(feature = "proof_debug")] + { + println!( + "executing proof with limit {:?} going {} using query {}", + limit, + if left_to_right { + "left to right" + } else { + "right to left" + }, + self + ); + } let mut cost = OperationCost::default(); let mut output = Vec::with_capacity(self.len()); @@ -235,14 +238,17 @@ impl Query { } } } - println!( - "pushing {}", - ProvedKeyOptionalValue { - key: key.clone(), - value: Some(val.clone()), - proof: value_hash, - } - ); + 
#[cfg(feature = "proof_debug")] + { + println!( + "pushing {}", + ProvedKeyOptionalValue { + key: key.clone(), + value: Some(val.clone()), + proof: value_hash, + } + ); + } // add data to output output.push(ProvedKeyOptionalValue { key: key.clone(), diff --git a/merk/src/test_utils/mod.rs b/merk/src/test_utils/mod.rs index 49a492e2..d5d76673 100644 --- a/merk/src/test_utils/mod.rs +++ b/merk/src/test_utils/mod.rs @@ -164,7 +164,6 @@ pub fn apply_to_memonly( }) .unwrap() .expect("commit failed"); - println!("{:?}", &tree); assert_tree_invariants(&tree); tree }) From ef6f2e6150aa58ec7754d6a4fb9889b4b7d32164 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 09:00:56 +0700 Subject: [PATCH 29/34] cleaned up merk --- grovedb/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grovedb/Cargo.toml b/grovedb/Cargo.toml index 78ca1302..0deefd5e 100644 --- a/grovedb/Cargo.toml +++ b/grovedb/Cargo.toml @@ -46,7 +46,7 @@ harness = false [features] default = ["full"] -proof_debug = [] +proof_debug = ["grovedb-merk/proof_debug"] full = [ "grovedb-merk/full", "thiserror", From ded505418d3a38186fa9fe50adf980b0a2ed56fe Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 09:12:53 +0700 Subject: [PATCH 30/34] small fix --- grovedb/src/operations/proof/verify.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 039123ec..11bdd754 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -144,7 +144,7 @@ impl GroveDb { "path: {} key: {} element: {}", path_hex_to_ascii(path), hex_to_ascii(key), - e + element_string, ) }) .collect::>() From 196d9fa27fab3fe1af5b224c6e0b04c731eccccf Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 09:47:15 +0700 Subject: [PATCH 31/34] cleaned up verification imports --- grovedb/Cargo.toml | 3 +- grovedb/src/element/helpers.rs | 10 +- 
grovedb/src/element/mod.rs | 32 +---- grovedb/src/element/query.rs | 9 +- grovedb/src/lib.rs | 4 +- grovedb/src/operations/proof/generate.rs | 154 +---------------------- grovedb/src/operations/proof/mod.rs | 149 +++++++++++++++++++++- grovedb/src/operations/proof/verify.rs | 3 +- grovedb/src/query_result_type.rs | 2 +- merk/Cargo.toml | 6 +- merk/src/lib.rs | 2 +- merk/src/proofs/query/mod.rs | 8 +- merk/src/proofs/query/query_item/mod.rs | 4 +- merk/src/proofs/query/verify.rs | 13 +- 14 files changed, 190 insertions(+), 209 deletions(-) diff --git a/grovedb/Cargo.toml b/grovedb/Cargo.toml index 0deefd5e..28ba9995 100644 --- a/grovedb/Cargo.toml +++ b/grovedb/Cargo.toml @@ -23,7 +23,7 @@ derive_more = { version = "0.99.18" } integer-encoding = { version = "4.0.0", optional = true } grovedb-costs = { version = "1.0.0-rc.2", path = "../costs", optional = true } nohash-hasher = { version = "0.2.0", optional = true } -indexmap = { version = "2.2.6", optional = true } +indexmap = { version = "2.2.6"} intmap = { version = "2.0.0", optional = true } grovedb-path = { version = "1.0.0-rc.2", path = "../path" } grovedbg-types = { path = "../grovedbg-types", optional = true } @@ -57,7 +57,6 @@ full = [ "integer-encoding", "grovedb-costs", "nohash-hasher", - "indexmap", "intmap" ] visualize = [ diff --git a/grovedb/src/element/helpers.rs b/grovedb/src/element/helpers.rs index 222bafbd..59cc2563 100644 --- a/grovedb/src/element/helpers.rs +++ b/grovedb/src/element/helpers.rs @@ -43,15 +43,17 @@ use grovedb_merk::{ #[cfg(feature = "full")] use integer_encoding::VarInt; +#[cfg(feature = "full")] +use crate::reference_path::path_from_reference_path_type; #[cfg(any(feature = "full", feature = "verify"))] -use crate::reference_path::{path_from_reference_path_type, ReferencePathType}; -#[cfg(any(feature = "full", feature = "verify"))] -use crate::{element::SUM_ITEM_COST_SIZE, Element, Error}; +use crate::reference_path::ReferencePathType; #[cfg(feature = "full")] use crate::{ - 
element::{SUM_TREE_COST_SIZE, TREE_COST_SIZE}, + element::{SUM_ITEM_COST_SIZE, SUM_TREE_COST_SIZE, TREE_COST_SIZE}, ElementFlags, }; +#[cfg(any(feature = "full", feature = "verify"))] +use crate::{Element, Error}; impl Element { #[cfg(any(feature = "full", feature = "verify"))] diff --git a/grovedb/src/element/mod.rs b/grovedb/src/element/mod.rs index 90bc34ec..2469ffe2 100644 --- a/grovedb/src/element/mod.rs +++ b/grovedb/src/element/mod.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Module for subtrees handling. //! Subtrees handling is isolated so basically this module is about adapting //! Merk API to GroveDB needs. 
@@ -48,8 +20,8 @@ mod query; pub use query::QueryOptions; #[cfg(any(feature = "full", feature = "verify"))] mod serialize; -#[cfg(feature = "full")] -use core::fmt; +#[cfg(any(feature = "full", feature = "verify"))] +use std::fmt; use bincode::{Decode, Encode}; #[cfg(any(feature = "full", feature = "verify"))] diff --git a/grovedb/src/element/query.rs b/grovedb/src/element/query.rs index 46e2a9ab..2eb8f556 100644 --- a/grovedb/src/element/query.rs +++ b/grovedb/src/element/query.rs @@ -38,13 +38,17 @@ use grovedb_costs::{ }; #[cfg(feature = "full")] use grovedb_merk::proofs::query::query_item::QueryItem; +#[cfg(feature = "full")] use grovedb_merk::proofs::query::SubqueryBranch; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_merk::proofs::Query; +#[cfg(feature = "full")] use grovedb_path::SubtreePath; #[cfg(feature = "full")] use grovedb_storage::{rocksdb_storage::RocksDbStorage, RawIterator, StorageContext}; +#[cfg(feature = "full")] +use crate::operations::proof::util::hex_to_ascii; #[cfg(feature = "full")] use crate::{ element::helpers::raw_decode, @@ -58,9 +62,8 @@ use crate::{ util::{merk_optional_tx, merk_optional_tx_internal_error, storage_context_optional_tx}, Error, PathQuery, TransactionArg, }; -use crate::{operations::proof::util::hex_to_ascii, query_result_type::Path}; #[cfg(any(feature = "full", feature = "verify"))] -use crate::{Element, SizedQuery}; +use crate::{query_result_type::Path, Element, SizedQuery}; #[cfg(any(feature = "full", feature = "verify"))] #[derive(Copy, Clone, Debug)] @@ -130,6 +133,7 @@ where pub offset: &'a mut Option, } +#[cfg(feature = "full")] fn format_query(query: &Query, indent: usize) -> String { let indent_str = " ".repeat(indent); let mut output = format!("{}Query {{\n", indent_str); @@ -165,6 +169,7 @@ fn format_query(query: &Query, indent: usize) -> String { output } +#[cfg(feature = "full")] fn format_subquery_branch(branch: &SubqueryBranch, indent: usize) -> String { let indent_str = " ".repeat(indent); 
let mut output = format!("SubqueryBranch {{\n"); diff --git a/grovedb/src/lib.rs b/grovedb/src/lib.rs index d901e83c..206ace71 100644 --- a/grovedb/src/lib.rs +++ b/grovedb/src/lib.rs @@ -201,6 +201,7 @@ use grovedb_merk::{ tree::{combine_hash, value_hash}, BatchEntry, CryptoHash, KVIterator, Merk, }; +#[cfg(feature = "full")] use grovedb_path::SubtreePath; #[cfg(feature = "full")] use grovedb_storage::rocksdb_storage::PrefixedRocksDbImmediateStorageContext; @@ -226,7 +227,7 @@ use crate::element::helpers::raw_decode; pub use crate::error::Error; #[cfg(feature = "full")] use crate::util::{root_merk_optional_tx, storage_context_optional_tx}; -#[cfg(any(feature = "full", feature = "verify"))] +#[cfg(feature = "full")] use crate::Error::MerkError; #[cfg(feature = "full")] @@ -238,6 +239,7 @@ pub struct GroveDb { db: RocksDbStorage, } +#[cfg(feature = "full")] pub(crate) type SubtreePrefix = [u8; blake3::OUT_LEN]; /// Transaction diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index 41a0dbe4..5640a70c 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -1,19 +1,13 @@ //! 
Generate proof operations -use std::{collections::BTreeMap, fmt}; +use std::collections::BTreeMap; -use bincode::{Decode, Encode}; -use derive_more::From; use grovedb_costs::{ cost_return_on_error, cost_return_on_error_default, cost_return_on_error_no_add, CostResult, CostsExt, OperationCost, }; use grovedb_merk::{ - proofs::{ - encode_into, - query::{Key, QueryItem}, - Decoder, Node, Op, - }, + proofs::{encode_into, query::QueryItem, Node, Op}, tree::value_hash, Merk, ProofWithoutEncodingResult, }; @@ -22,151 +16,13 @@ use grovedb_storage::StorageContext; #[cfg(feature = "proof_debug")] use crate::query_result_type::QueryResultType; use crate::{ - operations::proof::util::{element_hex_to_ascii, hex_to_ascii}, + operations::proof::{ + util::hex_to_ascii, GroveDBProof, GroveDBProofV0, LayerProof, ProveOptions, + }, reference_path::path_from_reference_path_type, Element, Error, GroveDb, PathQuery, }; -#[derive(Debug, Clone, Copy, Encode, Decode)] -pub struct ProveOptions { - pub decrease_limit_on_empty_sub_query_result: bool, -} - -impl fmt::Display for ProveOptions { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "ProveOptions {{ decrease_limit_on_empty_sub_query_result: {} }}", - self.decrease_limit_on_empty_sub_query_result - ) - } -} - -impl Default for ProveOptions { - fn default() -> Self { - ProveOptions { - decrease_limit_on_empty_sub_query_result: true, - } - } -} - -#[derive(Encode, Decode)] -pub struct LayerProof { - pub merk_proof: Vec, - pub lower_layers: BTreeMap, -} - -#[derive(Encode, Decode, From)] -pub enum GroveDBProof { - V0(GroveDBProofV0), -} - -#[derive(Encode, Decode)] -pub struct GroveDBProofV0 { - pub root_layer: LayerProof, - pub prove_options: ProveOptions, -} - -impl fmt::Display for LayerProof { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - writeln!(f, "LayerProof {{")?; - writeln!(f, " merk_proof: {}", decode_merk_proof(&self.merk_proof))?; - if !self.lower_layers.is_empty() { - 
writeln!(f, " lower_layers: {{")?; - for (key, layer_proof) in &self.lower_layers { - writeln!(f, " {} => {{", hex_to_ascii(key))?; - for line in format!("{}", layer_proof).lines() { - writeln!(f, " {}", line)?; - } - writeln!(f, " }}")?; - } - writeln!(f, " }}")?; - } - write!(f, "}}") - } -} - -impl fmt::Display for GroveDBProof { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - GroveDBProof::V0(proof) => write!(f, "{}", proof), - } - } -} - -impl fmt::Display for GroveDBProofV0 { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - writeln!(f, "GroveDBProofV0 {{")?; - for line in format!("{}", self.root_layer).lines() { - writeln!(f, " {}", line)?; - } - write!(f, "}}") - } -} - -fn decode_merk_proof(proof: &[u8]) -> String { - let mut result = String::new(); - let ops = Decoder::new(proof); - - for (i, op) in ops.enumerate() { - match op { - Ok(op) => { - result.push_str(&format!("\n {}: {}", i, op_to_string(&op))); - } - Err(e) => { - result.push_str(&format!("\n {}: Error decoding op: {}", i, e)); - } - } - } - - result -} - -fn op_to_string(op: &Op) -> String { - match op { - Op::Push(node) => format!("Push({})", node_to_string(node)), - Op::PushInverted(node) => format!("PushInverted({})", node_to_string(node)), - Op::Parent => "Parent".to_string(), - Op::Child => "Child".to_string(), - Op::ParentInverted => "ParentInverted".to_string(), - Op::ChildInverted => "ChildInverted".to_string(), - } -} - -fn node_to_string(node: &Node) -> String { - match node { - Node::Hash(hash) => format!("Hash(HASH[{}])", hex::encode(hash)), - Node::KVHash(kv_hash) => format!("KVHash(HASH[{}])", hex::encode(kv_hash)), - Node::KV(key, value) => { - format!("KV({}, {})", hex_to_ascii(key), element_hex_to_ascii(value)) - } - Node::KVValueHash(key, value, value_hash) => format!( - "KVValueHash({}, {}, HASH[{}])", - hex_to_ascii(key), - element_hex_to_ascii(value), - hex::encode(value_hash) - ), - Node::KVDigest(key, value_hash) => format!( - 
"KVDigest({}, HASH[{}])", - hex_to_ascii(key), - hex::encode(value_hash) - ), - Node::KVRefValueHash(key, value, value_hash) => format!( - "KVRefValueHash({}, {}, HASH[{}])", - hex_to_ascii(key), - element_hex_to_ascii(value), - hex::encode(value_hash) - ), - Node::KVValueHashFeatureType(key, value, value_hash, feature_type) => format!( - "KVValueHashFeatureType({}, {}, HASH[{}], {:?})", - hex_to_ascii(key), - element_hex_to_ascii(value), - hex::encode(value_hash), - feature_type - ), - } -} - impl GroveDb { /// Prove one or more path queries. /// If we have more than one path query, we merge into a single path query diff --git a/grovedb/src/operations/proof/mod.rs b/grovedb/src/operations/proof/mod.rs index 7b69b586..306b02d8 100644 --- a/grovedb/src/operations/proof/mod.rs +++ b/grovedb/src/operations/proof/mod.rs @@ -5,5 +5,150 @@ mod generate; pub mod util; mod verify; -#[cfg(feature = "full")] -pub use generate::ProveOptions; +use std::{collections::BTreeMap, fmt}; + +use bincode::{Decode, Encode}; +use derive_more::From; +use grovedb_merk::proofs::{query::Key, Decoder, Node, Op}; + +use crate::operations::proof::util::{element_hex_to_ascii, hex_to_ascii}; + +#[derive(Debug, Clone, Copy, Encode, Decode)] +pub struct ProveOptions { + pub decrease_limit_on_empty_sub_query_result: bool, +} + +impl fmt::Display for ProveOptions { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "ProveOptions {{ decrease_limit_on_empty_sub_query_result: {} }}", + self.decrease_limit_on_empty_sub_query_result + ) + } +} + +impl Default for ProveOptions { + fn default() -> Self { + ProveOptions { + decrease_limit_on_empty_sub_query_result: true, + } + } +} + +#[derive(Encode, Decode)] +pub struct LayerProof { + pub merk_proof: Vec, + pub lower_layers: BTreeMap, +} + +#[derive(Encode, Decode, From)] +pub enum GroveDBProof { + V0(GroveDBProofV0), +} + +#[derive(Encode, Decode)] +pub struct GroveDBProofV0 { + pub root_layer: LayerProof, + pub prove_options: 
ProveOptions, +} + +impl fmt::Display for LayerProof { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "LayerProof {{")?; + writeln!(f, " merk_proof: {}", decode_merk_proof(&self.merk_proof))?; + if !self.lower_layers.is_empty() { + writeln!(f, " lower_layers: {{")?; + for (key, layer_proof) in &self.lower_layers { + writeln!(f, " {} => {{", hex_to_ascii(key))?; + for line in format!("{}", layer_proof).lines() { + writeln!(f, " {}", line)?; + } + writeln!(f, " }}")?; + } + writeln!(f, " }}")?; + } + write!(f, "}}") + } +} + +impl fmt::Display for GroveDBProof { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + GroveDBProof::V0(proof) => write!(f, "{}", proof), + } + } +} + +impl fmt::Display for GroveDBProofV0 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "GroveDBProofV0 {{")?; + for line in format!("{}", self.root_layer).lines() { + writeln!(f, " {}", line)?; + } + write!(f, "}}") + } +} + +fn decode_merk_proof(proof: &[u8]) -> String { + let mut result = String::new(); + let ops = Decoder::new(proof); + + for (i, op) in ops.enumerate() { + match op { + Ok(op) => { + result.push_str(&format!("\n {}: {}", i, op_to_string(&op))); + } + Err(e) => { + result.push_str(&format!("\n {}: Error decoding op: {}", i, e)); + } + } + } + + result +} + +fn op_to_string(op: &Op) -> String { + match op { + Op::Push(node) => format!("Push({})", node_to_string(node)), + Op::PushInverted(node) => format!("PushInverted({})", node_to_string(node)), + Op::Parent => "Parent".to_string(), + Op::Child => "Child".to_string(), + Op::ParentInverted => "ParentInverted".to_string(), + Op::ChildInverted => "ChildInverted".to_string(), + } +} + +fn node_to_string(node: &Node) -> String { + match node { + Node::Hash(hash) => format!("Hash(HASH[{}])", hex::encode(hash)), + Node::KVHash(kv_hash) => format!("KVHash(HASH[{}])", hex::encode(kv_hash)), + Node::KV(key, value) => { + format!("KV({}, {})", 
hex_to_ascii(key), element_hex_to_ascii(value)) + } + Node::KVValueHash(key, value, value_hash) => format!( + "KVValueHash({}, {}, HASH[{}])", + hex_to_ascii(key), + element_hex_to_ascii(value), + hex::encode(value_hash) + ), + Node::KVDigest(key, value_hash) => format!( + "KVDigest({}, HASH[{}])", + hex_to_ascii(key), + hex::encode(value_hash) + ), + Node::KVRefValueHash(key, value, value_hash) => format!( + "KVRefValueHash({}, {}, HASH[{}])", + hex_to_ascii(key), + element_hex_to_ascii(value), + hex::encode(value_hash) + ), + Node::KVValueHashFeatureType(key, value, value_hash, feature_type) => format!( + "KVValueHashFeatureType({}, {}, HASH[{}], {:?})", + hex_to_ascii(key), + element_hex_to_ascii(value), + hex::encode(value_hash), + feature_type + ), + } +} diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 11bdd754..e8063cea 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -15,9 +15,8 @@ use crate::operations::proof::util::{ }; use crate::{ operations::proof::{ - generate::{GroveDBProof, GroveDBProofV0, LayerProof}, util::{ProvedPathKeyOptionalValue, ProvedPathKeyValues}, - ProveOptions, + GroveDBProof, GroveDBProofV0, LayerProof, ProveOptions, }, query_result_type::PathKeyOptionalElementTrio, Element, Error, GroveDb, PathQuery, diff --git a/grovedb/src/query_result_type.rs b/grovedb/src/query_result_type.rs index 356fe92e..c050e38c 100644 --- a/grovedb/src/query_result_type.rs +++ b/grovedb/src/query_result_type.rs @@ -143,7 +143,7 @@ impl QueryResultElements { } /// From elements - pub(crate) fn from_elements(elements: Vec) -> Self { + pub fn from_elements(elements: Vec) -> Self { QueryResultElements { elements } } diff --git a/merk/Cargo.toml b/merk/Cargo.toml index 7b050c9c..2d1c65be 100644 --- a/merk/Cargo.toml +++ b/merk/Cargo.toml @@ -19,15 +19,12 @@ indexmap = "2.2.6" grovedb-costs = { version = "1.0.0-rc.2", path = "../costs" } grovedb-visualize = { 
version = "1.0.0-rc.2", path = "../visualize" } grovedb-path = { version = "1.0.0-rc.2", path = "../path" } +hex = { version = "0.4.3" } [dependencies.time] version = "0.3.34" optional = true -[dependencies.hex] -version = "0.4.3" -optional = true - [dependencies.colored] version = "2.1.0" optional = true @@ -58,7 +55,6 @@ default = ["full"] proof_debug = [] full = ["rand", "time", - "hex", "colored", "num_cpus", "byteorder", diff --git a/merk/src/lib.rs b/merk/src/lib.rs index e1a35d1d..d746a885 100644 --- a/merk/src/lib.rs +++ b/merk/src/lib.rs @@ -70,7 +70,7 @@ mod visualize; #[cfg(feature = "full")] pub use ed; -#[cfg(feature = "full")] +#[cfg(any(feature = "full", feature = "verify"))] pub use error::Error; #[cfg(feature = "full")] pub use tree::{ diff --git a/merk/src/proofs/query/mod.rs b/merk/src/proofs/query/mod.rs index 37388af4..107a1ec8 100644 --- a/merk/src/proofs/query/mod.rs +++ b/merk/src/proofs/query/mod.rs @@ -14,11 +14,11 @@ pub mod query_item; #[cfg(any(feature = "full", feature = "verify"))] mod verify; -#[cfg(any(feature = "full", feature = "verify"))] +#[cfg(feature = "full")] use std::cmp::Ordering; use std::{collections::HashSet, fmt, ops::RangeFull}; -#[cfg(any(feature = "full", feature = "verify"))] +#[cfg(feature = "full")] use grovedb_costs::{cost_return_on_error, CostContext, CostResult, CostsExt, OperationCost}; #[cfg(any(feature = "full", feature = "verify"))] use indexmap::IndexMap; @@ -28,7 +28,7 @@ pub use map::*; pub use query_item::intersect::QueryItemIntersectionResult; #[cfg(any(feature = "full", feature = "verify"))] pub use query_item::QueryItem; -#[cfg(any(feature = "full", feature = "verify"))] +#[cfg(feature = "full")] use verify::ProofAbsenceLimit; #[cfg(any(feature = "full", feature = "verify"))] pub use verify::VerifyOptions; @@ -37,7 +37,7 @@ pub use verify::{ProofVerificationResult, ProvedKeyOptionalValue, ProvedKeyValue #[cfg(feature = "full")] use {super::Op, std::collections::LinkedList}; -#[cfg(any(feature = 
"full", feature = "verify"))] +#[cfg(feature = "full")] use super::Node; #[cfg(any(feature = "full", feature = "verify"))] use crate::error::Error; diff --git a/merk/src/proofs/query/query_item/mod.rs b/merk/src/proofs/query/query_item/mod.rs index 6269c239..7c81a27e 100644 --- a/merk/src/proofs/query/query_item/mod.rs +++ b/merk/src/proofs/query/query_item/mod.rs @@ -10,7 +10,7 @@ use std::{ ops::{Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive}, }; -#[cfg(any(feature = "full", feature = "verify"))] +#[cfg(feature = "full")] use grovedb_costs::{CostContext, CostsExt, OperationCost}; #[cfg(feature = "full")] use grovedb_storage::RawIterator; @@ -427,7 +427,7 @@ impl QueryItem { } #[cfg(any(feature = "full", feature = "verify"))] - fn compare(a: &[u8], b: &[u8]) -> cmp::Ordering { + pub fn compare(a: &[u8], b: &[u8]) -> cmp::Ordering { for (ai, bi) in a.iter().zip(b.iter()) { match ai.cmp(bi) { Ordering::Equal => continue, diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs index 56902ce8..2e1727f4 100644 --- a/merk/src/proofs/query/verify.rs +++ b/merk/src/proofs/query/verify.rs @@ -1,17 +1,22 @@ -use std::{collections::LinkedList, fmt}; +#[cfg(feature = "full")] +use std::collections::LinkedList; +use std::fmt; use grovedb_costs::{cost_return_on_error, CostResult, CostsExt, OperationCost}; #[cfg(feature = "full")] -use crate::proofs::query::{Map, MapBuilder}; +use crate::proofs::{ + query::{Map, MapBuilder}, + Op, +}; use crate::{ error::Error, - proofs::{hex_to_ascii, tree::execute, Decoder, Node, Op, Query}, + proofs::{hex_to_ascii, tree::execute, Decoder, Node, Query}, tree::value_hash, CryptoHash as MerkHash, CryptoHash, }; -#[cfg(any(feature = "full", feature = "verify"))] +#[cfg(feature = "full")] pub type ProofAbsenceLimit = (LinkedList, (bool, bool), Option); #[cfg(feature = "full")] From e426f2318fe30a9254c17c282f285d886572c337 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 20:21:40 
+0700 Subject: [PATCH 32/34] clippy fixes --- grovedb/src/element/mod.rs | 8 +- grovedb/src/element/query.rs | 6 +- grovedb/src/operations/proof/util.rs | 38 +++-- grovedb/src/operations/proof/verify.rs | 6 +- grovedb/src/query/mod.rs | 195 +++++++++---------------- grovedb/src/query_result_type.rs | 12 +- grovedb/src/reference_path.rs | 6 +- 7 files changed, 101 insertions(+), 170 deletions(-) diff --git a/grovedb/src/element/mod.rs b/grovedb/src/element/mod.rs index 2469ffe2..a6add9e6 100644 --- a/grovedb/src/element/mod.rs +++ b/grovedb/src/element/mod.rs @@ -112,9 +112,7 @@ impl fmt::Display for Element { write!( f, "Tree({}{})", - root_key - .as_ref() - .map_or("None".to_string(), |k| hex::encode(k)), + root_key.as_ref().map_or("None".to_string(), hex::encode), flags .as_ref() .map_or(String::new(), |f| format!(", flags: {:?}", f)) @@ -134,9 +132,7 @@ impl fmt::Display for Element { write!( f, "SumTree({}, {}{}", - root_key - .as_ref() - .map_or("None".to_string(), |k| hex::encode(k)), + root_key.as_ref().map_or("None".to_string(), hex::encode), sum_value, flags .as_ref() diff --git a/grovedb/src/element/query.rs b/grovedb/src/element/query.rs index 2eb8f556..48d9e34d 100644 --- a/grovedb/src/element/query.rs +++ b/grovedb/src/element/query.rs @@ -172,7 +172,7 @@ fn format_query(query: &Query, indent: usize) -> String { #[cfg(feature = "full")] fn format_subquery_branch(branch: &SubqueryBranch, indent: usize) -> String { let indent_str = " ".repeat(indent); - let mut output = format!("SubqueryBranch {{\n"); + let mut output = "SubqueryBranch {{\n".to_string(); if let Some(ref path) = branch.subquery_path { output += &format!("{} subquery_path: {:?},\n", indent_str, path); @@ -201,7 +201,7 @@ where writeln!( f, " key: {}", - self.key.map_or("None".to_string(), |k| hex_to_ascii(k)) + self.key.map_or("None".to_string(), hex_to_ascii) )?; writeln!(f, " element: {}", self.element)?; writeln!( @@ -209,7 +209,7 @@ where " path: [{}]", self.path .iter() - .map(|p| 
hex_to_ascii(*p)) + .map(|p| hex_to_ascii(p)) .collect::>() .join(", ") )?; diff --git a/grovedb/src/operations/proof/util.rs b/grovedb/src/operations/proof/util.rs index b3ceb604..50480c13 100644 --- a/grovedb/src/operations/proof/util.rs +++ b/grovedb/src/operations/proof/util.rs @@ -36,23 +36,23 @@ pub struct ProvedPathKeyOptionalValue { #[cfg(any(feature = "full", feature = "verify"))] impl fmt::Display for ProvedPathKeyOptionalValue { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "ProvedPathKeyValue {{\n")?; - write!( + writeln!(f, "ProvedPathKeyValue {{")?; + writeln!( f, - " path: [{}],\n", + " path: [{}],", self.path .iter() .map(|p| hex_to_ascii(p)) .collect::>() .join(", ") )?; - write!(f, " key: {},\n", hex_to_ascii(&self.key))?; - write!( + writeln!(f, " key: {},", hex_to_ascii(&self.key))?; + writeln!( f, - " value: {},\n", + " value: {},", optional_element_hex_to_ascii(self.value.as_ref()) )?; - write!(f, " proof: {}\n", hex::encode(self.proof))?; + writeln!(f, " proof: {}", hex::encode(self.proof))?; write!(f, "}}") } } @@ -74,23 +74,19 @@ pub struct ProvedPathKeyValue { #[cfg(any(feature = "full", feature = "verify"))] impl fmt::Display for ProvedPathKeyValue { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "ProvedPathKeyValue {{\n")?; - write!( + writeln!(f, "ProvedPathKeyValue {{")?; + writeln!( f, - " path: [{}],\n", + " path: [{}],", self.path .iter() .map(|p| hex_to_ascii(p)) .collect::>() .join(", ") )?; - write!(f, " key: {},\n", hex_to_ascii(&self.key))?; - write!( - f, - " value: {},\n", - element_hex_to_ascii(self.value.as_ref()) - )?; - write!(f, " proof: {}\n", hex::encode(self.proof))?; + writeln!(f, " key: {},", hex_to_ascii(&self.key))?; + writeln!(f, " value: {},", element_hex_to_ascii(self.value.as_ref()))?; + writeln!(f, " proof: {}", hex::encode(self.proof))?; write!(f, "}}") } } @@ -294,15 +290,15 @@ pub fn hex_to_ascii(hex_value: &[u8]) -> String { if hex_value.iter().all(|&c| 
ALLOWED_CHARS.contains(&c)) { // Try to convert to UTF-8 String::from_utf8(hex_value.to_vec()) - .unwrap_or_else(|_| format!("0x{}", hex::encode(&hex_value))) + .unwrap_or_else(|_| format!("0x{}", hex::encode(hex_value))) } else { // Hex encode and prepend "0x" - format!("0x{}", hex::encode(&hex_value)) + format!("0x{}", hex::encode(hex_value)) } } pub fn path_hex_to_ascii(path: &Path) -> String { - path.into_iter() + path.iter() .map(|e| hex_to_ascii(e.as_slice())) .collect::>() .join("/") @@ -310,7 +306,7 @@ pub fn path_hex_to_ascii(path: &Path) -> String { pub fn path_as_slices_hex_to_ascii(path: &[&[u8]]) -> String { path.into_iter() - .map(|e| hex_to_ascii(*e)) + .map(|e| hex_to_ascii(e)) .collect::>() .join("/") } diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index e8063cea..4e0375e3 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -32,7 +32,7 @@ impl GroveDb { // must have a limit query.query.limit.ok_or(Error::NotSupported( "limits must be set in verify_query_with_absence_proof".to_string(), - ))? 
as usize; + ))?; } // must have no offset @@ -249,7 +249,9 @@ impl GroveDb { let mut verified_keys = BTreeSet::new(); if merk_result.result_set.is_empty() { - limit_left.as_mut().map(|limit| *limit -= 1); + if prove_options.decrease_limit_on_empty_sub_query_result { + limit_left.as_mut().map(|limit| *limit -= 1); + } } else { for proved_key_value in merk_result.result_set { let mut path = current_path.to_vec(); diff --git a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index 49f88b0c..7b4fe42a 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -276,7 +276,7 @@ impl PathQuery { } } - pub fn query_items_at_path<'a>(&'a self, path: &[&[u8]]) -> Option { + pub fn query_items_at_path(&self, path: &[&[u8]]) -> Option { fn recursive_query_items<'b>( query: &'b Query, path: &[&[u8]], @@ -299,11 +299,9 @@ impl PathQuery { .all(|(a, b)| *a == b.as_slice()) { return if path_after_top_removed.len() == subquery_path.len() { - if let Some(subquery) = &subquery_branch.subquery { - Some(SinglePathSubquery::from_query(subquery)) - } else { - None - } + subquery_branch.subquery.as_ref().map(|subquery| { + SinglePathSubquery::from_query(subquery) + }) } else { let last_path_item = path.len() == subquery_path.len(); let has_subquery = subquery_branch.subquery.is_some(); @@ -350,11 +348,11 @@ impl PathQuery { // If we are asking what is the subquery when we are at 1 / 2 // we should get return if path_after_top_removed.len() == subquery_path.len() { - if let Some(subquery) = &query.default_subquery_branch.subquery { - Some(SinglePathSubquery::from_query(subquery)) - } else { - None - } + query + .default_subquery_branch + .subquery + .as_ref() + .map(|subquery| SinglePathSubquery::from_query(subquery)) } else { let last_path_item = path.len() == subquery_path.len(); let has_subquery = query.default_subquery_branch.subquery.is_some(); @@ -482,7 +480,7 @@ impl<'a> fmt::Display for SinglePathSubquery<'a> { writeln!(f, " has_subquery: {}", self.has_subquery)?; 
writeln!(f, " left_to_right: {}", self.left_to_right)?; match &self.in_path { - Some(path) => writeln!(f, " in_path: Some({})", hex_to_ascii(&path)), + Some(path) => writeln!(f, " in_path: Some({})", hex_to_ascii(path)), None => writeln!(f, " in_path: None"), }?; write!(f, "}}") @@ -493,13 +491,11 @@ impl<'a> SinglePathSubquery<'a> { /// Checks to see if we have a subquery on a specific key pub fn has_subquery_or_matching_in_path_on_key(&self, key: &[u8]) -> bool { if self.has_subquery.has_subquery_on_key(key) { - return true; + true + } else if let Some(path) = self.in_path.as_ref() { + path.as_slice() == key } else { - if let Some(path) = self.in_path.as_ref() { - path.as_slice() == key - } else { - false - } + false } } @@ -1516,66 +1512,60 @@ mod tests { }, }; - // { - // let path = vec![]; - // let first = path_query - // .query_items_at_path(&path) - // .expect("expected query items"); - // - // assert_eq!( - // first, - // InternalCowItemsQuery { - // items: Cow::Owned(vec![ - // QueryItem::Key(vec![20]), - // QueryItem::Key(vec![96]), - // ]), - // has_subquery: - // HasSubquery::Conditionally(Cow::Borrowed(&conditional_subquery_branches)), - // left_to_right: true, - // in_path: None, - // } - // ); - // } - // - // { - // let path = vec![key_20.as_slice()]; - // let query = path_query - // .query_items_at_path(&path) - // .expect("expected query items"); - // - // assert_eq!( - // query, - // InternalCowItemsQuery { - // items: Cow::Owned(vec![ - // QueryItem::Key(identity_id.clone()), - // ]), - // has_subquery: NoSubquery, - // left_to_right: true, - // in_path: Some(Cow::Borrowed(&identity_id)), - // } - // ); - // } - // - // { - // let path = vec![key_20.as_slice(), identity_id.as_slice()]; - // let query = path_query - // .query_items_at_path(&path) - // .expect("expected query items"); - // - // assert_eq!( - // query, - // InternalCowItemsQuery { - // items: Cow::Owned(vec![ - // QueryItem::Key(vec![80]), - // QueryItem::Key(vec![0xc0]), - // ]), 
- // has_subquery: - // HasSubquery::Conditionally(Cow::Borrowed(& - // inner_conditional_subquery_branches)), left_to_right: - // true, in_path: None, - // } - // ); - // } + { + let path = vec![]; + let first = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + first, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(vec![20]), QueryItem::Key(vec![96]),]), + has_subquery: HasSubquery::Conditionally(Cow::Borrowed( + &conditional_subquery_branches + )), + left_to_right: true, + in_path: None, + } + ); + } + + { + let path = vec![key_20.as_slice()]; + let query = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + query, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(identity_id.clone()),]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: Some(Cow::Borrowed(&identity_id)), + } + ); + } + + { + let path = vec![key_20.as_slice(), identity_id.as_slice()]; + let query = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + query, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(vec![80]), QueryItem::Key(vec![0xc0]),]), + has_subquery: HasSubquery::Conditionally(Cow::Borrowed( + &inner_conditional_subquery_branches + )), + left_to_right: true, + in_path: None, + } + ); + } { let path = vec![key_20.as_slice(), identity_id.as_slice(), key_80.as_slice()]; @@ -1593,54 +1583,5 @@ mod tests { } ); } - // { - // let path = vec![ - // vec![20], - // ]; - // - // let second = path_query - // .query - // .query - // .query_items_at_path(&path) - // .expect("expected query items"); - // - // assert_eq!( - // second, - // InternalCowItemsQuery { - // items: Cow::Owned(vec![ - // QueryItem::Key(vec![80]), - // QueryItem::Key(vec![0xc0]), - // ]), - // has_subquery: HasSubquery::Always, - // left_to_right: true, - // in_path: Some(Cow::Borrowed(&vec![20])), - // } - // ); - // } - // - // { - 
// let path = vec![ - // vec![20], - // vec![80], - // ]; - // - // let third = path_query - // .query - // .query - // .query_items_at_path(&path) - // .expect("expected query items"); - // - // assert_eq!( - // third, - // InternalCowItemsQuery { - // items: Cow::Owned(vec![ - // QueryItem::RangeFull, - // ]), - // has_subquery: HasSubquery::Always, - // left_to_right: true, - // in_path: Some(Cow::Borrowed(&vec![80])), - // } - // ); - // } } } diff --git a/grovedb/src/query_result_type.rs b/grovedb/src/query_result_type.rs index c050e38c..6bf8bd5b 100644 --- a/grovedb/src/query_result_type.rs +++ b/grovedb/src/query_result_type.rs @@ -298,9 +298,7 @@ impl QueryResultElements { if let QueryResultElement::PathKeyElementTrioResultItem((path, key, element)) = result_item { - map.entry(path) - .or_insert_with(BTreeMap::new) - .insert(key, element); + map.entry(path).or_default().insert(key, element); } } @@ -316,9 +314,7 @@ impl QueryResultElements { result_item { if let Some(last) = path.pop() { - map.entry(last) - .or_insert_with(BTreeMap::new) - .insert(key, element); + map.entry(last).or_default().insert(key, element); } } } @@ -409,9 +405,9 @@ impl QueryResultElements { if let QueryResultElement::PathKeyElementTrioResultItem((mut path, key, _)) = result_item { - if let Some(_) = path.pop() { + if path.pop().is_some() { if let Some(last) = path.pop() { - map.entry(last).or_insert_with(Vec::new).push(key); + map.entry(last).or_default().push(key); } } } diff --git a/grovedb/src/reference_path.rs b/grovedb/src/reference_path.rs index e05c7507..09fc1684 100644 --- a/grovedb/src/reference_path.rs +++ b/grovedb/src/reference_path.rs @@ -87,9 +87,9 @@ pub enum ReferencePathType { } // Helper function to display paths -fn display_path(path: &Vec>) -> String { +fn display_path(path: &[Vec]) -> String { path.iter() - .map(|segment| hex::encode(segment)) + .map(hex::encode) .collect::>() .join("/") } @@ -219,7 +219,7 @@ pub fn path_from_reference_path_type>( 
no_of_elements_to_keep, mut path, ) => { - if usize::from(no_of_elements_to_keep) > current_path.len() || current_path.len() == 0 { + if usize::from(no_of_elements_to_keep) > current_path.len() || current_path.is_empty() { return Err(Error::InvalidInput( "reference stored path cannot satisfy reference constraints", )); From 0b09f9f5ba26ece08c81eb64fa9d86d20860038a Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 20:43:39 +0700 Subject: [PATCH 33/34] clippy fixes --- grovedb/src/operations/proof/generate.rs | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index 5640a70c..fe5866e8 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -170,7 +170,7 @@ impl GroveDb { .ok_or(Error::CorruptedPath(format!( "prove subqueries: path {} should be part of path_query {}", path.iter() - .map(|a| hex_to_ascii(*a)) + .map(|a| hex_to_ascii(a)) .collect::>() .join("/"), path_query @@ -199,7 +199,7 @@ impl GroveDb { println!( "generated merk proof at level path level [{}], limit is {:?}, {}", path.iter() - .map(|a| hex_to_ascii(*a)) + .map(|a| hex_to_ascii(a)) .collect::>() .join("/"), overall_limit, @@ -258,7 +258,9 @@ impl GroveDb { serialized_referenced_elem.expect("confirmed ok above"), value_hash(value).unwrap_add_cost(&mut cost), ); - overall_limit.as_mut().map(|limit| *limit -= 1); + if let Some(limit) = overall_limit.as_mut() { + *limit -= 1; + } has_a_result_at_level |= true; } Ok(Element::Item(..)) if !done_with_results => { @@ -267,7 +269,9 @@ impl GroveDb { println!("found {}", hex_to_ascii(key)); } *node = Node::KV(key.to_owned(), value.to_owned()); - overall_limit.as_mut().map(|limit| *limit -= 1); + if let Some(limit) = overall_limit.as_mut() { + *limit -= 1; + } has_a_result_at_level |= true; } Ok(Element::Tree(Some(_), _)) | Ok(Element::SumTree(Some(_), ..)) @@ -318,7 
+322,9 @@ impl GroveDb { query ); } - overall_limit.as_mut().map(|limit| *limit -= 1); + if let Some(limit) = overall_limit.as_mut() { + *limit -= 1; + } has_a_result_at_level |= true; } // todo: transform the unused trees into a Hash or KVHash to make proof @@ -345,12 +351,14 @@ impl GroveDb { println!( "no results at level {}", path.iter() - .map(|a| hex_to_ascii(*a)) + .map(|a| hex_to_ascii(a)) .collect::>() .join("/") ); } - overall_limit.as_mut().map(|limit| *limit -= 1); + if let Some(limit) = overall_limit.as_mut() { + *limit -= 1; + } } let mut serialized_merk_proof = Vec::with_capacity(1024); @@ -368,7 +376,7 @@ impl GroveDb { fn generate_merk_proof<'a, S>( &self, subtree: &'a Merk, - query_items: &Vec, + query_items: &[QueryItem], left_to_right: bool, limit: Option, ) -> CostResult From 9dba1dae5e38bfd252085a8d224b973187f5ac34 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Tue, 9 Jul 2024 20:53:28 +0700 Subject: [PATCH 34/34] added documentation --- grovedb/src/operations/proof/mod.rs | 11 +++++++++++ merk/src/proofs/query/verify.rs | 5 +++++ 2 files changed, 16 insertions(+) diff --git a/grovedb/src/operations/proof/mod.rs b/grovedb/src/operations/proof/mod.rs index 306b02d8..88243d59 100644 --- a/grovedb/src/operations/proof/mod.rs +++ b/grovedb/src/operations/proof/mod.rs @@ -15,6 +15,17 @@ use crate::operations::proof::util::{element_hex_to_ascii, hex_to_ascii}; #[derive(Debug, Clone, Copy, Encode, Decode)] pub struct ProveOptions { + /// This tells the proof system to decrease the available limit of the query + /// by 1 in the case of empty subtrees. Generally this should be set to + /// true. The case where this could be set to false is if there is a + /// known structure where we know that there are only a few empty + /// subtrees. + /// + /// !!! Warning !!! 
Be very careful:
+    /// If this is set to `false` then you must be sure that the sub queries do
+    /// not match many trees, otherwise you could crash the system as the
+    /// proof system goes through millions of subtrees and eventually runs
+    /// out of memory.
     pub decrease_limit_on_empty_sub_query_result: bool,
 }
 
diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs
index 2e1727f4..e1d56511 100644
--- a/merk/src/proofs/query/verify.rs
+++ b/merk/src/proofs/query/verify.rs
@@ -44,8 +44,13 @@ pub fn verify(bytes: &[u8], expected_hash: MerkHash) -> CostResult {
 
 #[derive(Copy, Clone, Debug)]
 pub struct VerifyOptions {
+    /// When set to true, this will give back absence proofs for any query items
+    /// that are keys. This means QueryItem::Key(), and not the ranges.
     pub absence_proofs_for_non_existing_searched_keys: bool,
+    /// Verifies that we have all the data. TODO: verify that this works
+    /// properly.
     pub verify_proof_succinctness: bool,
+    /// When set to true, empty trees will be included in the result.
     pub include_empty_trees_in_result: bool,
 }