diff --git a/grovedb/Cargo.toml b/grovedb/Cargo.toml index 992cb19ee..28ba99957 100644 --- a/grovedb/Cargo.toml +++ b/grovedb/Cargo.toml @@ -19,10 +19,11 @@ grovedb-storage = { version = "1.0.0-rc.2", path = "../storage", optional = true grovedb-visualize = { version = "1.0.0-rc.2", path = "../visualize", optional = true } hex = { version = "0.4.3"} itertools = { version = "0.12.1", optional = true } +derive_more = { version = "0.99.18" } integer-encoding = { version = "4.0.0", optional = true } grovedb-costs = { version = "1.0.0-rc.2", path = "../costs", optional = true } nohash-hasher = { version = "0.2.0", optional = true } -indexmap = { version = "2.2.6", optional = true } +indexmap = { version = "2.2.6"} intmap = { version = "2.0.0", optional = true } grovedb-path = { version = "1.0.0-rc.2", path = "../path" } grovedbg-types = { path = "../grovedbg-types", optional = true } @@ -45,6 +46,7 @@ harness = false [features] default = ["full"] +proof_debug = ["grovedb-merk/proof_debug"] full = [ "grovedb-merk/full", "thiserror", @@ -55,7 +57,6 @@ full = [ "integer-encoding", "grovedb-costs", "nohash-hasher", - "indexmap", "intmap" ] visualize = [ diff --git a/grovedb/src/batch/mod.rs b/grovedb/src/batch/mod.rs index 8674672c4..474a304ec 100644 --- a/grovedb/src/batch/mod.rs +++ b/grovedb/src/batch/mod.rs @@ -1000,7 +1000,7 @@ where let mut merk = cost_return_on_error!(&mut cost, merk_wrapped); merk.set_base_root_key(root_key) .add_cost(cost) - .map_err(|_| Error::InternalError("unable to set base root key")) + .map_err(|_| Error::InternalError("unable to set base root key".to_string())) } fn execute_ops_on_path( @@ -1804,7 +1804,7 @@ impl GroveDb { .add_cost(cost) } else { Err(Error::CorruptedPath( - "cannot open a subtree as parent exists but is not a tree", + "cannot open a subtree as parent exists but is not a tree".to_string(), )) .wrap_with_cost(OperationCost::default()) } @@ -3493,7 +3493,7 @@ mod tests { reference_key_query.insert_key(b"key1".to_vec()); let 
path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], reference_key_query); let proof = db - .prove_query(&path_query) + .prove_query(&path_query, None) .unwrap() .expect("should generate proof"); let verification_result = GroveDb::verify_query_raw(&proof, &path_query); diff --git a/grovedb/src/element/helpers.rs b/grovedb/src/element/helpers.rs index e7cb9df1f..59cc25633 100644 --- a/grovedb/src/element/helpers.rs +++ b/grovedb/src/element/helpers.rs @@ -43,15 +43,17 @@ use grovedb_merk::{ #[cfg(feature = "full")] use integer_encoding::VarInt; +#[cfg(feature = "full")] +use crate::reference_path::path_from_reference_path_type; #[cfg(any(feature = "full", feature = "verify"))] -use crate::reference_path::{path_from_reference_path_type, ReferencePathType}; -#[cfg(any(feature = "full", feature = "verify"))] -use crate::{element::SUM_ITEM_COST_SIZE, Element, Error}; +use crate::reference_path::ReferencePathType; #[cfg(feature = "full")] use crate::{ - element::{SUM_TREE_COST_SIZE, TREE_COST_SIZE}, + element::{SUM_ITEM_COST_SIZE, SUM_TREE_COST_SIZE, TREE_COST_SIZE}, ElementFlags, }; +#[cfg(any(feature = "full", feature = "verify"))] +use crate::{Element, Error}; impl Element { #[cfg(any(feature = "full", feature = "verify"))] @@ -133,9 +135,15 @@ impl Element { matches!(self, Element::SumTree(..)) } + #[cfg(any(feature = "full", feature = "verify"))] + /// Check if the element is a tree but not a sum tree + pub fn is_basic_tree(&self) -> bool { + matches!(self, Element::Tree(..)) + } + #[cfg(any(feature = "full", feature = "verify"))] /// Check if the element is a tree - pub fn is_tree(&self) -> bool { + pub fn is_any_tree(&self) -> bool { matches!(self, Element::SumTree(..) | Element::Tree(..)) } @@ -147,10 +155,16 @@ impl Element { #[cfg(any(feature = "full", feature = "verify"))] /// Check if the element is an item - pub fn is_item(&self) -> bool { + pub fn is_any_item(&self) -> bool { matches!(self, Element::Item(..) 
| Element::SumItem(..)) } + #[cfg(any(feature = "full", feature = "verify"))] + /// Check if the element is an item + pub fn is_basic_item(&self) -> bool { + matches!(self, Element::Item(..)) + } + #[cfg(any(feature = "full", feature = "verify"))] /// Check if the element is a sum item pub fn is_sum_item(&self) -> bool { diff --git a/grovedb/src/element/mod.rs b/grovedb/src/element/mod.rs index 4c29c4003..a6add9e64 100644 --- a/grovedb/src/element/mod.rs +++ b/grovedb/src/element/mod.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Module for subtrees handling. //! Subtrees handling is isolated so basically this module is about adapting //! Merk API to GroveDB needs. 
@@ -48,8 +20,8 @@ mod query; pub use query::QueryOptions; #[cfg(any(feature = "full", feature = "verify"))] mod serialize; -#[cfg(feature = "full")] -use core::fmt; +#[cfg(any(feature = "full", feature = "verify"))] +use std::fmt; use bincode::{Decode, Encode}; #[cfg(any(feature = "full", feature = "verify"))] @@ -59,6 +31,7 @@ use grovedb_merk::estimated_costs::{LAYER_COST_SIZE, SUM_LAYER_COST_SIZE}; #[cfg(feature = "full")] use grovedb_visualize::visualize_to_vec; +use crate::operations::proof::util::hex_to_ascii; #[cfg(any(feature = "full", feature = "verify"))] use crate::reference_path::ReferencePathType; @@ -111,6 +84,65 @@ pub enum Element { SumTree(Option>, SumValue, Option), } +impl fmt::Display for Element { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Element::Item(data, flags) => { + write!( + f, + "Item({}{})", + hex_to_ascii(data), + flags + .as_ref() + .map_or(String::new(), |f| format!(", flags: {:?}", f)) + ) + } + Element::Reference(path, max_hop, flags) => { + write!( + f, + "Reference({}, max_hop: {}{})", + path, + max_hop.map_or("None".to_string(), |h| h.to_string()), + flags + .as_ref() + .map_or(String::new(), |f| format!(", flags: {:?}", f)) + ) + } + Element::Tree(root_key, flags) => { + write!( + f, + "Tree({}{})", + root_key.as_ref().map_or("None".to_string(), hex::encode), + flags + .as_ref() + .map_or(String::new(), |f| format!(", flags: {:?}", f)) + ) + } + Element::SumItem(sum_value, flags) => { + write!( + f, + "SumItem({}{}", + sum_value, + flags + .as_ref() + .map_or(String::new(), |f| format!(", flags: {:?}", f)) + ) + } + Element::SumTree(root_key, sum_value, flags) => { + write!( + f, + "SumTree({}, {}{}", + root_key.as_ref().map_or("None".to_string(), hex::encode), + sum_value, + flags + .as_ref() + .map_or(String::new(), |f| format!(", flags: {:?}", f)) + ) + } + } + } +} + impl Element { pub fn type_str(&self) -> &str { match self { diff --git a/grovedb/src/element/query.rs 
b/grovedb/src/element/query.rs index c992ba268..48d9e34d4 100644 --- a/grovedb/src/element/query.rs +++ b/grovedb/src/element/query.rs @@ -29,6 +29,8 @@ //! Query //! Implements functions in Element for querying +use std::fmt; + #[cfg(feature = "full")] use grovedb_costs::{ cost_return_on_error, cost_return_on_error_no_add, CostContext, CostResult, CostsExt, @@ -36,13 +38,17 @@ use grovedb_costs::{ }; #[cfg(feature = "full")] use grovedb_merk::proofs::query::query_item::QueryItem; +#[cfg(feature = "full")] +use grovedb_merk::proofs::query::SubqueryBranch; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_merk::proofs::Query; +#[cfg(feature = "full")] use grovedb_path::SubtreePath; #[cfg(feature = "full")] use grovedb_storage::{rocksdb_storage::RocksDbStorage, RawIterator, StorageContext}; -use crate::query_result_type::Path; +#[cfg(feature = "full")] +use crate::operations::proof::util::hex_to_ascii; #[cfg(feature = "full")] use crate::{ element::helpers::raw_decode, @@ -57,7 +63,7 @@ use crate::{ Error, PathQuery, TransactionArg, }; #[cfg(any(feature = "full", feature = "verify"))] -use crate::{Element, SizedQuery}; +use crate::{query_result_type::Path, Element, SizedQuery}; #[cfg(any(feature = "full", feature = "verify"))] #[derive(Copy, Clone, Debug)] @@ -74,6 +80,26 @@ pub struct QueryOptions { pub error_if_intermediate_path_tree_not_present: bool, } +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for QueryOptions { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "QueryOptions {{")?; + writeln!(f, " allow_get_raw: {}", self.allow_get_raw)?; + writeln!(f, " allow_cache: {}", self.allow_cache)?; + writeln!( + f, + " decrease_limit_on_range_with_no_sub_elements: {}", + self.decrease_limit_on_range_with_no_sub_elements + )?; + writeln!( + f, + " error_if_intermediate_path_tree_not_present: {}", + self.error_if_intermediate_path_tree_not_present + )?; + write!(f, "}}") + } +} + #[cfg(any(feature = "full", 
feature = "verify"))] impl Default for QueryOptions { fn default() -> Self { @@ -107,6 +133,124 @@ where pub offset: &'a mut Option, } +#[cfg(feature = "full")] +fn format_query(query: &Query, indent: usize) -> String { + let indent_str = " ".repeat(indent); + let mut output = format!("{}Query {{\n", indent_str); + + output += &format!("{} items: [\n", indent_str); + for item in &query.items { + output += &format!("{} {},\n", indent_str, item); + } + output += &format!("{} ],\n", indent_str); + + output += &format!( + "{} default_subquery_branch: {}\n", + indent_str, + format_subquery_branch(&query.default_subquery_branch, indent + 2) + ); + + if let Some(ref branches) = query.conditional_subquery_branches { + output += &format!("{} conditional_subquery_branches: {{\n", indent_str); + for (item, branch) in branches { + output += &format!( + "{} {}: {},\n", + indent_str, + item, + format_subquery_branch(branch, indent + 4) + ); + } + output += &format!("{} }},\n", indent_str); + } + + output += &format!("{} left_to_right: {}\n", indent_str, query.left_to_right); + output += &format!("{}}}", indent_str); + + output +} + +#[cfg(feature = "full")] +fn format_subquery_branch(branch: &SubqueryBranch, indent: usize) -> String { + let indent_str = " ".repeat(indent); + let mut output = "SubqueryBranch {{\n".to_string(); + + if let Some(ref path) = branch.subquery_path { + output += &format!("{} subquery_path: {:?},\n", indent_str, path); + } + + if let Some(ref subquery) = branch.subquery { + output += &format!( + "{} subquery: {},\n", + indent_str, + format_query(subquery, indent + 2) + ); + } + + output += &format!("{}}}", " ".repeat(indent)); + + output +} + +#[cfg(feature = "full")] +impl<'db, 'ctx, 'a> fmt::Display for PathQueryPushArgs<'db, 'ctx, 'a> +where + 'db: 'ctx, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "PathQueryPushArgs {{")?; + writeln!( + f, + " key: {}", + self.key.map_or("None".to_string(), hex_to_ascii) + )?; + 
writeln!(f, " element: {}", self.element)?; + writeln!( + f, + " path: [{}]", + self.path + .iter() + .map(|p| hex_to_ascii(p)) + .collect::>() + .join(", ") + )?; + writeln!( + f, + " subquery_path: {}", + self.subquery_path + .as_ref() + .map_or("None".to_string(), |p| format!( + "[{}]", + p.iter() + .map(|e| hex_to_ascii(e.as_slice())) + .collect::>() + .join(", ") + )) + )?; + writeln!( + f, + " subquery: {}", + self.subquery + .as_ref() + .map_or("None".to_string(), |q| format!("\n{}", format_query(q, 4))) + )?; + writeln!(f, " left_to_right: {}", self.left_to_right)?; + writeln!(f, " query_options: {}", self.query_options)?; + writeln!(f, " result_type: {}", self.result_type)?; + writeln!( + f, + " results: [{}]", + self.results + .iter() + .map(|r| format!("{}", r)) + .collect::>() + .join(", ") + )?; + writeln!(f, " limit: {:?}", self.limit)?; + writeln!(f, " offset: {:?}", self.offset)?; + write!(f, "}}") + } +} + impl Element { #[cfg(feature = "full")] /// Returns a vector of result elements based on given query @@ -285,6 +429,8 @@ impl Element { #[cfg(feature = "full")] /// Push arguments to path query fn path_query_push(args: PathQueryPushArgs) -> CostResult<(), Error> { + // println!("path_query_push {} \n", args); + let mut cost = OperationCost::default(); let PathQueryPushArgs { @@ -308,7 +454,7 @@ impl Element { decrease_limit_on_range_with_no_sub_elements, .. 
} = query_options; - if element.is_tree() { + if element.is_any_tree() { let mut path_vec = path.to_vec(); let key = cost_return_on_error_no_add!( &cost, @@ -623,7 +769,7 @@ impl Element { } } else { Err(Error::InternalError( - "QueryItem must be a Key if not a range", + "QueryItem must be a Key if not a range".to_string(), )) } } else { @@ -698,6 +844,7 @@ impl Element { #[cfg(feature = "full")] fn basic_push(args: PathQueryPushArgs) -> Result<(), Error> { + // println!("basic_push {}", args); let PathQueryPushArgs { path, key, @@ -717,14 +864,18 @@ impl Element { results.push(QueryResultElement::ElementResultItem(element)); } QueryResultType::QueryKeyElementPairResultType => { - let key = key.ok_or(Error::CorruptedPath("basic push must have a key"))?; + let key = key.ok_or(Error::CorruptedPath( + "basic push must have a key".to_string(), + ))?; results.push(QueryResultElement::KeyElementPairResultItem(( Vec::from(key), element, ))); } QueryResultType::QueryPathKeyElementTrioResultType => { - let key = key.ok_or(Error::CorruptedPath("basic push must have a key"))?; + let key = key.ok_or(Error::CorruptedPath( + "basic push must have a key".to_string(), + ))?; let path = path.iter().map(|a| a.to_vec()).collect(); results.push(QueryResultElement::PathKeyElementTrioResultItem(( path, diff --git a/grovedb/src/error.rs b/grovedb/src/error.rs index 956b53439..c430c5ae1 100644 --- a/grovedb/src/error.rs +++ b/grovedb/src/error.rs @@ -1,9 +1,14 @@ //! 
GroveDB Errors +use std::convert::Infallible; + /// GroveDB Errors #[cfg(any(feature = "full", feature = "verify"))] #[derive(Debug, thiserror::Error)] pub enum Error { + #[error("infallible")] + /// This error can not happen, used for generics + Infallible, // Input data errors #[error("cyclic reference path")] /// Cyclic reference @@ -16,7 +21,7 @@ pub enum Error { MissingReference(String), #[error("internal error: {0}")] /// Internal error - InternalError(&'static str), + InternalError(String), #[error("invalid proof: {0}")] /// Invalid proof InvalidProof(String), @@ -62,7 +67,7 @@ pub enum Error { /// The corrupted path represents a consistency error in internal groveDB /// logic #[error("corrupted path: {0}")] - CorruptedPath(&'static str), + CorruptedPath(String), // Query errors #[error("invalid query: {0}")] @@ -85,6 +90,10 @@ pub enum Error { /// Corrupted data CorruptedData(String), + #[error("data storage error: {0}")] + /// Corrupted storage + CorruptedStorage(String), + #[error("invalid code execution error: {0}")] /// Invalid code execution InvalidCodeExecution(&'static str), @@ -139,3 +148,15 @@ pub enum Error { /// Merk error MerkError(grovedb_merk::error::Error), } + +impl From for Error { + fn from(_value: Infallible) -> Self { + Self::Infallible + } +} + +impl From for Error { + fn from(value: grovedb_merk::Error) -> Self { + Error::MerkError(value) + } +} diff --git a/grovedb/src/lib.rs b/grovedb/src/lib.rs index 9a0068eb5..206ace711 100644 --- a/grovedb/src/lib.rs +++ b/grovedb/src/lib.rs @@ -164,7 +164,6 @@ pub mod replication; mod tests; #[cfg(feature = "full")] mod util; -mod versioning; #[cfg(feature = "full")] mod visualize; @@ -202,6 +201,7 @@ use grovedb_merk::{ tree::{combine_hash, value_hash}, BatchEntry, CryptoHash, KVIterator, Merk, }; +#[cfg(feature = "full")] use grovedb_path::SubtreePath; #[cfg(feature = "full")] use grovedb_storage::rocksdb_storage::PrefixedRocksDbImmediateStorageContext; @@ -227,7 +227,7 @@ use 
crate::element::helpers::raw_decode; pub use crate::error::Error; #[cfg(feature = "full")] use crate::util::{root_merk_optional_tx, storage_context_optional_tx}; -#[cfg(any(feature = "full", feature = "verify"))] +#[cfg(feature = "full")] use crate::Error::MerkError; #[cfg(feature = "full")] @@ -239,6 +239,7 @@ pub struct GroveDb { db: RocksDbStorage, } +#[cfg(feature = "full")] pub(crate) type SubtreePrefix = [u8; blake3::OUT_LEN]; /// Transaction @@ -318,7 +319,7 @@ impl GroveDb { .add_cost(cost) } else { Err(Error::CorruptedPath( - "cannot open a subtree as parent exists but is not a tree", + "cannot open a subtree as parent exists but is not a tree".to_string(), )) .wrap_with_cost(cost) } @@ -378,7 +379,7 @@ impl GroveDb { .unwrap() } else { Err(Error::CorruptedPath( - "cannot open a subtree as parent exists but is not a tree", + "cannot open a subtree as parent exists but is not a tree".to_string(), )) } } else { @@ -438,7 +439,7 @@ impl GroveDb { .add_cost(cost) } else { Err(Error::CorruptedPath( - "cannot open a subtree as parent exists but is not a tree", + "cannot open a subtree as parent exists but is not a tree".to_string(), )) .wrap_with_cost(cost) } @@ -894,7 +895,7 @@ impl GroveDb { while let Some((key, element_value)) = element_iterator.next_kv().unwrap() { let element = raw_decode(&element_value)?; - if element.is_tree() { + if element.is_any_tree() { let (kv_value, element_value_hash) = merk .get_value_and_value_hash( &key, @@ -924,7 +925,7 @@ impl GroveDb { ); } issues.extend(self.verify_merk_and_submerks(inner_merk, &new_path_ref, batch)?); - } else if element.is_item() { + } else if element.is_any_item() { let (kv_value, element_value_hash) = merk .get_value_and_value_hash( &key, @@ -964,7 +965,7 @@ impl GroveDb { while let Some((key, element_value)) = element_iterator.next_kv().unwrap() { let element = raw_decode(&element_value)?; - if element.is_tree() { + if element.is_any_tree() { let (kv_value, element_value_hash) = merk 
.get_value_and_value_hash( &key, @@ -999,7 +1000,7 @@ impl GroveDb { batch, transaction, )?); - } else if element.is_item() { + } else if element.is_any_item() { let (kv_value, element_value_hash) = merk .get_value_and_value_hash( &key, diff --git a/grovedb/src/operations.rs b/grovedb/src/operations.rs deleted file mode 100644 index 9864b0bc5..000000000 --- a/grovedb/src/operations.rs +++ /dev/null @@ -1,45 +0,0 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - -//! 
Operations for the manipulation of GroveDB state - -#[cfg(feature = "full")] -pub(crate) mod auxiliary; -#[cfg(feature = "full")] -pub mod delete; -#[cfg(feature = "full")] -pub(crate) mod get; -#[cfg(feature = "full")] -pub mod insert; -#[cfg(feature = "full")] -pub(crate) mod is_empty_tree; -#[cfg(any(feature = "full", feature = "verify"))] -pub mod proof; - -#[cfg(feature = "full")] -pub use get::{QueryItemOrSumReturnType, MAX_REFERENCE_HOPS}; diff --git a/grovedb/src/operations/auxiliary.rs b/grovedb/src/operations/auxiliary.rs index 1b6b884d1..6f9fd576a 100644 --- a/grovedb/src/operations/auxiliary.rs +++ b/grovedb/src/operations/auxiliary.rs @@ -155,7 +155,7 @@ impl GroveDb { while let Some((key, value)) = cost_return_on_error!(&mut cost, raw_iter.next_element()) { - if value.is_tree() { + if value.is_any_tree() { let mut sub_path = q.clone(); sub_path.push(key.to_vec()); queue.push(sub_path.clone()); diff --git a/grovedb/src/operations/delete/mod.rs b/grovedb/src/operations/delete/mod.rs index 84d14652c..d13fdd612 100644 --- a/grovedb/src/operations/delete/mod.rs +++ b/grovedb/src/operations/delete/mod.rs @@ -221,7 +221,7 @@ impl GroveDb { element_iterator.next_kv().unwrap_add_cost(&mut cost) { let element = raw_decode(&element_value).unwrap(); - if element.is_tree() { + if element.is_any_tree() { if options.allow_deleting_subtrees { cost_return_on_error!( &mut cost, @@ -284,7 +284,7 @@ impl GroveDb { { let element = raw_decode(&element_value).unwrap(); if options.allow_deleting_subtrees { - if element.is_tree() { + if element.is_any_tree() { cost_return_on_error!( &mut cost, self.delete( @@ -636,7 +636,7 @@ impl GroveDb { self.open_transactional_merk_at_path(path.clone(), transaction, Some(batch)) ); let uses_sum_tree = subtree_to_delete_from.is_sum_tree; - if element.is_tree() { + if element.is_any_tree() { let subtree_merk_path = path.derive_owned_with_child(key); let subtree_merk_path_ref = SubtreePath::from(&subtree_merk_path); @@ -800,7 +800,7 @@ impl 
GroveDb { self.open_non_transactional_merk_at_path(path.clone(), Some(batch)) ); let uses_sum_tree = subtree_to_delete_from.is_sum_tree; - if element.is_tree() { + if element.is_any_tree() { let subtree_merk_path = path.derive_owned_with_child(key); let subtree_of_tree_we_are_deleting = cost_return_on_error!( &mut cost, diff --git a/grovedb/src/operations/get/mod.rs b/grovedb/src/operations/get/mod.rs index 127001061..4cc9f9491 100644 --- a/grovedb/src/operations/get/mod.rs +++ b/grovedb/src/operations/get/mod.rs @@ -143,7 +143,7 @@ impl GroveDb { }) ) } else { - return Err(Error::CorruptedPath("empty path")).wrap_with_cost(cost); + return Err(Error::CorruptedPath("empty path".to_string())).wrap_with_cost(cost); } visited.insert(current_path.clone()); match current_element { diff --git a/grovedb/src/operations/get/query.rs b/grovedb/src/operations/get/query.rs index 7e29b2332..6ba914ef5 100644 --- a/grovedb/src/operations/get/query.rs +++ b/grovedb/src/operations/get/query.rs @@ -38,7 +38,10 @@ use integer_encoding::VarInt; #[cfg(feature = "full")] use crate::element::SumValue; -use crate::{element::QueryOptions, query_result_type::PathKeyOptionalElementTrio}; +use crate::{ + element::QueryOptions, operations::proof::ProveOptions, + query_result_type::PathKeyOptionalElementTrio, +}; #[cfg(feature = "full")] use crate::{ query_result_type::{QueryResultElement, QueryResultElements, QueryResultType}, @@ -152,7 +155,7 @@ where { pub fn get_proved_path_query( &self, path_query: &PathQuery, - is_verbose: bool, + prove_options: Option, transaction: TransactionArg, ) -> CostResult, Error> { if transaction.is_some() { @@ -160,10 +163,8 @@ where { "transactions are not currently supported".to_string(), )) .wrap_with_cost(Default::default()) - } else if is_verbose { - self.prove_verbose(path_query) } else { - self.prove_query(path_query) + self.prove_query(path_query, prove_options) } } @@ -191,7 +192,7 @@ where { ) .unwrap_add_cost(cost)?; - if maybe_item.is_item() { + if 
maybe_item.is_any_item() { Ok(maybe_item) } else { Err(Error::InvalidQuery("the reference must result in an item")) @@ -1273,8 +1274,7 @@ mod tests { let path = vec![TEST_LEAF.to_vec()]; let path_query = PathQuery::new(path, SizedQuery::new(query, Some(1000), None)); - let raw_result = db - .query_raw_keys_optional(&path_query, true, true, true, None) + db.query_raw_keys_optional(&path_query, true, true, true, None) .unwrap() .expect_err( "query with subquery should error if error_if_intermediate_path_tree_not_present \ diff --git a/grovedb/src/operations/insert/mod.rs b/grovedb/src/operations/insert/mod.rs index 513e20981..5670a9393 100644 --- a/grovedb/src/operations/insert/mod.rs +++ b/grovedb/src/operations/insert/mod.rs @@ -239,7 +239,7 @@ impl GroveDb { Error::CorruptedData(String::from("unable to deserialize element")) }) ); - if element.is_tree() { + if element.is_any_tree() { return Err(Error::OverrideNotAllowed( "insertion not allowed to override tree", )) @@ -378,7 +378,7 @@ impl GroveDb { Error::CorruptedData(String::from("unable to deserialize element")) }) ); - if element.is_tree() { + if element.is_any_tree() { return Err(Error::OverrideNotAllowed( "insertion not allowed to override tree", )) diff --git a/grovedb/src/operations/mod.rs b/grovedb/src/operations/mod.rs new file mode 100644 index 000000000..ba9b85999 --- /dev/null +++ b/grovedb/src/operations/mod.rs @@ -0,0 +1,18 @@ +//! 
Operations for the manipulation of GroveDB state + +#[cfg(feature = "full")] +pub(crate) mod auxiliary; +#[cfg(feature = "full")] +pub mod delete; +#[cfg(feature = "full")] +pub(crate) mod get; +#[cfg(feature = "full")] +pub mod insert; +#[cfg(feature = "full")] +pub(crate) mod is_empty_tree; + +#[cfg(any(feature = "full", feature = "verify"))] +pub mod proof; + +#[cfg(feature = "full")] +pub use get::{QueryItemOrSumReturnType, MAX_REFERENCE_HOPS}; diff --git a/grovedb/src/operations/proof.rs b/grovedb/src/operations/proof.rs deleted file mode 100644 index 1734c6c6c..000000000 --- a/grovedb/src/operations/proof.rs +++ /dev/null @@ -1,36 +0,0 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - -//! 
Proof operations - -#[cfg(feature = "full")] -mod generate; -#[cfg(any(feature = "full", feature = "verify"))] -pub mod util; -#[cfg(any(feature = "full", feature = "verify"))] -pub mod verify; diff --git a/grovedb/src/operations/proof/generate.rs b/grovedb/src/operations/proof/generate.rs index fad64c842..fe5866e8b 100644 --- a/grovedb/src/operations/proof/generate.rs +++ b/grovedb/src/operations/proof/generate.rs @@ -1,84 +1,42 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Generate proof operations -// TODO: entire file is due for a refactor, need some kind of path generator -// that supports multiple implementations for verbose and non-verbose -// generation +use std::collections::BTreeMap; use grovedb_costs::{ cost_return_on_error, cost_return_on_error_default, cost_return_on_error_no_add, CostResult, CostsExt, OperationCost, }; use grovedb_merk::{ - proofs::{encode_into, Node, Op}, + proofs::{encode_into, query::QueryItem, Node, Op}, tree::value_hash, - KVIterator, Merk, ProofWithoutEncodingResult, + Merk, ProofWithoutEncodingResult, }; -use grovedb_path::SubtreePath; use grovedb_storage::StorageContext; +#[cfg(feature = "proof_debug")] +use crate::query_result_type::QueryResultType; use crate::{ - element::helpers::raw_decode, - operations::proof::util::{ - increase_limit_and_offset_by, reduce_limit_and_offset_by, write_slice_of_slice_to_slice, - write_slice_to_vec, write_to_vec, ProofTokenType, EMPTY_TREE_HASH, + operations::proof::{ + util::hex_to_ascii, GroveDBProof, GroveDBProofV0, LayerProof, ProveOptions, }, reference_path::path_from_reference_path_type, - versioning::{prepend_version_to_bytes, PROOF_VERSION}, - Element, Error, GroveDb, PathQuery, Query, + Element, Error, GroveDb, PathQuery, }; -type LimitOffset = (Option, Option); - impl GroveDb { /// Prove one or more path queries. - /// If we more than one path query, we merge into a single path query before - /// proving. - pub fn prove_query_many(&self, query: Vec<&PathQuery>) -> CostResult, Error> { - if query.len() > 1 { - let query = cost_return_on_error_default!(PathQuery::merge(query)); - self.prove_query(&query) - } else { - self.prove_query(query[0]) - } - } - - /// Prove one or more path queries verbose. - /// If we more than one path query, we merge into a single path query before - /// proving verbose. 
- pub fn prove_verbose_many(&self, query: Vec<&PathQuery>) -> CostResult, Error> { + /// If we have more than one path query, we merge into a single path query + /// before proving. + pub fn prove_query_many( + &self, + query: Vec<&PathQuery>, + prove_options: Option, + ) -> CostResult, Error> { if query.len() > 1 { let query = cost_return_on_error_default!(PathQuery::merge(query)); - self.prove_verbose(&query) + self.prove_query(&query, prove_options) } else { - self.prove_verbose(query[0]) + self.prove_query(query[0], prove_options) } } @@ -86,551 +44,185 @@ impl GroveDb { /// doesn't allow for subset verification /// Proofs generated with this can only be verified by the path query used /// to generate them. - pub fn prove_query(&self, query: &PathQuery) -> CostResult, Error> { - self.prove_internal(query, false) - } - - /// Generate a verbose proof for a given path query - /// Any path query that is a subset of the original proof generating path - /// query can be used to verify this (subset verification) - pub fn prove_verbose(&self, query: &PathQuery) -> CostResult, Error> { - // TODO: we need to solve the localized limit and offset problem. - // when using a path query that has a limit and offset value, - // to get the expected behaviour, you need to know exactly - // how the proving internals work and how your state looks. 
- self.prove_internal(query, true) + pub fn prove_query( + &self, + query: &PathQuery, + prove_options: Option, + ) -> CostResult, Error> { + self.prove_internal_serialized(query, prove_options) } - /// Generates a verbose or non verbose proof based on a bool - fn prove_internal(&self, query: &PathQuery, is_verbose: bool) -> CostResult, Error> { + /// Generates a proof and serializes it + fn prove_internal_serialized( + &self, + path_query: &PathQuery, + prove_options: Option, + ) -> CostResult, Error> { let mut cost = OperationCost::default(); - - let mut proof_result = - cost_return_on_error_default!(prepend_version_to_bytes(vec![], PROOF_VERSION)); - - let mut limit: Option = query.query.limit; - let mut offset: Option = query.query.offset; - - let path_slices = query.path.iter().map(|x| x.as_slice()).collect::>(); - - let subtree_exists = self - .check_subtree_exists_path_not_found(path_slices.as_slice().into(), None) - .unwrap_add_cost(&mut cost); - - // if the subtree at the given path doesn't exists, prove that this path - // doesn't point to a valid subtree - match subtree_exists { - Ok(_) => { - // subtree exists - // do nothing - } - Err(_) => { - cost_return_on_error!( - &mut cost, - self.generate_and_store_absent_path_proof( - &path_slices, - &mut proof_result, - is_verbose - ) - ); - // return the absence proof no need to continue proof generation - return Ok(proof_result).wrap_with_cost(cost); - } - } - - // if the subtree exists and the proof type is verbose we need to insert - // the path information to the proof - if is_verbose { - cost_return_on_error!( - &mut cost, - Self::generate_and_store_path_proof(path_slices.clone(), &mut proof_result) - ); + let proof = + cost_return_on_error!(&mut cost, self.prove_internal(path_query, prove_options)); + #[cfg(feature = "proof_debug")] + { + println!("constructed proof is {}", proof); } - - cost_return_on_error!( - &mut cost, - self.prove_subqueries( - &mut proof_result, - path_slices.clone(), - query, - 
&mut limit, - &mut offset, - true, - is_verbose - ) - ); - cost_return_on_error!( - &mut cost, - self.prove_path(&mut proof_result, path_slices, is_verbose) + let config = bincode::config::standard() + .with_big_endian() + .with_no_limit(); + let encoded_proof = cost_return_on_error_no_add!( + &cost, + bincode::encode_to_vec(proof, config) + .map_err(|e| Error::CorruptedData(format!("unable to encode proof {}", e))) ); - - Ok(proof_result).wrap_with_cost(cost) + Ok(encoded_proof).wrap_with_cost(cost) } - /// Perform a pre-order traversal of the tree based on the provided - /// subqueries - fn prove_subqueries( + /// Generates a proof + fn prove_internal( &self, - proofs: &mut Vec, - path: Vec<&[u8]>, - query: &PathQuery, - current_limit: &mut Option, - current_offset: &mut Option, - is_first_call: bool, - is_verbose: bool, - ) -> CostResult<(), Error> { + path_query: &PathQuery, + prove_options: Option, + ) -> CostResult { let mut cost = OperationCost::default(); - let mut to_add_to_result_set: u16 = 0; - let subtree = cost_return_on_error!( - &mut cost, - self.open_non_transactional_merk_at_path(path.as_slice().into(), None) - ); - if subtree.root_hash().unwrap_add_cost(&mut cost) == EMPTY_TREE_HASH { - cost_return_on_error_no_add!( - &cost, - write_to_vec(proofs, &[ProofTokenType::EmptyTree.into()]) - ); - return Ok(()).wrap_with_cost(cost); - } + let prove_options = prove_options.unwrap_or_default(); - let reached_limit = query.query.limit.is_some() && query.query.limit.unwrap() == 0; - if reached_limit { - if is_first_call { - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &path.as_slice().into(), - &subtree, - &query.query.query, - (*current_limit, *current_offset), - ProofTokenType::SizedMerk, - proofs, - is_verbose, - path.iter().last().unwrap_or(&(&[][..])) - ) - ); - } - return Ok(()).wrap_with_cost(cost); + if path_query.query.offset.is_some() && path_query.query.offset != Some(0) { + return Err(Error::InvalidQuery( + "proved 
path queries can not have offsets", + )) + .wrap_with_cost(cost); } - let mut is_leaf_tree = true; - - let mut offset_inc = 0; - let mut limit_inc = 0; - - let mut kv_iterator = KVIterator::new(subtree.storage.raw_iter(), &query.query.query) - .unwrap_add_cost(&mut cost); - - while let Some((key, value_bytes)) = kv_iterator.next_kv().unwrap_add_cost(&mut cost) { - let mut encountered_absence = false; - - let element = cost_return_on_error_no_add!(&cost, raw_decode(&value_bytes)); - match element { - Element::Tree(root_key, _) | Element::SumTree(root_key, ..) => { - let (mut subquery_path, subquery_value) = - Element::subquery_paths_and_value_for_sized_query(&query.query, &key); - - if subquery_value.is_none() && subquery_path.is_none() { - // this element should be added to the result set - // hence we have to update the limit and offset value - let reduced_offset = - reduce_limit_and_offset_by(current_limit, current_offset, 1); - if reduced_offset { - offset_inc += 1; - } else { - limit_inc += 1; - } - continue; - } - - if root_key.is_none() { - continue; - } - - // if the element is a non empty tree then current tree is not a leaf tree - if is_leaf_tree { - is_leaf_tree = false; - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &path.as_slice().into(), - &subtree, - &query.query.query, - (None, None), - ProofTokenType::Merk, - proofs, - is_verbose, - path.iter().last().unwrap_or(&Default::default()) - ) - ); - } - - let mut new_path = path.clone(); - new_path.push(key.as_ref()); - - let mut query = subquery_value; - - if query.is_some() { - if let Some(subquery_path) = &subquery_path { - for subkey in subquery_path.iter() { - let inner_subtree = cost_return_on_error!( - &mut cost, - self.open_non_transactional_merk_at_path( - new_path.as_slice().into(), - None, - ) - ); - - let mut key_as_query = Query::new(); - key_as_query.insert_key(subkey.clone()); - - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - 
&new_path.as_slice().into(), - &inner_subtree, - &key_as_query, - (None, None), - ProofTokenType::Merk, - proofs, - is_verbose, - new_path.iter().last().unwrap_or(&Default::default()) - ) - ); - - new_path.push(subkey); - - if self - .check_subtree_exists_path_not_found( - new_path.as_slice().into(), - None, - ) - .unwrap_add_cost(&mut cost) - .is_err() - { - encountered_absence = true; - break; - } - } - - if encountered_absence { - continue; - } - } - } else if let Some(subquery_path) = &mut subquery_path { - if subquery_path.is_empty() { - // nothing to do on this path, since subquery path is empty - // and there is no consecutive subquery value - continue; - } - - let last_key = subquery_path.remove(subquery_path.len() - 1); - - for subkey in subquery_path.iter() { - let inner_subtree = cost_return_on_error!( - &mut cost, - self.open_non_transactional_merk_at_path( - new_path.as_slice().into(), - None - ) - ); - - let mut key_as_query = Query::new(); - key_as_query.insert_key(subkey.clone()); - - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &new_path.as_slice().into(), - &inner_subtree, - &key_as_query, - (None, None), - ProofTokenType::Merk, - proofs, - is_verbose, - new_path.iter().last().unwrap_or(&Default::default()) - ) - ); - - new_path.push(subkey); - - // check if the new path points to a valid subtree - // if it does not, we should stop proof generation on this path - if self - .check_subtree_exists_path_not_found( - new_path.as_slice().into(), - None, - ) - .unwrap_add_cost(&mut cost) - .is_err() - { - encountered_absence = true; - break; - } - } - - if encountered_absence { - continue; - } - - let mut key_as_query = Query::new(); - key_as_query.insert_key(last_key); - query = Some(key_as_query); - } else { - return Err(Error::CorruptedCodeExecution("subquery_path must exist")) - .wrap_with_cost(cost); - } - - let new_path_owned = new_path.iter().map(|a| a.to_vec()).collect(); - - let new_path_query = 
PathQuery::new_unsized(new_path_owned, query.unwrap()); - - if self - .check_subtree_exists_path_not_found(new_path.as_slice().into(), None) - .unwrap_add_cost(&mut cost) - .is_err() - { - continue; - } - - cost_return_on_error!( - &mut cost, - self.prove_subqueries( - proofs, - new_path, - &new_path_query, - current_limit, - current_offset, - false, - is_verbose, - ) - ); - - if *current_limit == Some(0) { - break; - } - } - _ => { - to_add_to_result_set += 1; - } - } + if path_query.query.limit == Some(0) { + return Err(Error::InvalidQuery( + "proved path queries can not be for limit 0", + )) + .wrap_with_cost(cost); } - if is_leaf_tree { - // if no useful subtree, then we care about the result set of this subtree. - // apply the sized query - increase_limit_and_offset_by(current_limit, current_offset, limit_inc, offset_inc); - let limit_offset = cost_return_on_error!( + #[cfg(feature = "proof_debug")] + { + // we want to query raw because we want the references to not be resolved at + // this point + + let values = cost_return_on_error!( &mut cost, - self.generate_and_store_merk_proof( - &path.as_slice().into(), - &subtree, - &query.query.query, - (*current_limit, *current_offset), - ProofTokenType::SizedMerk, - proofs, - is_verbose, - path.iter().last().unwrap_or(&Default::default()) + self.query_raw( + path_query, + false, + prove_options.decrease_limit_on_empty_sub_query_result, + false, + QueryResultType::QueryPathKeyElementTrioResultType, + None ) - ); - - // update limit and offset values - *current_limit = limit_offset.0; - *current_offset = limit_offset.1; - } else { - reduce_limit_and_offset_by(current_limit, current_offset, to_add_to_result_set); - } + ) + .0; - Ok(()).wrap_with_cost(cost) - } + println!("values are {}", values); - /// Given a path, construct and append a set of proofs that shows there is - /// a valid path from the root of the db to that point. 
- fn prove_path( - &self, - proof_result: &mut Vec, - path_slices: Vec<&[u8]>, - is_verbose: bool, - ) -> CostResult<(), Error> { - let mut cost = OperationCost::default(); - - // generate proof to show that the path leads up to the root - let mut split_path = path_slices.split_last(); - while let Some((key, path_slice)) = split_path { - let subtree = cost_return_on_error!( + let precomputed_result_map = cost_return_on_error!( &mut cost, - self.open_non_transactional_merk_at_path(path_slice.into(), None) - ); - let mut query = Query::new(); - query.insert_key(key.to_vec()); - - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - &path_slice.into(), - &subtree, - &query, - (None, None), - ProofTokenType::Merk, - proof_result, - is_verbose, - path_slice.iter().last().unwrap_or(&Default::default()) + self.query_raw( + path_query, + false, + prove_options.decrease_limit_on_empty_sub_query_result, + false, + QueryResultType::QueryPathKeyElementTrioResultType, + None ) - ); - split_path = path_slice.split_last(); - } - Ok(()).wrap_with_cost(cost) - } + ) + .0 + .to_btree_map_level_results(); - /// Generates query proof given a subtree and appends the result to a proof - /// list - fn generate_and_store_merk_proof<'a, S, B>( - &self, - path: &SubtreePath, - subtree: &'a Merk, - query: &Query, - limit_offset: LimitOffset, - proof_token_type: ProofTokenType, - proofs: &mut Vec, - is_verbose: bool, - key: &[u8], - ) -> CostResult<(Option, Option), Error> - where - S: StorageContext<'a> + 'a, - B: AsRef<[u8]>, - { - if proof_token_type != ProofTokenType::Merk && proof_token_type != ProofTokenType::SizedMerk - { - return Err(Error::InvalidInput( - "expect proof type for merk proof generation to be sized or merk proof type", - )) - .wrap_with_cost(Default::default()); + println!("precomputed results are {}", precomputed_result_map); } - let mut cost = OperationCost::default(); - - // if the subtree is empty, return the EmptyTree proof op - if 
subtree.root_hash().unwrap() == EMPTY_TREE_HASH { - cost_return_on_error_no_add!( - &cost, - write_to_vec(proofs, &[ProofTokenType::EmptyTree.into()]) - ); - return Ok(limit_offset).wrap_with_cost(cost); - } + let mut limit = path_query.query.limit; - let mut proof_result = cost_return_on_error_no_add!( - &cost, - subtree - .prove_without_encoding(query.clone(), limit_offset.0, limit_offset.1) - .unwrap() - .map_err(|_e| Error::InternalError("failed to generate proof")) + let root_layer = cost_return_on_error!( + &mut cost, + self.prove_subqueries(vec![], path_query, &mut limit, &prove_options) ); - cost_return_on_error!(&mut cost, self.post_process_proof(path, &mut proof_result)); - - let mut proof_bytes = Vec::with_capacity(128); - encode_into(proof_result.proof.iter(), &mut proof_bytes); - - cost_return_on_error_no_add!(&cost, write_to_vec(proofs, &[proof_token_type.into()])); - - // if is verbose, write the key - if is_verbose { - cost_return_on_error_no_add!(&cost, write_slice_to_vec(proofs, key)); + Ok(GroveDBProofV0 { + root_layer, + prove_options, } - - // write the merk proof - cost_return_on_error_no_add!(&cost, write_slice_to_vec(proofs, &proof_bytes)); - - Ok((proof_result.limit, proof_result.offset)).wrap_with_cost(cost) - } - - /// Serializes a path and add it to the proof vector - fn generate_and_store_path_proof( - path: Vec<&[u8]>, - proofs: &mut Vec, - ) -> CostResult<(), Error> { - let cost = OperationCost::default(); - - cost_return_on_error_no_add!( - &cost, - write_to_vec(proofs, &[ProofTokenType::PathInfo.into()]) - ); - - cost_return_on_error_no_add!(&cost, write_slice_of_slice_to_slice(proofs, &path)); - - Ok(()).wrap_with_cost(cost) + .into()) + .wrap_with_cost(cost) } - fn generate_and_store_absent_path_proof( + /// Perform a pre-order traversal of the tree based on the provided + /// subqueries + fn prove_subqueries( &self, - path_slices: &[&[u8]], - proof_result: &mut Vec, - is_verbose: bool, - ) -> CostResult<(), Error> { + path: 
Vec<&[u8]>, + path_query: &PathQuery, + overall_limit: &mut Option, + prove_options: &ProveOptions, + ) -> CostResult { let mut cost = OperationCost::default(); - cost_return_on_error_no_add!( + let query = cost_return_on_error_no_add!( &cost, - write_to_vec(proof_result, &[ProofTokenType::AbsentPath.into()]) + path_query + .query_items_at_path(path.as_slice()) + .ok_or(Error::CorruptedPath(format!( + "prove subqueries: path {} should be part of path_query {}", + path.iter() + .map(|a| hex_to_ascii(a)) + .collect::>() + .join("/"), + path_query + ))) ); - let mut current_path: Vec<&[u8]> = vec![]; - let mut split_path = path_slices.split_first(); - while let Some((key, path_slice)) = split_path { - let subtree = self - .open_non_transactional_merk_at_path(current_path.as_slice().into(), None) - .unwrap_add_cost(&mut cost); + let subtree = cost_return_on_error!( + &mut cost, + self.open_non_transactional_merk_at_path(path.as_slice().into(), None) + ); - let Ok(subtree) = subtree else { - break; - }; + let limit = if path.len() < path_query.path.len() { + // There is no need for a limit because we are only asking for a single item + None + } else { + *overall_limit + }; - let has_item = Element::get(&subtree, key, true).unwrap_add_cost(&mut cost); + let mut merk_proof = cost_return_on_error!( + &mut cost, + self.generate_merk_proof(&subtree, &query.items, query.left_to_right, limit) + ); - let mut next_key_query = Query::new(); - next_key_query.insert_key(key.to_vec()); - cost_return_on_error!( - &mut cost, - self.generate_and_store_merk_proof( - ¤t_path.as_slice().into(), - &subtree, - &next_key_query, - (None, None), - ProofTokenType::Merk, - proof_result, - is_verbose, - current_path.iter().last().unwrap_or(&(&[][..])) - ) + #[cfg(feature = "proof_debug")] + { + println!( + "generated merk proof at level path level [{}], limit is {:?}, {}", + path.iter() + .map(|a| hex_to_ascii(a)) + .collect::>() + .join("/"), + overall_limit, + if query.left_to_right { + "left 
to right" + } else { + "right to left" + } ); - - current_path.push(key); - - if has_item.is_err() || path_slice.is_empty() { - // reached last key - break; - } - - split_path = path_slice.split_first(); } - Ok(()).wrap_with_cost(cost) - } + let mut lower_layers = BTreeMap::new(); - /// Converts Items to Node::KV from Node::KVValueHash - /// Converts References to Node::KVRefValueHash and sets the value to the - /// referenced element - fn post_process_proof>( - &self, - path: &SubtreePath, - proof_result: &mut ProofWithoutEncodingResult, - ) -> CostResult<(), Error> { - let mut cost = OperationCost::default(); + let mut has_a_result_at_level = false; + let mut done_with_results = false; - for op in proof_result.proof.iter_mut() { + for op in merk_proof.proof.iter_mut() { + done_with_results |= overall_limit == &Some(0); match op { Op::Push(node) | Op::PushInverted(node) => match node { - Node::KV(key, value) | Node::KVValueHash(key, value, ..) => { + Node::KV(key, value) | Node::KVValueHash(key, value, ..) 
+ if !done_with_results => + { let elem = Element::deserialize(value); match elem { Ok(Element::Reference(reference_path, ..)) => { @@ -665,11 +257,82 @@ impl GroveDb { key.to_owned(), serialized_referenced_elem.expect("confirmed ok above"), value_hash(value).unwrap_add_cost(&mut cost), - ) + ); + if let Some(limit) = overall_limit.as_mut() { + *limit -= 1; + } + has_a_result_at_level |= true; + } + Ok(Element::Item(..)) if !done_with_results => { + #[cfg(feature = "proof_debug")] + { + println!("found {}", hex_to_ascii(key)); + } + *node = Node::KV(key.to_owned(), value.to_owned()); + if let Some(limit) = overall_limit.as_mut() { + *limit -= 1; + } + has_a_result_at_level |= true; + } + Ok(Element::Tree(Some(_), _)) | Ok(Element::SumTree(Some(_), ..)) + if !done_with_results + && query.has_subquery_or_matching_in_path_on_key(key) => + { + #[cfg(feature = "proof_debug")] + { + println!( + "found tree {}, query is {}", + hex_to_ascii(key), + query + ); + } + // We only want to check in sub nodes for the proof if the tree has + // elements + let mut lower_path = path.clone(); + lower_path.push(key.as_slice()); + + let previous_limit = *overall_limit; + + let layer_proof = cost_return_on_error!( + &mut cost, + self.prove_subqueries( + lower_path, + path_query, + overall_limit, + prove_options, + ) + ); + + if previous_limit != *overall_limit { + // a lower layer updated the limit, don't subtract 1 at this + // level + has_a_result_at_level |= true; + } + lower_layers.insert(key.clone(), layer_proof); } - Ok(Element::Item(..)) => { - *node = Node::KV(key.to_owned(), value.to_owned()) + + Ok(Element::Tree(..)) | Ok(Element::SumTree(..)) + if !done_with_results => + { + #[cfg(feature = "proof_debug")] + { + println!( + "found tree {}, no subquery query is {:?}", + hex_to_ascii(key), + query + ); + } + if let Some(limit) = overall_limit.as_mut() { + *limit -= 1; + } + has_a_result_at_level |= true; } + // todo: transform the unused trees into a Hash or KVHash to make 
proof + // smaller Ok(Element::Tree(..)) if + // done_with_results => { *node = + // Node::Hash() // we are done with the + // results, we can modify the proof to alter + // } _ => continue, } } @@ -678,236 +341,61 @@ impl GroveDb { _ => continue, } } - Ok(()).wrap_with_cost(cost) - } -} - -#[cfg(test)] -mod tests { - use grovedb_merk::{execute_proof, proofs::Query}; - use grovedb_storage::StorageBatch; - - use crate::{ - operations::proof::util::{ProofReader, ProofTokenType}, - tests::{common::EMPTY_PATH, make_deep_tree, TEST_LEAF}, - GroveDb, - }; - - #[test] - fn test_path_info_encoding_and_decoding() { - let path = vec![b"a".as_slice(), b"b".as_slice(), b"c".as_slice()]; - let mut proof_vector = vec![]; - GroveDb::generate_and_store_path_proof(path.clone(), &mut proof_vector) - .unwrap() - .unwrap(); - - let mut proof_reader = ProofReader::new(proof_vector.as_slice()); - let decoded_path = proof_reader.read_path_info().unwrap(); - - assert_eq!(path, decoded_path); - } - - #[test] - fn test_reading_of_verbose_proofs() { - let db = make_deep_tree(); - let path = vec![TEST_LEAF, b"innertree"]; - let mut query = Query::new(); - query.insert_all(); + if !has_a_result_at_level + && !done_with_results + && prove_options.decrease_limit_on_empty_sub_query_result + { + #[cfg(feature = "proof_debug")] + { + println!( + "no results at level {}", + path.iter() + .map(|a| hex_to_ascii(a)) + .collect::>() + .join("/") + ); + } + if let Some(limit) = overall_limit.as_mut() { + *limit -= 1; + } + } - let batch = StorageBatch::new(); + let mut serialized_merk_proof = Vec::with_capacity(1024); + encode_into(merk_proof.proof.iter(), &mut serialized_merk_proof); - let merk = db - .open_non_transactional_merk_at_path( - [TEST_LEAF, b"innertree"].as_ref().into(), - Some(&batch), - ) - .unwrap() - .unwrap(); - let expected_root_hash = merk.root_hash().unwrap(); - - let mut proof = vec![]; - db.generate_and_store_merk_proof( - &path.as_slice().into(), - &merk, - &query, - (None, None), 
- ProofTokenType::Merk, - &mut proof, - true, - b"innertree", - ) - .unwrap() - .unwrap(); - assert_ne!(proof.len(), 0); - - let mut proof_reader = ProofReader::new(&proof); - let (proof_token_type, proof, key) = proof_reader.read_verbose_proof().unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - assert_eq!(key, Some(b"innertree".to_vec())); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, expected_root_hash); - assert_eq!(result_set.result_set.len(), 3); - - // what is the key is empty?? - let merk = db - .open_non_transactional_merk_at_path(EMPTY_PATH, Some(&batch)) - .unwrap() - .unwrap(); - let expected_root_hash = merk.root_hash().unwrap(); - - let mut proof = vec![]; - db.generate_and_store_merk_proof( - &EMPTY_PATH, - &merk, - &query, - (None, None), - ProofTokenType::Merk, - &mut proof, - true, - &[], - ) - .unwrap() - .unwrap(); - assert_ne!(proof.len(), 0); - - let mut proof_reader = ProofReader::new(&proof); - let (proof_token_type, proof, key) = proof_reader.read_verbose_proof().unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - assert_eq!(key, Some(vec![])); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, expected_root_hash); - assert_eq!(result_set.result_set.len(), 3); + Ok(LayerProof { + merk_proof: serialized_merk_proof, + lower_layers, + }) + .wrap_with_cost(cost) } - #[test] - fn test_reading_verbose_proof_at_key() { - // going to generate an array of multiple proofs with different keys - let db = make_deep_tree(); - let mut proofs = vec![]; - - let mut query = Query::new(); - query.insert_all(); - - // insert all under inner tree - let path = vec![TEST_LEAF, b"innertree"]; - - let batch = StorageBatch::new(); - - let merk = db - .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) - .unwrap() - .unwrap(); - let 
inner_tree_root_hash = merk.root_hash().unwrap(); - db.generate_and_store_merk_proof( - &path.as_slice().into(), - &merk, - &query, - (None, None), - ProofTokenType::Merk, - &mut proofs, - true, - path.iter().last().unwrap_or(&(&[][..])), - ) - .unwrap() - .unwrap(); - - // insert all under innertree4 - let path = vec![TEST_LEAF, b"innertree4"]; - let merk = db - .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) - .unwrap() - .unwrap(); - let inner_tree_4_root_hash = merk.root_hash().unwrap(); - db.generate_and_store_merk_proof( - &path.as_slice().into(), - &merk, - &query, - (None, None), - ProofTokenType::Merk, - &mut proofs, - true, - path.iter().last().unwrap_or(&(&[][..])), - ) - .unwrap() - .unwrap(); - - // insert all for deeper_1 - let path: Vec<&[u8]> = vec![b"deep_leaf", b"deep_node_1", b"deeper_1"]; - let merk = db - .open_non_transactional_merk_at_path(path.as_slice().into(), Some(&batch)) - .unwrap() - .unwrap(); - let deeper_1_root_hash = merk.root_hash().unwrap(); - db.generate_and_store_merk_proof( - &path.as_slice().into(), - &merk, - &query, - (None, None), - ProofTokenType::Merk, - &mut proofs, - true, - path.iter().last().unwrap_or(&(&[][..])), - ) - .unwrap() - .unwrap(); - - // read the proof at innertree - let contextual_proof = proofs.clone(); - let mut proof_reader = ProofReader::new(&contextual_proof); - let (proof_token_type, proof) = proof_reader - .read_verbose_proof_at_key(b"innertree") - .unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, inner_tree_root_hash); - assert_eq!(result_set.result_set.len(), 3); - - // read the proof at innertree4 - let contextual_proof = proofs.clone(); - let mut proof_reader = ProofReader::new(&contextual_proof); - let (proof_token_type, proof) = proof_reader - .read_verbose_proof_at_key(b"innertree4") - .unwrap(); - - 
assert_eq!(proof_token_type, ProofTokenType::Merk); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, inner_tree_4_root_hash); - assert_eq!(result_set.result_set.len(), 2); - - // read the proof at deeper_1 - let contextual_proof = proofs.clone(); - let mut proof_reader = ProofReader::new(&contextual_proof); - let (proof_token_type, proof) = - proof_reader.read_verbose_proof_at_key(b"deeper_1").unwrap(); - - assert_eq!(proof_token_type, ProofTokenType::Merk); - - let (root_hash, result_set) = execute_proof(&proof, &query, None, None, true) - .unwrap() - .unwrap(); - assert_eq!(root_hash, deeper_1_root_hash); - assert_eq!(result_set.result_set.len(), 3); - - // read the proof at an invalid key - let contextual_proof = proofs.clone(); - let mut proof_reader = ProofReader::new(&contextual_proof); - let reading_result = proof_reader.read_verbose_proof_at_key(b"unknown_key"); - assert!(reading_result.is_err()) + /// Generates query proof given a subtree and appends the result to a proof + /// list + fn generate_merk_proof<'a, S>( + &self, + subtree: &'a Merk, + query_items: &[QueryItem], + left_to_right: bool, + limit: Option, + ) -> CostResult + where + S: StorageContext<'a> + 'a, + { + subtree + .prove_unchecked_query_items(query_items, limit, left_to_right) + .map_ok(|(proof, limit)| ProofWithoutEncodingResult::new(proof, limit)) + .map_err(|e| { + Error::InternalError(format!( + "failed to generate proof for query_items [{}] error is : {}", + query_items + .iter() + .map(|e| e.to_string()) + .collect::>() + .join(", "), + e + )) + }) } } diff --git a/grovedb/src/operations/proof/mod.rs b/grovedb/src/operations/proof/mod.rs new file mode 100644 index 000000000..88243d59d --- /dev/null +++ b/grovedb/src/operations/proof/mod.rs @@ -0,0 +1,165 @@ +//! 
Proof operations + +#[cfg(feature = "full")] +mod generate; +pub mod util; +mod verify; + +use std::{collections::BTreeMap, fmt}; + +use bincode::{Decode, Encode}; +use derive_more::From; +use grovedb_merk::proofs::{query::Key, Decoder, Node, Op}; + +use crate::operations::proof::util::{element_hex_to_ascii, hex_to_ascii}; + +#[derive(Debug, Clone, Copy, Encode, Decode)] +pub struct ProveOptions { + /// This tells the proof system to decrease the available limit of the query + /// by 1 in the case of empty subtrees. Generally this should be set to + /// true. The case where this could be set to false is if there is a + /// known structure where we know that there are only a few empty + /// subtrees. + /// + /// !!! Warning !!! Be very careful: + /// If this is set to `false` then you must be sure that the sub queries do + /// not match many trees, Otherwise you could crash the system as the + /// proof system goes through millions of subtrees and eventually runs + /// out of memory + pub decrease_limit_on_empty_sub_query_result: bool, +} + +impl fmt::Display for ProveOptions { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "ProveOptions {{ decrease_limit_on_empty_sub_query_result: {} }}", + self.decrease_limit_on_empty_sub_query_result + ) + } +} + +impl Default for ProveOptions { + fn default() -> Self { + ProveOptions { + decrease_limit_on_empty_sub_query_result: true, + } + } +} + +#[derive(Encode, Decode)] +pub struct LayerProof { + pub merk_proof: Vec, + pub lower_layers: BTreeMap, +} + +#[derive(Encode, Decode, From)] +pub enum GroveDBProof { + V0(GroveDBProofV0), +} + +#[derive(Encode, Decode)] +pub struct GroveDBProofV0 { + pub root_layer: LayerProof, + pub prove_options: ProveOptions, +} + +impl fmt::Display for LayerProof { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "LayerProof {{")?; + writeln!(f, " merk_proof: {}", decode_merk_proof(&self.merk_proof))?; + if !self.lower_layers.is_empty() { + 
writeln!(f, " lower_layers: {{")?; + for (key, layer_proof) in &self.lower_layers { + writeln!(f, " {} => {{", hex_to_ascii(key))?; + for line in format!("{}", layer_proof).lines() { + writeln!(f, " {}", line)?; + } + writeln!(f, " }}")?; + } + writeln!(f, " }}")?; + } + write!(f, "}}") + } +} + +impl fmt::Display for GroveDBProof { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + GroveDBProof::V0(proof) => write!(f, "{}", proof), + } + } +} + +impl fmt::Display for GroveDBProofV0 { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "GroveDBProofV0 {{")?; + for line in format!("{}", self.root_layer).lines() { + writeln!(f, " {}", line)?; + } + write!(f, "}}") + } +} + +fn decode_merk_proof(proof: &[u8]) -> String { + let mut result = String::new(); + let ops = Decoder::new(proof); + + for (i, op) in ops.enumerate() { + match op { + Ok(op) => { + result.push_str(&format!("\n {}: {}", i, op_to_string(&op))); + } + Err(e) => { + result.push_str(&format!("\n {}: Error decoding op: {}", i, e)); + } + } + } + + result +} + +fn op_to_string(op: &Op) -> String { + match op { + Op::Push(node) => format!("Push({})", node_to_string(node)), + Op::PushInverted(node) => format!("PushInverted({})", node_to_string(node)), + Op::Parent => "Parent".to_string(), + Op::Child => "Child".to_string(), + Op::ParentInverted => "ParentInverted".to_string(), + Op::ChildInverted => "ChildInverted".to_string(), + } +} + +fn node_to_string(node: &Node) -> String { + match node { + Node::Hash(hash) => format!("Hash(HASH[{}])", hex::encode(hash)), + Node::KVHash(kv_hash) => format!("KVHash(HASH[{}])", hex::encode(kv_hash)), + Node::KV(key, value) => { + format!("KV({}, {})", hex_to_ascii(key), element_hex_to_ascii(value)) + } + Node::KVValueHash(key, value, value_hash) => format!( + "KVValueHash({}, {}, HASH[{}])", + hex_to_ascii(key), + element_hex_to_ascii(value), + hex::encode(value_hash) + ), + Node::KVDigest(key, value_hash) => format!( + 
"KVDigest({}, HASH[{}])", + hex_to_ascii(key), + hex::encode(value_hash) + ), + Node::KVRefValueHash(key, value, value_hash) => format!( + "KVRefValueHash({}, {}, HASH[{}])", + hex_to_ascii(key), + element_hex_to_ascii(value), + hex::encode(value_hash) + ), + Node::KVValueHashFeatureType(key, value, value_hash, feature_type) => format!( + "KVValueHashFeatureType({}, {}, HASH[{}], {:?})", + hex_to_ascii(key), + element_hex_to_ascii(value), + hex::encode(value_hash), + feature_type + ), + } +} diff --git a/grovedb/src/operations/proof/util.rs b/grovedb/src/operations/proof/util.rs index 82e8c5855..50480c13b 100644 --- a/grovedb/src/operations/proof/util.rs +++ b/grovedb/src/operations/proof/util.rs @@ -1,412 +1,165 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. 
- use std::fmt; -#[cfg(any(feature = "full", feature = "verify"))] -use std::io::Read; -#[cfg(feature = "full")] -use std::io::Write; use grovedb_merk::{ - proofs::query::{Key, Path, ProvedKeyValue}, - CryptoHash, + proofs::query::{Key, Path, ProvedKeyOptionalValue, ProvedKeyValue}, + CryptoHash, Error, }; -#[cfg(any(feature = "full", feature = "verify"))] -use integer_encoding::{VarInt, VarIntReader}; -#[cfg(any(feature = "full", feature = "verify"))] -use crate::Error; -use crate::{operations::proof::verify::ProvedKeyValues, reference_path::ReferencePathType}; +use crate::Element; #[cfg(any(feature = "full", feature = "verify"))] -pub const EMPTY_TREE_HASH: [u8; 32] = [0; 32]; - -pub type ProofTokenInfo = (ProofTokenType, Vec, Option>); +pub type ProvedKeyValues = Vec; #[cfg(any(feature = "full", feature = "verify"))] -#[derive(Debug, PartialEq, Eq)] -/// Proof type -// TODO: there might be a better name for this -pub enum ProofTokenType { - Merk, - SizedMerk, - EmptyTree, - AbsentPath, - PathInfo, - Invalid, -} +pub type ProvedKeyOptionalValues = Vec; #[cfg(any(feature = "full", feature = "verify"))] -impl fmt::Display for ProofTokenType { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let variant_str = match self { - ProofTokenType::Merk => "Merk", - ProofTokenType::SizedMerk => "SizedMerk", - ProofTokenType::EmptyTree => "EmptyTree", - ProofTokenType::AbsentPath => "AbsentPath", - ProofTokenType::PathInfo => "PathInfo", - ProofTokenType::Invalid => "Invalid", - }; - write!(f, "{}", variant_str) - } -} +pub type ProvedPathKeyValues = Vec; #[cfg(any(feature = "full", feature = "verify"))] -impl From for u8 { - fn from(proof_token_type: ProofTokenType) -> Self { - match proof_token_type { - ProofTokenType::Merk => 0x01, - ProofTokenType::SizedMerk => 0x02, - ProofTokenType::EmptyTree => 0x04, - ProofTokenType::AbsentPath => 0x05, - ProofTokenType::PathInfo => 0x06, - ProofTokenType::Invalid => 0x10, - } - } -} +pub type ProvedPathKeyOptionalValues 
= Vec; +/// Proved path-key-value #[cfg(any(feature = "full", feature = "verify"))] -impl From for ProofTokenType { - fn from(val: u8) -> Self { - match val { - 0x01 => ProofTokenType::Merk, - 0x02 => ProofTokenType::SizedMerk, - 0x04 => ProofTokenType::EmptyTree, - 0x05 => ProofTokenType::AbsentPath, - 0x06 => ProofTokenType::PathInfo, - _ => ProofTokenType::Invalid, - } - } +#[derive(Debug, PartialEq, Eq)] +pub struct ProvedPathKeyOptionalValue { + /// Path + pub path: Path, + /// Key + pub key: Key, + /// Value + pub value: Option>, + /// Proof + pub proof: CryptoHash, } #[cfg(any(feature = "full", feature = "verify"))] -impl ProofTokenType { - pub fn u8_to_display(val: u8) -> String { - match val { - 0x01 => "merk".to_string(), - 0x02 => "sized merk".to_string(), - 0x04 => "empty tree".to_string(), - 0x05 => "absent path".to_string(), - 0x06 => "path info".to_string(), - v => format!("invalid proof token {}", v), - } +impl fmt::Display for ProvedPathKeyOptionalValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "ProvedPathKeyValue {{")?; + writeln!( + f, + " path: [{}],", + self.path + .iter() + .map(|p| hex_to_ascii(p)) + .collect::>() + .join(", ") + )?; + writeln!(f, " key: {},", hex_to_ascii(&self.key))?; + writeln!( + f, + " value: {},", + optional_element_hex_to_ascii(self.value.as_ref()) + )?; + writeln!(f, " proof: {}", hex::encode(self.proof))?; + write!(f, "}}") } } +/// Proved path-key-value #[cfg(any(feature = "full", feature = "verify"))] -#[derive(Debug)] -// TODO: possibility for a proof writer?? 
-/// Proof reader -pub struct ProofReader<'a> { - proof_data: &'a [u8], - is_verbose: bool, +#[derive(Debug, PartialEq, Eq)] +pub struct ProvedPathKeyValue { + /// Path + pub path: Path, + /// Key + pub key: Key, + /// Value + pub value: Vec, + /// Proof + pub proof: CryptoHash, } #[cfg(any(feature = "full", feature = "verify"))] -impl<'a> ProofReader<'a> { - /// New proof reader - pub fn new(proof_data: &'a [u8]) -> Self { - Self { - proof_data, - is_verbose: false, - } - } - - /// New proof reader with verbose_status - pub fn new_with_verbose_status(proof_data: &'a [u8], is_verbose: bool) -> Self { - Self { - proof_data, - is_verbose, - } - } - - /// For non verbose proof read the immediate next proof, for verbose proof - /// read the first proof that matches a given key - pub fn read_next_proof(&mut self, key: &[u8]) -> Result<(ProofTokenType, Vec), Error> { - if self.is_verbose { - self.read_verbose_proof_at_key(key) - } else { - let (proof_token_type, proof, _) = self.read_proof_with_optional_type(None)?; - Ok((proof_token_type, proof)) - } - } - - /// Read the next proof, return the proof type - pub fn read_proof(&mut self) -> Result { - if self.is_verbose { - self.read_verbose_proof_with_optional_type(None) - } else { - self.read_proof_with_optional_type(None) - } - } - - /// Read verbose proof - pub fn read_verbose_proof(&mut self) -> Result { - self.read_verbose_proof_with_optional_type(None) - } - - /// Reads data from proof into slice of specific size - fn read_into_slice(&mut self, buf: &mut [u8]) -> Result { - self.proof_data - .read(buf) - .map_err(|_| Error::CorruptedData(String::from("failed to read proof data"))) - } - - /// Read varint encoded length information from proof data - fn read_length_data(&mut self) -> Result { - self.proof_data - .read_varint() - .map_err(|_| Error::InvalidProof("expected length data".to_string())) - } - - /// Read proof with optional type - pub fn read_proof_with_optional_type( - &mut self, - 
expected_data_type_option: Option, - ) -> Result { - let (proof_token_type, proof, _) = - self.read_proof_internal_with_optional_type(expected_data_type_option, false)?; - Ok((proof_token_type, proof, None)) +impl fmt::Display for ProvedPathKeyValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "ProvedPathKeyValue {{")?; + writeln!( + f, + " path: [{}],", + self.path + .iter() + .map(|p| hex_to_ascii(p)) + .collect::>() + .join(", ") + )?; + writeln!(f, " key: {},", hex_to_ascii(&self.key))?; + writeln!(f, " value: {},", element_hex_to_ascii(self.value.as_ref()))?; + writeln!(f, " proof: {}", hex::encode(self.proof))?; + write!(f, "}}") } +} - /// Read verbose proof with optional type - pub fn read_verbose_proof_with_optional_type( - &mut self, - expected_data_type_option: Option, - ) -> Result { - let (proof_token_type, proof, key) = - self.read_proof_internal_with_optional_type(expected_data_type_option, true)?; - Ok(( - proof_token_type, +impl From for ProvedPathKeyOptionalValue { + fn from(value: ProvedPathKeyValue) -> Self { + let ProvedPathKeyValue { + path, + key, + value, proof, - Some(key.ok_or(Error::InvalidProof( - "key must exist for verbose merk proofs".to_string(), - ))?), - )) - } + } = value; - /// Read verbose proof at key - /// Returns an error if it can't find a proof for that key - pub fn read_verbose_proof_at_key( - &mut self, - expected_key: &[u8], - ) -> Result<(ProofTokenType, Vec), Error> { - let (proof_token_type, proof, _) = loop { - let (proof_token_type, proof, key) = self.read_verbose_proof()?; - let key = key.expect("read_verbose_proof enforces that this exists"); - if key.as_slice() == expected_key { - break (proof_token_type, proof, key); - } - }; - - Ok((proof_token_type, proof)) - } - - /// Read proof with optional type - pub fn read_proof_internal_with_optional_type( - &mut self, - expected_data_type_option: Option, - is_verbose: bool, - ) -> Result { - let mut data_type = [0; 1]; - 
self.read_into_slice(&mut data_type)?; - - if let Some(expected_data_type) = expected_data_type_option { - if data_type[0] != expected_data_type { - return Err(Error::InvalidProof(format!( - "wrong data_type, expected {}, got {}", - expected_data_type, data_type[0] - ))); - } - } - - let proof_token_type: ProofTokenType = data_type[0].into(); - - if proof_token_type == ProofTokenType::EmptyTree - || proof_token_type == ProofTokenType::AbsentPath - { - return Ok((proof_token_type, vec![], None)); - } - - let (proof, key) = if proof_token_type == ProofTokenType::Merk - || proof_token_type == ProofTokenType::SizedMerk - { - // if verbose we need to read the key first - let key = if is_verbose { - let key_length = self.read_length_data()?; - - let mut key = vec![0; key_length]; - self.read_into_slice(&mut key)?; - - Some(key) - } else { - None - }; - - let proof_length = self.read_length_data()?; - - let mut proof = vec![0; proof_length]; - self.read_into_slice(&mut proof)?; - - (proof, key) - } else { - return Err(Error::InvalidProof( - "expected merk or sized merk proof".to_string(), - )); - }; - - Ok((proof_token_type, proof, key)) - } - - /// Reads path information from the proof vector - pub fn read_path_info(&mut self) -> Result>, Error> { - let mut data_type = [0; 1]; - self.read_into_slice(&mut data_type)?; - - if data_type != [Into::::into(ProofTokenType::PathInfo)] { - return Err(Error::InvalidProof(format!( - "wrong data_type, expected path_info, got {}", - ProofTokenType::u8_to_display(data_type[0]) - ))); - } - - let mut path = vec![]; - let path_slice_len = self.read_length_data()?; - - for _ in 0..path_slice_len { - let path_len = self.read_length_data()?; - let mut path_value = vec![0; path_len]; - self.read_into_slice(&mut path_value)?; - path.push(path_value); + ProvedPathKeyOptionalValue { + path, + key, + value: Some(value), + proof, } - - Ok(path) } } -#[cfg(feature = "full")] -/// Write to vec -// TODO: this can error out handle the error -pub fn 
write_to_vec(dest: &mut W, value: &[u8]) -> Result<(), Error> { - dest.write_all(value) - .map_err(|_e| Error::InternalError("failed to write to vector")) -} +impl TryFrom for ProvedPathKeyValue { + type Error = Error; -#[cfg(feature = "full")] -/// Write a slice to the vector, first write the length of the slice -pub fn write_slice_to_vec(dest: &mut W, value: &[u8]) -> Result<(), Error> { - write_to_vec(dest, value.len().encode_var_vec().as_slice())?; - write_to_vec(dest, value)?; - Ok(()) -} - -#[cfg(feature = "full")] -/// Write a slice of a slice to a flat vector:w -pub fn write_slice_of_slice_to_slice(dest: &mut W, value: &[&[u8]]) -> Result<(), Error> { - // write the number of slices we are about to write - write_to_vec(dest, value.len().encode_var_vec().as_slice())?; - for inner_slice in value { - write_slice_to_vec(dest, inner_slice)?; + fn try_from(value: ProvedPathKeyOptionalValue) -> Result { + let ProvedPathKeyOptionalValue { + path, + key, + value, + proof, + } = value; + let value = value.ok_or(Error::InvalidProofError(format!( + "expected {}", + hex_to_ascii(&key) + )))?; + Ok(ProvedPathKeyValue { + path, + key, + value, + proof, + }) } - Ok(()) } -#[cfg(any(feature = "full", feature = "verify"))] -pub fn reduce_limit_and_offset_by( - limit: &mut Option, - offset: &mut Option, - n: u16, -) -> bool { - let mut skip_limit = false; - let mut n = n; - - if let Some(offset_value) = *offset { - if offset_value > 0 { - if offset_value >= n { - *offset = Some(offset_value - n); - n = 0; - } else { - *offset = Some(0); - n -= offset_value; - } - skip_limit = true; - } - } - - if let Some(limit_value) = *limit { - if !skip_limit && limit_value > 0 { - if limit_value >= n { - *limit = Some(limit_value - n); - } else { - *limit = Some(0); - } +impl ProvedPathKeyValue { + // TODO: make path a reference + /// Consumes the ProvedKeyValue and returns a ProvedPathKeyValue given a + /// Path + pub fn from_proved_key_value(path: Path, proved_key_value: ProvedKeyValue) 
-> Self { + Self { + path, + key: proved_key_value.key, + value: proved_key_value.value, + proof: proved_key_value.proof, } } - skip_limit -} - -pub fn increase_limit_and_offset_by( - limit: &mut Option, - offset: &mut Option, - limit_inc: u16, - offset_inc: u16, -) { - if let Some(offset_value) = *offset { - *offset = Some(offset_value + offset_inc); - } - if let Some(limit_value) = *limit { - *limit = Some(limit_value + limit_inc); + /// Transforms multiple ProvedKeyValues to their equivalent + /// ProvedPathKeyValue given a Path + pub fn from_proved_key_values(path: Path, proved_key_values: ProvedKeyValues) -> Vec { + proved_key_values + .into_iter() + .map(|pkv| Self::from_proved_key_value(path.clone(), pkv)) + .collect() } } -/// Proved path-key-values -pub type ProvedPathKeyValues = Vec; - -/// Proved path-key-value -#[cfg(any(feature = "full", feature = "verify"))] -#[derive(Debug, PartialEq, Eq)] -pub struct ProvedPathKeyValue { - /// Path - pub path: Path, - /// Key - pub key: Key, - /// Value - pub value: Vec, - /// Proof - pub proof: CryptoHash, -} - -impl ProvedPathKeyValue { +impl ProvedPathKeyOptionalValue { // TODO: make path a reference /// Consumes the ProvedKeyValue and returns a ProvedPathKeyValue given a /// Path - pub fn from_proved_key_value(path: Path, proved_key_value: ProvedKeyValue) -> Self { + pub fn from_proved_key_value(path: Path, proved_key_value: ProvedKeyOptionalValue) -> Self { Self { path, key: proved_key_value.key, @@ -417,7 +170,10 @@ impl ProvedPathKeyValue { /// Transforms multiple ProvedKeyValues to their equivalent /// ProvedPathKeyValue given a Path - pub fn from_proved_key_values(path: Path, proved_key_values: ProvedKeyValues) -> Vec { + pub fn from_proved_key_values( + path: Path, + proved_key_values: ProvedKeyOptionalValues, + ) -> Vec { proved_key_values .into_iter() .map(|pkv| Self::from_proved_key_value(path.clone(), pkv)) @@ -427,46 +183,26 @@ impl ProvedPathKeyValue { #[cfg(test)] mod tests { - use 
grovedb_merk::proofs::query::ProvedKeyValue; - - use crate::operations::proof::util::{ProofTokenType, ProvedPathKeyValue}; + use grovedb_merk::proofs::query::ProvedKeyOptionalValue; - #[test] - fn test_proof_token_type_encoding() { - assert_eq!(0x01_u8, Into::::into(ProofTokenType::Merk)); - assert_eq!(0x02_u8, Into::::into(ProofTokenType::SizedMerk)); - assert_eq!(0x04_u8, Into::::into(ProofTokenType::EmptyTree)); - assert_eq!(0x05_u8, Into::::into(ProofTokenType::AbsentPath)); - assert_eq!(0x06_u8, Into::::into(ProofTokenType::PathInfo)); - assert_eq!(0x10_u8, Into::::into(ProofTokenType::Invalid)); - } - - #[test] - fn test_proof_token_type_decoding() { - assert_eq!(ProofTokenType::Merk, 0x01_u8.into()); - assert_eq!(ProofTokenType::SizedMerk, 0x02_u8.into()); - assert_eq!(ProofTokenType::EmptyTree, 0x04_u8.into()); - assert_eq!(ProofTokenType::AbsentPath, 0x05_u8.into()); - assert_eq!(ProofTokenType::PathInfo, 0x06_u8.into()); - assert_eq!(ProofTokenType::Invalid, 0x10_u8.into()); - } + use crate::operations::proof::util::ProvedPathKeyOptionalValue; #[test] fn test_proved_path_from_single_proved_key_value() { let path = vec![b"1".to_vec(), b"2".to_vec()]; - let proved_key_value = ProvedKeyValue { + let proved_key_value = ProvedKeyOptionalValue { key: b"a".to_vec(), - value: vec![5, 6], + value: Some(vec![5, 6]), proof: [0; 32], }; let proved_path_key_value = - ProvedPathKeyValue::from_proved_key_value(path.clone(), proved_key_value); + ProvedPathKeyOptionalValue::from_proved_key_value(path.clone(), proved_key_value); assert_eq!( proved_path_key_value, - ProvedPathKeyValue { + ProvedPathKeyOptionalValue { path, key: b"a".to_vec(), - value: vec![5, 6], + value: Some(vec![5, 6]), proof: [0; 32] } ); @@ -475,51 +211,116 @@ mod tests { #[test] fn test_many_proved_path_from_many_proved_key_value() { let path = vec![b"1".to_vec(), b"2".to_vec()]; - let proved_key_value_a = ProvedKeyValue { + let proved_key_value_a = ProvedKeyOptionalValue { key: b"a".to_vec(), - 
value: vec![5, 6], + value: Some(vec![5, 6]), proof: [0; 32], }; - let proved_key_value_b = ProvedKeyValue { + let proved_key_value_b = ProvedKeyOptionalValue { key: b"b".to_vec(), - value: vec![5, 7], + value: Some(vec![5, 7]), proof: [1; 32], }; - let proved_key_value_c = ProvedKeyValue { + let proved_key_value_c = ProvedKeyOptionalValue { key: b"c".to_vec(), - value: vec![6, 7], + value: Some(vec![6, 7]), proof: [2; 32], }; - let proved_key_values = vec![proved_key_value_a, proved_key_value_b, proved_key_value_c]; + let proved_key_value_d = ProvedKeyOptionalValue { + key: b"d".to_vec(), + value: None, + proof: [2; 32], + }; + let proved_key_values = vec![ + proved_key_value_a, + proved_key_value_b, + proved_key_value_c, + proved_key_value_d, + ]; let proved_path_key_values = - ProvedPathKeyValue::from_proved_key_values(path.clone(), proved_key_values); - assert_eq!(proved_path_key_values.len(), 3); + ProvedPathKeyOptionalValue::from_proved_key_values(path.clone(), proved_key_values); + assert_eq!(proved_path_key_values.len(), 4); assert_eq!( proved_path_key_values[0], - ProvedPathKeyValue { + ProvedPathKeyOptionalValue { path: path.clone(), key: b"a".to_vec(), - value: vec![5, 6], + value: Some(vec![5, 6]), proof: [0; 32] } ); assert_eq!( proved_path_key_values[1], - ProvedPathKeyValue { + ProvedPathKeyOptionalValue { path: path.clone(), key: b"b".to_vec(), - value: vec![5, 7], + value: Some(vec![5, 7]), proof: [1; 32] } ); assert_eq!( proved_path_key_values[2], - ProvedPathKeyValue { - path, + ProvedPathKeyOptionalValue { + path: path.clone(), key: b"c".to_vec(), - value: vec![6, 7], + value: Some(vec![6, 7]), + proof: [2; 32] + } + ); + + assert_eq!( + proved_path_key_values[3], + ProvedPathKeyOptionalValue { + path, + key: b"d".to_vec(), + value: None, proof: [2; 32] } ); } } + +pub fn hex_to_ascii(hex_value: &[u8]) -> String { + // Define the set of allowed characters + const ALLOWED_CHARS: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\ + abcdefghijklmnopqrstuvwxyz\ 
+ 0123456789_-/\\[]@"; + + // Check if all characters in hex_value are allowed + if hex_value.iter().all(|&c| ALLOWED_CHARS.contains(&c)) { + // Try to convert to UTF-8 + String::from_utf8(hex_value.to_vec()) + .unwrap_or_else(|_| format!("0x{}", hex::encode(hex_value))) + } else { + // Hex encode and prepend "0x" + format!("0x{}", hex::encode(hex_value)) + } +} + +pub fn path_hex_to_ascii(path: &Path) -> String { + path.iter() + .map(|e| hex_to_ascii(e.as_slice())) + .collect::>() + .join("/") +} + +pub fn path_as_slices_hex_to_ascii(path: &[&[u8]]) -> String { + path.into_iter() + .map(|e| hex_to_ascii(e)) + .collect::>() + .join("/") +} +pub fn optional_element_hex_to_ascii(hex_value: Option<&Vec>) -> String { + match hex_value { + None => "None".to_string(), + Some(hex_value) => Element::deserialize(hex_value) + .map(|e| e.to_string()) + .unwrap_or_else(|_| hex::encode(hex_value)), + } +} + +pub fn element_hex_to_ascii(hex_value: &[u8]) -> String { + Element::deserialize(hex_value) + .map(|e| e.to_string()) + .unwrap_or_else(|_| hex::encode(hex_value)) +} diff --git a/grovedb/src/operations/proof/verify.rs b/grovedb/src/operations/proof/verify.rs index 7a347c152..4e0375e37 100644 --- a/grovedb/src/operations/proof/verify.rs +++ b/grovedb/src/operations/proof/verify.rs @@ -1,200 +1,408 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - -//! Verify proof operations - -use std::{borrow::Cow, collections::BTreeMap}; - -use grovedb_merk::proofs::query::PathKey; -#[cfg(any(feature = "full", feature = "verify"))] -pub use grovedb_merk::proofs::query::{Path, ProvedKeyValue}; -#[cfg(any(feature = "full", feature = "verify"))] +use std::collections::{BTreeMap, BTreeSet}; + use grovedb_merk::{ - proofs::Query, - tree::{combine_hash, value_hash as value_hash_fn}, + proofs::{ + query::{PathKey, VerifyOptions}, + Query, + }, + tree::{combine_hash, value_hash}, CryptoHash, }; -use crate::{ - operations::proof::util::{ - reduce_limit_and_offset_by, ProvedPathKeyValue, ProvedPathKeyValues, - }, - query_result_type::PathKeyOptionalElementTrio, - versioning::read_and_consume_proof_version, - SizedQuery, +#[cfg(feature = "proof_debug")] +use crate::operations::proof::util::{ + hex_to_ascii, path_as_slices_hex_to_ascii, path_hex_to_ascii, }; -#[cfg(any(feature = "full", feature = "verify"))] use crate::{ - operations::proof::util::{ - ProofReader, ProofTokenType, ProofTokenType::AbsentPath, EMPTY_TREE_HASH, + operations::proof::{ + util::{ProvedPathKeyOptionalValue, ProvedPathKeyValues}, + GroveDBProof, GroveDBProofV0, LayerProof, ProveOptions, }, + query_result_type::PathKeyOptionalElementTrio, Element, Error, GroveDb, PathQuery, }; -#[cfg(any(feature = "full", feature = "verify"))] -pub type ProvedKeyValues = Vec; - -#[cfg(any(feature = "full", feature = "verify"))] -type EncounteredAbsence = bool; - -#[cfg(any(feature = 
"full", feature = "verify"))] impl GroveDb { - /// Verify proof given a path query - /// Returns the root hash + deserialized elements - pub fn verify_query( + pub fn verify_query_with_options( proof: &[u8], query: &PathQuery, - ) -> Result<([u8; 32], Vec), Error> { - let (root_hash, proved_path_key_values) = Self::verify_query_raw(proof, query)?; - let path_key_optional_elements = proved_path_key_values - .into_iter() - .map(|pkv| pkv.try_into()) - .collect::, Error>>()?; - Ok((root_hash, path_key_optional_elements)) + options: VerifyOptions, + ) -> Result<(CryptoHash, Vec), Error> { + if options.absence_proofs_for_non_existing_searched_keys { + // must have a limit + query.query.limit.ok_or(Error::NotSupported( + "limits must be set in verify_query_with_absence_proof".to_string(), + ))?; + } + + // must have no offset + if query.query.offset.is_some() { + return Err(Error::NotSupported( + "offsets in path queries are not supported for proofs".to_string(), + )); + } + + let config = bincode::config::standard() + .with_big_endian() + .with_no_limit(); + let grovedb_proof: GroveDBProof = bincode::decode_from_slice(proof, config) + .map_err(|e| Error::CorruptedData(format!("unable to decode proof: {}", e)))? + .0; + + let (root_hash, result) = Self::verify_proof_internal(&grovedb_proof, query, options)?; + + Ok((root_hash, result)) } - /// Verify proof for a given path query returns serialized elements pub fn verify_query_raw( proof: &[u8], query: &PathQuery, - ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { - let mut verifier = ProofVerifier::new(query); - let hash = verifier.execute_proof(proof, query, false)?; - - Ok((hash, verifier.result_set)) + ) -> Result<(CryptoHash, ProvedPathKeyValues), Error> { + let config = bincode::config::standard() + .with_big_endian() + .with_no_limit(); + let grovedb_proof: GroveDBProof = bincode::decode_from_slice(proof, config) + .map_err(|e| Error::CorruptedData(format!("unable to decode proof: {}", e)))? 
+ .0; + + let (root_hash, result) = Self::verify_proof_raw_internal( + &grovedb_proof, + query, + VerifyOptions { + absence_proofs_for_non_existing_searched_keys: false, + verify_proof_succinctness: false, + include_empty_trees_in_result: true, + }, + )?; + + Ok((root_hash, result)) } - /// Verify proof given multiple path queries. - /// If we have more than one path query we merge before performing - /// verification. - pub fn verify_query_many( - proof: &[u8], - query: Vec<&PathQuery>, - ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { - if query.len() > 1 { - let query = PathQuery::merge(query)?; - GroveDb::verify_query_raw(proof, &query) - } else { - GroveDb::verify_query_raw(proof, query[0]) + fn verify_proof_internal( + proof: &GroveDBProof, + query: &PathQuery, + options: VerifyOptions, + ) -> Result<(CryptoHash, Vec), Error> { + match proof { + GroveDBProof::V0(proof_v0) => Self::verify_proof_internal_v0(proof_v0, query, options), } } - /// Given a verbose proof, we can verify it with a subset path query. - /// Returning the root hash and the deserialized result set. 
- pub fn verify_subset_query( - proof: &[u8], + fn verify_proof_internal_v0( + proof: &GroveDBProofV0, query: &PathQuery, - ) -> Result<([u8; 32], Vec), Error> { - let (root_hash, proved_path_key_values) = Self::verify_subset_query_raw(proof, query)?; - let path_key_optional_elements = proved_path_key_values - .into_iter() - .map(|pkv| pkv.try_into()) - .collect::, Error>>()?; - Ok((root_hash, path_key_optional_elements)) - } + options: VerifyOptions, + ) -> Result<(CryptoHash, Vec), Error> { + let mut result = Vec::new(); + let mut limit = query.query.limit; + let root_hash = Self::verify_layer_proof( + &proof.root_layer, + &proof.prove_options, + query, + &mut limit, + &[], + &mut result, + &options, + )?; + + if options.absence_proofs_for_non_existing_searched_keys { + // must have a limit + let max_results = query.query.limit.ok_or(Error::NotSupported( + "limits must be set in verify_query_with_absence_proof".to_string(), + ))? as usize; + + let terminal_keys = query.terminal_keys(max_results)?; + + // convert the result set to a btree map + let mut result_set_as_map: BTreeMap> = result + .into_iter() + .map(|(path, key, element)| ((path, key), element)) + .collect(); + #[cfg(feature = "proof_debug")] + { + println!( + "terminal keys are [{}] \n result set is [{}]", + terminal_keys + .iter() + .map(|(path, key)| format!( + "path: {} key: {}", + path_hex_to_ascii(path), + hex_to_ascii(key) + )) + .collect::>() + .join(", "), + result_set_as_map + .iter() + .map(|((path, key), e)| { + let element_string = if let Some(e) = e { + e.to_string() + } else { + "None".to_string() + }; + format!( + "path: {} key: {} element: {}", + path_hex_to_ascii(path), + hex_to_ascii(key), + element_string, + ) + }) + .collect::>() + .join(", ") + ); + } - /// Given a verbose proof, we can verify it with a subset path query. - /// Returning the root hash and the serialized result set. 
- pub fn verify_subset_query_raw( - proof: &[u8], - query: &PathQuery, - ) -> Result<([u8; 32], ProvedPathKeyValues), Error> { - let mut verifier = ProofVerifier::new(query); - let hash = verifier.execute_proof(proof, query, true)?; - Ok((hash, verifier.result_set)) + result = terminal_keys + .into_iter() + .map(|terminal_key| { + let element = result_set_as_map.remove(&terminal_key).flatten(); + (terminal_key.0, terminal_key.1, element) + }) + .collect(); + } + + Ok((root_hash, result)) } - /// Verify non subset query return the absence proof - /// Returns all possible keys within the Path Query with an optional Element - /// Value Element is set to None if absent - pub fn verify_query_with_absence_proof( - proof: &[u8], + fn verify_proof_raw_internal( + proof: &GroveDBProof, query: &PathQuery, - ) -> Result<([u8; 32], Vec), Error> { - Self::verify_with_absence_proof(proof, query, Self::verify_query) + options: VerifyOptions, + ) -> Result<(CryptoHash, ProvedPathKeyValues), Error> { + match proof { + GroveDBProof::V0(proof_v0) => { + Self::verify_proof_raw_internal_v0(proof_v0, query, options) + } + } } - /// Verify subset query return the absence proof - /// Returns all possible keys within the Path Query with an optional Element - /// Value Element is set to None if absent - pub fn verify_subset_query_with_absence_proof( - proof: &[u8], + fn verify_proof_raw_internal_v0( + proof: &GroveDBProofV0, query: &PathQuery, - ) -> Result<([u8; 32], Vec), Error> { - Self::verify_with_absence_proof(proof, query, Self::verify_subset_query) + options: VerifyOptions, + ) -> Result<(CryptoHash, ProvedPathKeyValues), Error> { + let mut result = Vec::new(); + let mut limit = query.query.limit; + let root_hash = Self::verify_layer_proof( + &proof.root_layer, + &proof.prove_options, + query, + &mut limit, + &[], + &mut result, + &options, + )?; + Ok((root_hash, result)) } - /// Verifies the proof and returns both elements in the result set and the - /// elements in query but not 
in state. - /// Note: This only works for certain path queries. - // TODO: We should not care about terminal keys, as theoretically they can be - // infinite we should perform the absence check solely on the proof and the - // given key, this is a temporary solution - fn verify_with_absence_proof( - proof: &[u8], + fn verify_layer_proof( + layer_proof: &LayerProof, + prove_options: &ProveOptions, query: &PathQuery, - verification_fn: T, - ) -> Result<([u8; 32], Vec), Error> + limit_left: &mut Option, + current_path: &[&[u8]], + result: &mut Vec, + options: &VerifyOptions, + ) -> Result where - T: Fn(&[u8], &PathQuery) -> Result<([u8; 32], Vec), Error>, + T: TryFrom, + Error: From<>::Error>, { - // must have a limit - let max_results = query.query.limit.ok_or(Error::NotSupported( - "limits must be set in verify_query_with_absence_proof".to_string(), - ))? as usize; + let internal_query = + query + .query_items_at_path(current_path) + .ok_or(Error::CorruptedPath(format!( + "verify raw: path {} should be part of path_query {}", + current_path + .iter() + .map(hex::encode) + .collect::>() + .join("/"), + query + )))?; + + let level_query = Query { + items: internal_query.items.to_vec(), + left_to_right: internal_query.left_to_right, + ..Default::default() + }; - // must have no offset - if query.query.offset.is_some() { - return Err(Error::NotSupported( - "offsets are not supported for verify_query_with_absence_proof".to_string(), - )); + let (root_hash, merk_result) = level_query + .execute_proof( + &layer_proof.merk_proof, + *limit_left, + internal_query.left_to_right, + ) + .unwrap() + .map_err(|e| { + eprintln!("{e}"); + Error::InvalidProof(format!("invalid proof verification parameters: {}", e)) + })?; + #[cfg(feature = "proof_debug")] + { + println!( + "current path {} \n merk result is {}", + path_as_slices_hex_to_ascii(current_path), + merk_result + ); } - let terminal_keys = query.terminal_keys(max_results)?; + let mut verified_keys = BTreeSet::new(); - // 
need to actually verify the query - let (root_hash, result_set) = verification_fn(proof, query)?; + if merk_result.result_set.is_empty() { + if prove_options.decrease_limit_on_empty_sub_query_result { + limit_left.as_mut().map(|limit| *limit -= 1); + } + } else { + for proved_key_value in merk_result.result_set { + let mut path = current_path.to_vec(); + let key = &proved_key_value.key; + let hash = &proved_key_value.proof; + if let Some(value_bytes) = &proved_key_value.value { + let element = Element::deserialize(value_bytes)?; + + verified_keys.insert(key.clone()); + + if let Some(lower_layer) = layer_proof.lower_layers.get(key) { + #[cfg(feature = "proof_debug")] + { + println!("lower layer had key {}", hex_to_ascii(key)); + } + match element { + Element::Tree(Some(_), _) | Element::SumTree(Some(_), ..) => { + path.push(key); + let lower_hash = Self::verify_layer_proof( + lower_layer, + prove_options, + query, + limit_left, + &path, + result, + options, + )?; + let combined_root_hash = + combine_hash(value_hash(value_bytes).value(), &lower_hash) + .value() + .to_owned(); + if hash != &combined_root_hash { + return Err(Error::InvalidProof(format!( + "Mismatch in lower layer hash, expected {}, got {}", + hex::encode(hash), + hex::encode(combined_root_hash) + ))); + } + if limit_left == &Some(0) { + break; + } + } + Element::Tree(None, _) + | Element::SumTree(None, ..) + | Element::SumItem(..) + | Element::Item(..) + | Element::Reference(..) 
=> { + return Err(Error::InvalidProof( + "Proof has lower layer for a non Tree".into(), + )); + } + } + } else if element.is_any_item() + || !internal_query.has_subquery_or_matching_in_path_on_key(key) + && (options.include_empty_trees_in_result + || !matches!(element, Element::Tree(None, _))) + { + let path_key_optional_value = + ProvedPathKeyOptionalValue::from_proved_key_value( + path.iter().map(|p| p.to_vec()).collect(), + proved_key_value, + ); + #[cfg(feature = "proof_debug")] + { + println!( + "pushing {} limit left after is {:?}", + &path_key_optional_value, limit_left + ); + } + result.push(path_key_optional_value.try_into()?); - // convert the result set to a btree map - let mut result_set_as_map: BTreeMap> = result_set - .into_iter() - .map(|(path, key, element)| ((path, key), element)) - .collect(); + limit_left.as_mut().map(|limit| *limit -= 1); + if limit_left == &Some(0) { + break; + } + } else { + #[cfg(feature = "proof_debug")] + { + println!( + "we have subquery on key {} with value {}: {}", + hex_to_ascii(key), + element, + level_query + ) + } + } + } + } + } - let result_set_with_absence: Vec = terminal_keys - .into_iter() - .map(|terminal_key| { - let element = result_set_as_map.remove(&terminal_key).flatten(); - (terminal_key.0, terminal_key.1, element) - }) - .collect(); + Ok(root_hash) + } - Ok((root_hash, result_set_with_absence)) + pub fn verify_query( + proof: &[u8], + query: &PathQuery, + ) -> Result<(CryptoHash, Vec), Error> { + Self::verify_query_with_options( + proof, + query, + VerifyOptions { + absence_proofs_for_non_existing_searched_keys: false, + verify_proof_succinctness: true, + include_empty_trees_in_result: false, + }, + ) + } + + pub fn verify_subset_query( + proof: &[u8], + query: &PathQuery, + ) -> Result<(CryptoHash, Vec), Error> { + Self::verify_query_with_options( + proof, + query, + VerifyOptions { + absence_proofs_for_non_existing_searched_keys: false, + verify_proof_succinctness: false, + 
include_empty_trees_in_result: false, + }, + ) + } + + pub fn verify_query_with_absence_proof( + proof: &[u8], + query: &PathQuery, + ) -> Result<(CryptoHash, Vec), Error> { + Self::verify_query_with_options( + proof, + query, + VerifyOptions { + absence_proofs_for_non_existing_searched_keys: true, + verify_proof_succinctness: true, + include_empty_trees_in_result: false, + }, + ) + } + + pub fn verify_subset_query_with_absence_proof( + proof: &[u8], + query: &PathQuery, + ) -> Result<(CryptoHash, Vec), Error> { + Self::verify_query_with_options( + proof, + query, + VerifyOptions { + absence_proofs_for_non_existing_searched_keys: true, + verify_proof_succinctness: false, + include_empty_trees_in_result: false, + }, + ) } /// Verify subset proof with a chain of path query functions. @@ -235,698 +443,3 @@ impl GroveDb { Ok((last_root_hash, results)) } } - -#[cfg(any(feature = "full", feature = "verify"))] -/// Proof verifier -struct ProofVerifier { - limit: Option, - offset: Option, - result_set: ProvedPathKeyValues, -} - -#[cfg(any(feature = "full", feature = "verify"))] -impl ProofVerifier { - /// New query - pub fn new(query: &PathQuery) -> Self { - ProofVerifier { - limit: query.query.limit, - offset: query.query.offset, - result_set: vec![], - } - } - - /// Execute proof - pub fn execute_proof( - &mut self, - proof: &[u8], - query: &PathQuery, - is_verbose: bool, - ) -> Result<[u8; 32], Error> { - let (_proof_version, proof) = read_and_consume_proof_version(proof)?; - let mut proof_reader = ProofReader::new_with_verbose_status(proof, is_verbose); - - let path_slices = query.path.iter().map(|x| x.as_slice()).collect::>(); - let mut query = Cow::Borrowed(query); - - // TODO: refactor and add better comments - // if verbose, the first thing we want to do is read the path info - if is_verbose { - let original_path = proof_reader.read_path_info()?; - - if original_path == path_slices { - // do nothing - } else if original_path.len() > path_slices.len() { - // TODO: 
can we relax this constraint - return Err(Error::InvalidProof( - "original path query path must not be greater than the subset path len" - .to_string(), - )); - } else { - let original_path_in_new_path = original_path - .iter() - .all(|key| path_slices.contains(&key.as_slice())); - - if !original_path_in_new_path { - return Err(Error::InvalidProof( - "the original path should be a subset of the subset path".to_string(), - )); - } else { - // We construct a new path query - let path_not_common = path_slices[original_path.len()..].to_vec(); - let mut path_iter = path_not_common.iter(); - - let mut new_query = Query::new(); - if path_iter.len() >= 1 { - new_query - .insert_key(path_iter.next().expect("confirmed has value").to_vec()); - } - - // need to add the first key to the query - new_query.set_subquery_path(path_iter.map(|a| a.to_vec()).collect()); - new_query.set_subquery(query.query.query.clone()); - - query = Cow::Owned(PathQuery::new( - original_path, - SizedQuery::new(new_query, query.query.limit, query.query.offset), - )); - } - } - } - - let (proof_token_type, proof, _) = proof_reader.read_proof()?; - - let root_hash = if proof_token_type == AbsentPath { - self.verify_absent_path(&mut proof_reader, path_slices)? - } else { - let path_owned = query.path.iter().map(|a| a.to_vec()).collect(); - let mut last_subtree_root_hash = self.execute_subquery_proof( - proof_token_type, - proof, - &mut proof_reader, - query.as_ref(), - path_owned, - )?; - - // validate the path elements are connected - self.verify_path_to_root( - query.as_ref(), - query.path.iter().map(|a| a.as_ref()).collect(), - &mut proof_reader, - &mut last_subtree_root_hash, - )? 
- }; - - Ok(root_hash) - } - - fn execute_subquery_proof( - &mut self, - proof_token_type: ProofTokenType, - proof: Vec, - proof_reader: &mut ProofReader, - query: &PathQuery, - path: Path, - ) -> Result<[u8; 32], Error> { - let last_root_hash: [u8; 32]; - - match proof_token_type { - ProofTokenType::SizedMerk => { - // verify proof with limit and offset values - let verification_result = self.execute_merk_proof( - ProofTokenType::SizedMerk, - &proof, - &query.query.query, - query.query.query.left_to_right, - path, - )?; - - last_root_hash = verification_result.0; - } - ProofTokenType::Merk => { - // for non leaf subtrees, we want to prove that all the queried keys - // have an accompanying proof as long as the limit is non zero - // and their child subtree is not empty - let (proof_root_hash, children) = self.execute_merk_proof( - ProofTokenType::Merk, - &proof, - &query.query.query, - query.query.query.left_to_right, - path, - )?; - - last_root_hash = proof_root_hash; - let children = children.ok_or(Error::InvalidProof( - "MERK_PROOF always returns a result set".to_string(), - ))?; - - for proved_path_key_value in children { - let ProvedPathKeyValue { - path, - key, - value: value_bytes, - proof: value_hash, - } = proved_path_key_value; - let child_element = Element::deserialize(value_bytes.as_slice())?; - match child_element { - Element::Tree(expected_root_key, _) - | Element::SumTree(expected_root_key, ..) 
=> { - let mut expected_combined_child_hash = value_hash; - let mut current_value_bytes = value_bytes; - - if self.limit == Some(0) { - // we are done verifying the subqueries - break; - } - - let (subquery_path, subquery_value) = - Element::subquery_paths_and_value_for_sized_query( - &query.query, - key.as_slice(), - ); - - if subquery_value.is_none() && subquery_path.is_none() { - // add this element to the result set - let skip_limit = reduce_limit_and_offset_by( - &mut self.limit, - &mut self.offset, - 1, - ); - - if !skip_limit { - // only insert to the result set if the offset value is not - // greater than 0 - self.result_set.push( - ProvedPathKeyValue::from_proved_key_value( - path, - ProvedKeyValue { - key, - value: current_value_bytes, - proof: value_hash, - }, - ), - ); - } - - continue; - } - - // What is the equivalent for an empty tree - if expected_root_key.is_none() { - // child node is empty, move on to next - continue; - } - - // update the path, we are about to perform a subquery call - let mut new_path = path.to_owned(); - new_path.push(key); - - if subquery_path.is_some() - && !subquery_path.as_ref().unwrap().is_empty() - { - if subquery_value.is_none() { - self.verify_subquery_path( - proof_reader, - ProofTokenType::SizedMerk, - &mut subquery_path.expect("confirmed it has a value above"), - &mut expected_combined_child_hash, - &mut current_value_bytes, - &mut new_path, - )?; - continue; - } else { - let (_, result_set_opt, encountered_absence) = self - .verify_subquery_path( - proof_reader, - ProofTokenType::Merk, - &mut subquery_path - .expect("confirmed it has a value above"), - &mut expected_combined_child_hash, - &mut current_value_bytes, - &mut new_path, - )?; - - if encountered_absence { - // we hit an absence proof while verifying the subquery path - continue; - } - - let subquery_path_result_set = result_set_opt; - if subquery_path_result_set.is_none() { - // this means a sized proof was generated for the subquery - // key - // which 
is invalid as there exists a subquery value - return Err(Error::InvalidProof( - "expected unsized proof for subquery path as subquery \ - value exists" - .to_string(), - )); - } - let subquery_path_result_set = - subquery_path_result_set.expect("confirmed exists above"); - - if subquery_path_result_set.is_empty() { - // we have a valid proof that shows the absence of the - // subquery path in the tree, hence the subquery value - // cannot be applied, move on to the next. - continue; - } - - Self::update_root_key_from_subquery_path_element( - &mut expected_combined_child_hash, - &mut current_value_bytes, - &subquery_path_result_set, - )?; - } - } - - let new_path_query = - PathQuery::new_unsized(vec![], subquery_value.unwrap()); - - let (child_proof_token_type, child_proof) = proof_reader - .read_next_proof(new_path.last().unwrap_or(&Default::default()))?; - - let child_hash = self.execute_subquery_proof( - child_proof_token_type, - child_proof, - proof_reader, - &new_path_query, - new_path, - )?; - - let combined_child_hash = combine_hash( - value_hash_fn(&current_value_bytes).value(), - &child_hash, - ) - .value() - .to_owned(); - - if combined_child_hash != expected_combined_child_hash { - return Err(Error::InvalidProof(format!( - "child hash {} doesn't match the expected hash {}", - hex::encode(combined_child_hash), - hex::encode(expected_combined_child_hash) - ))); - } - } - _ => { - // encountered a non tree element, we can't apply a subquery to it - // add it to the result set. 
- if self.limit == Some(0) { - break; - } - - let skip_limit = - reduce_limit_and_offset_by(&mut self.limit, &mut self.offset, 1); - - if !skip_limit { - // only insert to the result set if the offset value is not greater - // than 0 - self.result_set - .push(ProvedPathKeyValue::from_proved_key_value( - path, - ProvedKeyValue { - key, - value: value_bytes, - proof: value_hash, - }, - )); - } - } - } - } - } - ProofTokenType::EmptyTree => { - last_root_hash = EMPTY_TREE_HASH; - } - t => { - // execute_subquery_proof only expects proofs for merk trees - // root proof is handled separately - return Err(Error::InvalidProof(format!( - "wrong proof type, expected sized merk, merk or empty tree but got {}", - t - ))); - } - } - Ok(last_root_hash) - } - - /// Deserialize subkey_element and update expected root hash and element - /// value - fn update_root_key_from_subquery_path_element( - expected_child_hash: &mut CryptoHash, - current_value_bytes: &mut Vec, - subquery_path_result_set: &[ProvedPathKeyValue], - ) -> Result<(), Error> { - let elem_value = &subquery_path_result_set[0].value; - let subquery_path_element = Element::deserialize(elem_value) - .map_err(|_| Error::CorruptedData("failed to deserialize element".to_string()))?; - match subquery_path_element { - Element::Tree(..) | Element::SumTree(..) 
=> { - *expected_child_hash = subquery_path_result_set[0].proof; - *current_value_bytes = subquery_path_result_set[0].value.to_owned(); - } - e => { - // the means that the subquery path pointed to a non tree - // element, this is not valid as you cannot apply the - // the subquery value to non tree items - return Err(Error::InvalidProof(format!( - "subquery path cannot point to non tree element, got {}", - e.type_str() - ))); - } - } - Ok(()) - } - - /// Checks that a valid proof showing the existence or absence of the - /// subquery path is present - fn verify_subquery_path( - &mut self, - proof_reader: &mut ProofReader, - expected_proof_token_type: ProofTokenType, - subquery_path: &mut Path, - expected_root_hash: &mut CryptoHash, - current_value_bytes: &mut Vec, - current_path: &mut Path, - ) -> Result<(CryptoHash, Option, EncounteredAbsence), Error> { - // the subquery path contains at least one item. - let last_key = subquery_path.remove(subquery_path.len() - 1); - - for subquery_key in subquery_path.iter() { - let (proof_token_type, subkey_proof) = - proof_reader.read_next_proof(current_path.last().unwrap_or(&Default::default()))?; - // intermediate proofs are all going to be unsized merk proofs - if proof_token_type != ProofTokenType::Merk { - return Err(Error::InvalidProof(format!( - "expected MERK proof type for intermediate subquery path keys, got {}", - proof_token_type - ))); - } - match proof_token_type { - ProofTokenType::Merk => { - let mut key_as_query = Query::new(); - key_as_query.insert_key(subquery_key.to_owned()); - current_path.push(subquery_key.to_owned()); - - let (proof_root_hash, result_set) = self.execute_merk_proof( - proof_token_type, - &subkey_proof, - &key_as_query, - key_as_query.left_to_right, - current_path.to_owned(), - )?; - - // should always be some as we force the proof type to be MERK - debug_assert!(result_set.is_some(), "{}", true); - - // result_set being empty means we could not find the given key in the subtree - // 
which essentially means an absence proof - if result_set - .as_ref() - .expect("result set should always be some for merk proof type") - .is_empty() - { - return Ok((proof_root_hash, None, true)); - } - - // verify that the elements in the subquery path are linked by root hashes. - let combined_child_hash = - combine_hash(value_hash_fn(current_value_bytes).value(), &proof_root_hash) - .value() - .to_owned(); - - if combined_child_hash != *expected_root_hash { - return Err(Error::InvalidProof(format!( - "child hash {} doesn't match the expected hash {}", - hex::encode(combined_child_hash), - hex::encode(expected_root_hash) - ))); - } - - // after confirming they are linked use the latest hash values for subsequent - // checks - Self::update_root_key_from_subquery_path_element( - expected_root_hash, - current_value_bytes, - &result_set.expect("confirmed is some"), - )?; - } - t => { - return Err(Error::InvalidProof(format!( - "expected merk of sized merk proof type for subquery path, got {}", - t - ))); - } - } - } - - let (proof_token_type, subkey_proof) = - proof_reader.read_next_proof(current_path.last().unwrap_or(&Default::default()))?; - if proof_token_type != expected_proof_token_type { - return Err(Error::InvalidProof(format!( - "unexpected proof type for subquery path, expected {}, got {}", - expected_proof_token_type, proof_token_type - ))); - } - - match proof_token_type { - ProofTokenType::Merk | ProofTokenType::SizedMerk => { - let mut key_as_query = Query::new(); - key_as_query.insert_key(last_key.to_owned()); - - let verification_result = self.execute_merk_proof( - proof_token_type, - &subkey_proof, - &key_as_query, - key_as_query.left_to_right, - current_path.to_owned(), - )?; - - current_path.push(last_key); - - Ok((verification_result.0, verification_result.1, false)) - } - t => Err(Error::InvalidProof(format!( - "expected merk or sized merk proof type for subquery path, got {}", - t - ))), - } - } - - fn verify_absent_path( - &mut self, - 
proof_reader: &mut ProofReader, - path_slices: Vec<&[u8]>, - ) -> Result<[u8; 32], Error> { - let mut root_key_hash = None; - let mut expected_child_hash = None; - let mut last_result_set: ProvedPathKeyValues = vec![]; - - for key in path_slices { - let (proof_token_type, merk_proof, _) = proof_reader.read_proof()?; - if proof_token_type == ProofTokenType::EmptyTree { - // when we encounter the empty tree op, we need to ensure - // that the expected tree hash is the combination of the - // Element_value_hash and the empty root hash [0; 32] - let combined_hash = combine_hash( - value_hash_fn(last_result_set[0].value.as_slice()).value(), - &[0; 32], - ) - .unwrap(); - if Some(combined_hash) != expected_child_hash { - return Err(Error::InvalidProof( - "proof invalid: could not verify empty subtree while generating absent \ - path proof" - .to_string(), - )); - } else { - last_result_set = vec![]; - break; - } - } else if proof_token_type != ProofTokenType::Merk { - return Err(Error::InvalidProof(format!( - "expected a merk proof for absent path, got {}", - proof_token_type - ))); - } - - let mut child_query = Query::new(); - child_query.insert_key(key.to_vec()); - - // TODO: don't pass empty vec - let proof_result = self.execute_merk_proof( - ProofTokenType::Merk, - &merk_proof, - &child_query, - true, - // cannot return a result set - Vec::new(), - )?; - - if let Some(expected_child_hash) = expected_child_hash { - let combined_hash = combine_hash( - value_hash_fn(last_result_set[0].value.as_slice()).value(), - &proof_result.0, - ) - .value() - .to_owned(); - if combined_hash != expected_child_hash { - return Err(Error::InvalidProof(format!( - "proof invalid: invalid parent, expected {}, got {}", - hex::encode(expected_child_hash), - hex::encode(combined_hash) - ))); - } - } else { - root_key_hash = Some(proof_result.0); - } - - last_result_set = proof_result - .1 - .expect("MERK_PROOF always returns a result set"); - if last_result_set.is_empty() { - // if result set 
is empty then we have reached the absence point, break - break; - } - - let elem = Element::deserialize(last_result_set[0].value.as_slice())?; - let child_hash = match elem { - Element::Tree(..) | Element::SumTree(..) => Ok(Some(last_result_set[0].proof)), - e => Err(Error::InvalidProof(format!( - "intermediate proofs should be for trees, got {}", - e.type_str() - ))), - }?; - expected_child_hash = child_hash; - } - - if last_result_set.is_empty() { - if let Some(hash) = root_key_hash { - Ok(hash) - } else { - Err(Error::InvalidProof( - "proof invalid: no non root tree found".to_string(), - )) - } - } else { - Err(Error::InvalidProof( - "proof invalid: path not absent".to_string(), - )) - } - } - - /// Verifies that the correct proof was provided to confirm the path in - /// query - fn verify_path_to_root( - &mut self, - query: &PathQuery, - path_slices: Vec<&[u8]>, - proof_reader: &mut ProofReader, - expected_root_hash: &mut [u8; 32], - ) -> Result<[u8; 32], Error> { - let mut split_path = path_slices.split_last(); - while let Some((key, path_slice)) = split_path { - // for every subtree, there should be a corresponding proof for the parent - // which should prove that this subtree is a child of the parent tree - let (proof_token_type, parent_merk_proof) = - proof_reader.read_next_proof(path_slice.last().unwrap_or(&Default::default()))?; - if proof_token_type != ProofTokenType::Merk { - return Err(Error::InvalidProof(format!( - "wrong data_type expected Merk Proof, got {}", - proof_token_type - ))); - } - - let mut parent_query = Query::new(); - parent_query.insert_key(key.to_vec()); - - let proof_result = self.execute_merk_proof( - ProofTokenType::Merk, - &parent_merk_proof, - &parent_query, - query.query.query.left_to_right, - // TODO: don't pass empty vec - Vec::new(), - )?; - - let result_set = proof_result - .1 - .expect("MERK_PROOF always returns a result set"); - if result_set.is_empty() || &result_set[0].key != key { - return Err(Error::InvalidProof( - 
"proof invalid: invalid parent".to_string(), - )); - } - - let elem = Element::deserialize(result_set[0].value.as_slice())?; - let child_hash = match elem { - Element::Tree(..) | Element::SumTree(..) => Ok(result_set[0].proof), - t => Err(Error::InvalidProof(format!( - "intermediate proofs should be for trees, got {}", - t.type_str() - ))), - }?; - - let combined_root_hash = combine_hash( - value_hash_fn(&result_set[0].value).value(), - expected_root_hash, - ) - .value() - .to_owned(); - if child_hash != combined_root_hash { - return Err(Error::InvalidProof(format!( - "Bad path: tree hash does not have expected hash, got {}, expected {}", - hex::encode(child_hash), - hex::encode(combined_root_hash) - ))); - } - - *expected_root_hash = proof_result.0; - - split_path = path_slice.split_last(); - } - - Ok(*expected_root_hash) - } - - /// Execute a merk proof, update the state when a sized proof is - /// encountered i.e. update the limit, offset and result set values - fn execute_merk_proof( - &mut self, - proof_token_type: ProofTokenType, - proof: &[u8], - query: &Query, - left_to_right: bool, - path: Path, - ) -> Result<(CryptoHash, Option), Error> { - let is_sized_proof = proof_token_type == ProofTokenType::SizedMerk; - let mut limit = None; - let mut offset = None; - - if is_sized_proof { - limit = self.limit; - offset = self.offset; - } - - let (hash, result) = - grovedb_merk::execute_proof(proof, query, limit, offset, left_to_right) - .unwrap() - .map_err(|e| { - eprintln!("{e}"); - Error::InvalidProof("invalid proof verification parameters".to_string()) - })?; - - // convert the result set to proved_path_key_values - let proved_path_key_values = - ProvedPathKeyValue::from_proved_key_values(path, result.result_set); - - if is_sized_proof { - self.limit = result.limit; - self.offset = result.offset; - self.result_set.extend(proved_path_key_values); - Ok((hash, None)) - } else { - Ok((hash, Some(proved_path_key_values))) - } - } -} diff --git 
a/grovedb/src/query/mod.rs b/grovedb/src/query/mod.rs index db75144d8..7b4fe42a9 100644 --- a/grovedb/src/query/mod.rs +++ b/grovedb/src/query/mod.rs @@ -1,41 +1,19 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Queries -use std::cmp::Ordering; +use std::{ + borrow::{Cow, Cow::Borrowed}, + cmp::Ordering, + fmt, +}; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_merk::proofs::query::query_item::QueryItem; -use grovedb_merk::proofs::query::SubqueryBranch; +use grovedb_merk::proofs::query::{Key, SubqueryBranch}; #[cfg(any(feature = "full", feature = "verify"))] use grovedb_merk::proofs::Query; +use indexmap::IndexMap; +use crate::operations::proof::util::hex_to_ascii; #[cfg(any(feature = "full", feature = "verify"))] use crate::query_result_type::PathKey; #[cfg(any(feature = "full", feature = "verify"))] @@ -55,6 +33,20 @@ pub struct PathQuery { pub query: SizedQuery, } +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for PathQuery { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "PathQuery {{ path: [")?; + for (i, path_element) in self.path.iter().enumerate() { + if i > 0 { + write!(f, ", ")?; + } + write!(f, "{}", hex_to_ascii(path_element))?; + } + write!(f, "], query: {} }}", self.query) + } +} + #[cfg(any(feature = "full", feature = "verify"))] #[derive(Debug, Clone)] /// Holds a query to apply to a tree and an optional limit/offset value. 
@@ -68,6 +60,20 @@ pub struct SizedQuery { pub offset: Option, } +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for SizedQuery { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "SizedQuery {{ query: {}", self.query)?; + if let Some(limit) = self.limit { + write!(f, ", limit: {}", limit)?; + } + if let Some(offset) = self.offset { + write!(f, ", offset: {}", offset)?; + } + write!(f, " }}") + } +} + #[cfg(any(feature = "full", feature = "verify"))] impl SizedQuery { /// New sized query @@ -269,19 +275,289 @@ impl PathQuery { } } } + + pub fn query_items_at_path(&self, path: &[&[u8]]) -> Option { + fn recursive_query_items<'b>( + query: &'b Query, + path: &[&[u8]], + ) -> Option> { + if path.is_empty() { + return Some(SinglePathSubquery::from_query(query)); + } + + let key = path[0]; + let path_after_top_removed = &path[1..]; + + if let Some(conditional_branches) = &query.conditional_subquery_branches { + for (query_item, subquery_branch) in conditional_branches { + if query_item.contains(key) { + if let Some(subquery_path) = &subquery_branch.subquery_path { + if path_after_top_removed.len() <= subquery_path.len() { + if path_after_top_removed + .iter() + .zip(subquery_path) + .all(|(a, b)| *a == b.as_slice()) + { + return if path_after_top_removed.len() == subquery_path.len() { + subquery_branch.subquery.as_ref().map(|subquery| { + SinglePathSubquery::from_query(subquery) + }) + } else { + let last_path_item = path.len() == subquery_path.len(); + let has_subquery = subquery_branch.subquery.is_some(); + Some(SinglePathSubquery::from_key_when_in_path( + &subquery_path[path_after_top_removed.len()], + last_path_item, + has_subquery, + )) + }; + } + } else if path_after_top_removed + .iter() + .take(subquery_path.len()) + .zip(subquery_path) + .all(|(a, b)| *a == b.as_slice()) + { + if let Some(subquery) = &subquery_branch.subquery { + return recursive_query_items( + subquery, + 
&path_after_top_removed[subquery_path.len()..], + ); + } + } + } else if let Some(subquery) = &subquery_branch.subquery { + return recursive_query_items(subquery, path_after_top_removed); + } + + return None; + } + } + } + + if let Some(subquery_path) = &query.default_subquery_branch.subquery_path { + if path_after_top_removed.len() <= subquery_path.len() { + if path_after_top_removed + .iter() + .zip(subquery_path) + .all(|(a, b)| *a == b.as_slice()) + { + // The paths are equal for example if we had a sub path of + // path : 1 / 2 + // subquery : All items + + // If we are asking what is the subquery when we are at 1 / 2 + // we should get + return if path_after_top_removed.len() == subquery_path.len() { + query + .default_subquery_branch + .subquery + .as_ref() + .map(|subquery| SinglePathSubquery::from_query(subquery)) + } else { + let last_path_item = path.len() == subquery_path.len(); + let has_subquery = query.default_subquery_branch.subquery.is_some(); + Some(SinglePathSubquery::from_key_when_in_path( + &subquery_path[path_after_top_removed.len()], + last_path_item, + has_subquery, + )) + }; + } + } else if path_after_top_removed + .iter() + .take(subquery_path.len()) + .zip(subquery_path) + .all(|(a, b)| *a == b.as_slice()) + { + if let Some(subquery) = &query.default_subquery_branch.subquery { + return recursive_query_items( + subquery, + &path_after_top_removed[subquery_path.len()..], + ); + } + } + } else if let Some(subquery) = &query.default_subquery_branch.subquery { + return recursive_query_items(subquery, path_after_top_removed); + } + + None + } + + let self_path_len = self.path.len(); + let given_path_len = path.len(); + + match given_path_len.cmp(&self_path_len) { + Ordering::Less => { + if path.iter().zip(&self.path).all(|(a, b)| *a == b.as_slice()) { + Some(SinglePathSubquery::from_key_when_in_path( + &self.path[given_path_len], + false, + true, + )) + } else { + None + } + } + Ordering::Equal => { + if path.iter().zip(&self.path).all(|(a, b)| 
*a == b.as_slice()) { + Some(SinglePathSubquery::from_path_query(self)) + } else { + None + } + } + Ordering::Greater => { + if !self.path.iter().zip(path).all(|(a, b)| a.as_slice() == *b) { + return None; + } + recursive_query_items(&self.query.query, &path[self_path_len..]) + } + } + } +} + +#[cfg(any(feature = "full", feature = "verify"))] +#[derive(Debug, Clone, PartialEq)] +pub enum HasSubquery<'a> { + NoSubquery, + Always, + Conditionally(Cow<'a, IndexMap>), +} + +#[cfg(any(feature = "full", feature = "verify"))] +impl<'a> fmt::Display for HasSubquery<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + HasSubquery::NoSubquery => write!(f, "NoSubquery"), + HasSubquery::Always => write!(f, "Always"), + HasSubquery::Conditionally(map) => { + writeln!(f, "Conditionally {{")?; + for (query_item, subquery_branch) in map.iter() { + writeln!(f, " {query_item}: {subquery_branch},")?; + } + write!(f, "}}") + } + } + } +} + +impl<'a> HasSubquery<'a> { + /// Checks to see if we have a subquery on a specific key + pub fn has_subquery_on_key(&self, key: &[u8]) -> bool { + match self { + HasSubquery::NoSubquery => false, + HasSubquery::Conditionally(conditionally) => conditionally + .keys() + .any(|query_item| query_item.contains(key)), + HasSubquery::Always => true, + } + } +} + +/// This represents a query where the items might be borrowed, it is used to get +/// subquery information +#[cfg(any(feature = "full", feature = "verify"))] +#[derive(Debug, Clone, PartialEq)] +pub struct SinglePathSubquery<'a> { + /// Items + pub items: Cow<'a, Vec>, + /// Default subquery branch + pub has_subquery: HasSubquery<'a>, + /// Left to right? 
+ pub left_to_right: bool, + /// In the path of the path_query, or in a subquery path + pub in_path: Option>, +} + +#[cfg(any(feature = "full", feature = "verify"))] +impl<'a> fmt::Display for SinglePathSubquery<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "InternalCowItemsQuery {{")?; + writeln!(f, " items: [")?; + for item in self.items.iter() { + writeln!(f, " {item},")?; + } + writeln!(f, " ]")?; + writeln!(f, " has_subquery: {}", self.has_subquery)?; + writeln!(f, " left_to_right: {}", self.left_to_right)?; + match &self.in_path { + Some(path) => writeln!(f, " in_path: Some({})", hex_to_ascii(path)), + None => writeln!(f, " in_path: None"), + }?; + write!(f, "}}") + } +} + +impl<'a> SinglePathSubquery<'a> { + /// Checks to see if we have a subquery on a specific key + pub fn has_subquery_or_matching_in_path_on_key(&self, key: &[u8]) -> bool { + if self.has_subquery.has_subquery_on_key(key) { + true + } else if let Some(path) = self.in_path.as_ref() { + path.as_slice() == key + } else { + false + } + } + + pub fn from_key_when_in_path( + key: &'a Vec, + subquery_is_last_path_item: bool, + subquery_has_inner_subquery: bool, + ) -> SinglePathSubquery<'a> { + // in this case there should be no in_path, because we are trying to get this + // level of items and nothing underneath + let in_path = if subquery_is_last_path_item && !subquery_has_inner_subquery { + None + } else { + Some(Borrowed(key)) + }; + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(key.clone())]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path, + } + } + + pub fn from_path_query(path_query: &PathQuery) -> SinglePathSubquery { + Self::from_query(&path_query.query.query) + } + + pub fn from_query(query: &Query) -> SinglePathSubquery { + let has_subquery = if query.default_subquery_branch.subquery.is_some() + || query.default_subquery_branch.subquery_path.is_some() + { + HasSubquery::Always + } else if let Some(conditional) 
= query.conditional_subquery_branches.as_ref() { + HasSubquery::Conditionally(Cow::Borrowed(conditional)) + } else { + HasSubquery::NoSubquery + }; + SinglePathSubquery { + items: Cow::Borrowed(&query.items), + has_subquery, + left_to_right: query.left_to_right, + in_path: None, + } + } } #[cfg(feature = "full")] #[cfg(test)] mod tests { - use std::ops::RangeFull; + use std::{borrow::Cow, ops::RangeFull}; - use grovedb_merk::proofs::{query::query_item::QueryItem, Query}; + use grovedb_merk::proofs::{ + query::{query_item::QueryItem, SubqueryBranch}, + Query, + }; + use indexmap::IndexMap; use crate::{ + query::{HasSubquery, SinglePathSubquery}, query_result_type::QueryResultType, tests::{common::compare_result_tuples, make_deep_tree, TEST_LEAF}, - Element, GroveDb, PathQuery, + Element, GroveDb, PathQuery, SizedQuery, }; #[test] @@ -294,7 +570,7 @@ mod tests { let path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query_one); - let proof = temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set_one) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should execute proof"); assert_eq!(result_set_one.len(), 1); @@ -304,7 +580,7 @@ mod tests { let path_query_two = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query_two); - let proof = temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set_two) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set_two.len(), 1); @@ -312,7 +588,10 @@ mod tests { let merged_path_query = PathQuery::merge(vec![&path_query_one, &path_query_two]) .expect("should merge path queries"); - let proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db + 
.prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, result_set_tree) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); assert_eq!(result_set_tree.len(), 2); @@ -330,7 +609,7 @@ mod tests { let path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query_one); - let proof = temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set_one) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should execute proof"); assert_eq!(result_set_one.len(), 1); @@ -340,7 +619,7 @@ mod tests { let path_query_two = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()], query_two); - let proof = temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set_two) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set_two.len(), 1); @@ -350,7 +629,10 @@ mod tests { assert_eq!(merged_path_query.path, vec![TEST_LEAF.to_vec()]); assert_eq!(merged_path_query.query.query.items.len(), 2); - let proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db + .prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, result_set_merged) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); @@ -374,7 +656,7 @@ mod tests { query_one.clone(), ); - let proof = temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set_one) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should execute proof"); assert_eq!(result_set_one.len(), 3); @@ -391,7 +673,7 @@ mod tests { query_two.clone(), ); - let proof = 
temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set_two) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set_two.len(), 2); @@ -408,7 +690,10 @@ mod tests { query_three.clone(), ); - let proof = temp_db.prove_query(&path_query_three).unwrap().unwrap(); + let proof = temp_db + .prove_query(&path_query_three, None) + .unwrap() + .unwrap(); let (_, result_set_two) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_three) .expect("should execute proof"); assert_eq!(result_set_two.len(), 2); @@ -544,7 +829,10 @@ mod tests { .expect("expected to get results"); assert_eq!(result_set_merged.len(), 7); - let proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db + .prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, proved_result_set_merged) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); @@ -589,7 +877,7 @@ mod tests { query_one, ); - let proof = temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set_one) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should execute proof"); assert_eq!(result_set_one.len(), 6); @@ -606,7 +894,7 @@ mod tests { query_two, ); - let proof = temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set_two) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set_two.len(), 2); @@ -615,7 +903,10 @@ mod tests { .expect("expect to merge path queries"); assert_eq!(merged_path_query.path, vec![b"deep_leaf".to_vec()]); - let proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db 
+ .prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, result_set_merged) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); @@ -655,7 +946,7 @@ mod tests { let path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query_one); - let proof = temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should execute proof"); assert_eq!(result_set.len(), 1); @@ -665,7 +956,7 @@ mod tests { let path_query_two = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query_two); - let proof = temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set.len(), 1); @@ -677,7 +968,10 @@ mod tests { query_three, ); - let proof = temp_db.prove_query(&path_query_three).unwrap().unwrap(); + let proof = temp_db + .prove_query(&path_query_three, None) + .unwrap() + .unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_three) .expect("should execute proof"); assert_eq!(result_set.len(), 2); @@ -686,7 +980,10 @@ mod tests { PathQuery::merge(vec![&path_query_one, &path_query_two, &path_query_three]) .expect("should merge three queries"); - let proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db + .prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); assert_eq!(result_set.len(), 4); @@ -705,7 +1002,7 @@ mod tests { let path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), 
b"innertree".to_vec()], query_one); - let proof = temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should execute proof"); assert_eq!(result_set.len(), 1); @@ -715,7 +1012,7 @@ mod tests { let path_query_two = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query_two); - let proof = temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set.len(), 1); @@ -723,7 +1020,10 @@ mod tests { let merged_path_query = PathQuery::merge(vec![&path_query_one, &path_query_two]) .expect("should merge three queries"); - let proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db + .prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); assert_eq!(result_set.len(), 2); @@ -738,7 +1038,7 @@ mod tests { query_one, ); - let proof = temp_db.prove_query(&path_query_one).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_one, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_one) .expect("should execute proof"); assert_eq!(result_set.len(), 2); @@ -755,7 +1055,7 @@ mod tests { query_one, ); - let proof = temp_db.prove_query(&path_query_two).unwrap().unwrap(); + let proof = temp_db.prove_query(&path_query_two, None).unwrap().unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query_two) .expect("should execute proof"); assert_eq!(result_set.len(), 3); @@ -838,9 +1138,450 @@ mod tests { .expect("expected to get results"); 
assert_eq!(result_set_merged.len(), 4); - let proof = temp_db.prove_query(&merged_path_query).unwrap().unwrap(); + let proof = temp_db + .prove_query(&merged_path_query, None) + .unwrap() + .unwrap(); let (_, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &merged_path_query) .expect("should execute proof"); assert_eq!(result_set.len(), 4); } + + #[test] + fn test_path_query_items_with_subquery_and_inner_subquery_path() { + // Constructing the keys and paths + let root_path_key_1 = b"root_path_key_1".to_vec(); + let root_path_key_2 = b"root_path_key_2".to_vec(); + let root_item_key = b"root_item_key".to_vec(); + let subquery_path_key_1 = b"subquery_path_key_1".to_vec(); + let subquery_path_key_2 = b"subquery_path_key_2".to_vec(); + let subquery_item_key = b"subquery_item_key".to_vec(); + let inner_subquery_path_key = b"inner_subquery_path_key".to_vec(); + + // Constructing the subquery + let subquery = Query { + items: vec![QueryItem::Key(subquery_item_key.clone())], + default_subquery_branch: SubqueryBranch { + subquery_path: Some(vec![inner_subquery_path_key.clone()]), + subquery: None, + }, + left_to_right: true, + conditional_subquery_branches: None, + }; + + // Constructing the PathQuery + let path_query = PathQuery { + path: vec![root_path_key_1.clone(), root_path_key_2.clone()], + query: SizedQuery { + query: Query { + items: vec![QueryItem::Key(root_item_key.clone())], + default_subquery_branch: SubqueryBranch { + subquery_path: Some(vec![ + subquery_path_key_1.clone(), + subquery_path_key_2.clone(), + ]), + subquery: Some(Box::new(subquery)), + }, + left_to_right: true, + conditional_subquery_branches: None, + }, + limit: Some(2), + offset: None, + }, + }; + + { + let path = vec![root_path_key_1.as_slice()]; + let first = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + first, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(root_path_key_2.clone())]), + has_subquery: 
HasSubquery::NoSubquery, + left_to_right: true, + in_path: Some(Cow::Borrowed(&root_path_key_2)), + } + ); + } + + { + let path = vec![root_path_key_1.as_slice(), root_path_key_2.as_slice()]; + + let second = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + second, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(root_item_key.clone())]), + has_subquery: HasSubquery::Always, /* This is correct because there's a + * subquery for one item */ + left_to_right: true, + in_path: None, + } + ); + } + + { + let path = vec![ + root_path_key_1.as_slice(), + root_path_key_2.as_slice(), + root_item_key.as_slice(), + ]; + + let third = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + third, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(subquery_path_key_1.clone())]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: Some(Cow::Borrowed(&subquery_path_key_1)) + } + ); + } + + { + let path = vec![ + root_path_key_1.as_slice(), + root_path_key_2.as_slice(), + root_item_key.as_slice(), + subquery_path_key_1.as_slice(), + ]; + + let fourth = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + fourth, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(subquery_path_key_2.clone())]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: Some(Cow::Borrowed(&subquery_path_key_2)) + } + ); + } + + { + let path = vec![ + root_path_key_1.as_slice(), + root_path_key_2.as_slice(), + root_item_key.as_slice(), + subquery_path_key_1.as_slice(), + subquery_path_key_2.as_slice(), + ]; + + let fifth = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + fifth, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(subquery_item_key.clone())]), + has_subquery: HasSubquery::Always, /* This means that we should be able to + * add items 
underneath */ + left_to_right: true, + in_path: None, + } + ); + } + + { + let path = vec![ + root_path_key_1.as_slice(), + root_path_key_2.as_slice(), + root_item_key.as_slice(), + subquery_path_key_1.as_slice(), + subquery_path_key_2.as_slice(), + subquery_item_key.as_slice(), + ]; + + let sixth = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + sixth, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(inner_subquery_path_key.clone())]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: None, + } + ); + } + } + + #[test] + fn test_path_query_items_with_subquery_path() { + // Constructing the keys and paths + let root_path_key = b"higher".to_vec(); + let dash_key = b"dash".to_vec(); + let quantum_key = b"quantum".to_vec(); + + // Constructing the PathQuery + let path_query = PathQuery { + path: vec![root_path_key.clone()], + query: SizedQuery { + query: Query { + items: vec![QueryItem::RangeFull(RangeFull)], + default_subquery_branch: SubqueryBranch { + subquery_path: Some(vec![quantum_key.clone()]), + subquery: None, + }, + left_to_right: true, + conditional_subquery_branches: None, + }, + limit: Some(100), + offset: None, + }, + }; + + // Validating the PathQuery structure + { + let path = vec![root_path_key.as_slice()]; + let first = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + first, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::RangeFull(RangeFull)]), + has_subquery: HasSubquery::Always, + left_to_right: true, + in_path: None, + } + ); + } + + { + let path = vec![root_path_key.as_slice(), dash_key.as_slice()]; + + let second = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + second, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(quantum_key.clone())]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: None, // There should be no path 
because we are at the end of the path + } + ); + } + } + + #[test] + fn test_conditional_subquery_refusing_elements() { + let empty_vec: Vec = vec![]; + let zero_vec: Vec = vec![0]; + + let mut conditional_subquery_branches = IndexMap::new(); + conditional_subquery_branches.insert( + QueryItem::Key(b"".to_vec()), + SubqueryBranch { + subquery_path: Some(vec![zero_vec.clone()]), + subquery: Some(Query::new().into()), + }, + ); + + let path_query = PathQuery { + path: vec![TEST_LEAF.to_vec()], + query: SizedQuery { + query: Query { + items: vec![QueryItem::RangeFull(RangeFull)], + default_subquery_branch: SubqueryBranch { + subquery_path: Some(vec![zero_vec.clone()]), + subquery: None, + }, + left_to_right: true, + conditional_subquery_branches: Some(conditional_subquery_branches), + }, + limit: Some(100), + offset: None, + }, + }; + + { + let path = vec![TEST_LEAF, empty_vec.as_slice()]; + + let second = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + second, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(zero_vec.clone())]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: Some(Cow::Borrowed(&zero_vec)), + } + ); + } + } + + #[test] + fn test_complex_path_query_with_conditional_subqueries() { + let identity_id = + hex::decode("8b8948a6801501bbe0431e3d994dcf71cf5a2a0939fe51b0e600076199aba4fb") + .unwrap(); + + let key_20 = vec![20u8]; + + let key_80 = vec![80u8]; + + let inner_conditional_subquery_branches = IndexMap::from([( + QueryItem::Key(vec![80]), + SubqueryBranch { + subquery_path: None, + subquery: Some(Box::new(Query { + items: vec![QueryItem::RangeFull(RangeFull)], + default_subquery_branch: SubqueryBranch { + subquery_path: None, + subquery: None, + }, + left_to_right: true, + conditional_subquery_branches: None, + })), + }, + )]); + + let conditional_subquery_branches = IndexMap::from([ + ( + QueryItem::Key(vec![]), + SubqueryBranch { + subquery_path: None, + 
subquery: Some(Box::new(Query { + items: vec![QueryItem::Key(identity_id.to_vec())], + default_subquery_branch: SubqueryBranch { + subquery_path: None, + subquery: None, + }, + left_to_right: true, + conditional_subquery_branches: None, + })), + }, + ), + ( + QueryItem::Key(vec![20]), + SubqueryBranch { + subquery_path: Some(vec![identity_id.to_vec()]), + subquery: Some(Box::new(Query { + items: vec![QueryItem::Key(vec![80]), QueryItem::Key(vec![0xc0])], + default_subquery_branch: SubqueryBranch { + subquery_path: None, + subquery: None, + }, + conditional_subquery_branches: Some( + inner_conditional_subquery_branches.clone(), + ), + left_to_right: true, + })), + }, + ), + ]); + + let path_query = PathQuery { + path: vec![], + query: SizedQuery { + query: Query { + items: vec![QueryItem::Key(vec![20]), QueryItem::Key(vec![96])], + default_subquery_branch: SubqueryBranch { + subquery_path: None, + subquery: None, + }, + conditional_subquery_branches: Some(conditional_subquery_branches.clone()), + left_to_right: true, + }, + limit: Some(100), + offset: None, + }, + }; + + { + let path = vec![]; + let first = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + first, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(vec![20]), QueryItem::Key(vec![96]),]), + has_subquery: HasSubquery::Conditionally(Cow::Borrowed( + &conditional_subquery_branches + )), + left_to_right: true, + in_path: None, + } + ); + } + + { + let path = vec![key_20.as_slice()]; + let query = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + query, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(identity_id.clone()),]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: Some(Cow::Borrowed(&identity_id)), + } + ); + } + + { + let path = vec![key_20.as_slice(), identity_id.as_slice()]; + let query = path_query + .query_items_at_path(&path) + .expect("expected query 
items"); + + assert_eq!( + query, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::Key(vec![80]), QueryItem::Key(vec![0xc0]),]), + has_subquery: HasSubquery::Conditionally(Cow::Borrowed( + &inner_conditional_subquery_branches + )), + left_to_right: true, + in_path: None, + } + ); + } + + { + let path = vec![key_20.as_slice(), identity_id.as_slice(), key_80.as_slice()]; + let query = path_query + .query_items_at_path(&path) + .expect("expected query items"); + + assert_eq!( + query, + SinglePathSubquery { + items: Cow::Owned(vec![QueryItem::RangeFull(RangeFull)]), + has_subquery: HasSubquery::NoSubquery, + left_to_right: true, + in_path: None, + } + ); + } + } } diff --git a/grovedb/src/query_result_type.rs b/grovedb/src/query_result_type.rs index 289ffb268..6bf8bd5b1 100644 --- a/grovedb/src/query_result_type.rs +++ b/grovedb/src/query_result_type.rs @@ -1,41 +1,19 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Determines the query result form use std::{ collections::{BTreeMap, HashMap}, + fmt, vec::IntoIter, }; pub use grovedb_merk::proofs::query::{Key, Path, PathKey}; -use crate::{operations::proof::util::ProvedPathKeyValue, Element, Error}; +use crate::{ + operations::proof::util::{ + hex_to_ascii, path_hex_to_ascii, ProvedPathKeyOptionalValue, ProvedPathKeyValue, + }, + Element, Error, +}; #[derive(Copy, Clone)] /// Query result type @@ -48,13 +26,116 @@ pub enum QueryResultType { QueryPathKeyElementTrioResultType, } +impl fmt::Display for QueryResultType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + QueryResultType::QueryElementResultType => write!(f, "QueryElementResultType"), + QueryResultType::QueryKeyElementPairResultType => { + write!(f, "QueryKeyElementPairResultType") + } + QueryResultType::QueryPathKeyElementTrioResultType => { + write!(f, "QueryPathKeyElementTrioResultType") + } + } + } +} + /// Query result elements -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Eq, PartialEq)] pub struct QueryResultElements { /// Elements pub elements: Vec, } +impl fmt::Display for QueryResultElements { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "QueryResultElements {{")?; + for (index, element) in self.elements.iter().enumerate() { + writeln!(f, " {}: {}", index, element)?; + } + write!(f, "}}") + } +} + +#[derive(Debug, Clone)] +pub enum BTreeMapLevelResultOrItem { + BTreeMapLevelResult(BTreeMapLevelResult), + ResultItem(Element), +} + +/// BTreeMap level result +#[derive(Debug, Clone)] +pub struct BTreeMapLevelResult { + pub key_values: BTreeMap, +} + +impl fmt::Display for BTreeMapLevelResultOrItem { + fn fmt(&self, f: 
&mut fmt::Formatter<'_>) -> fmt::Result { + match self { + BTreeMapLevelResultOrItem::BTreeMapLevelResult(result) => { + write!(f, "{}", result) + } + BTreeMapLevelResultOrItem::ResultItem(element) => { + write!(f, "{}", element) + } + } + } +} + +impl fmt::Display for BTreeMapLevelResult { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "BTreeMapLevelResult {{")?; + self.fmt_inner(f, 1)?; + write!(f, "}}") + } +} + +impl BTreeMapLevelResult { + fn fmt_inner(&self, f: &mut fmt::Formatter<'_>, indent: usize) -> fmt::Result { + for (key, value) in &self.key_values { + write!(f, "{:indent$}", "", indent = indent * 2)?; + write!(f, "{}: ", hex_to_ascii(key))?; + match value { + BTreeMapLevelResultOrItem::BTreeMapLevelResult(result) => { + writeln!(f, "BTreeMapLevelResult {{")?; + result.fmt_inner(f, indent + 1)?; + write!(f, "{:indent$}}}", "", indent = indent * 2)?; + } + BTreeMapLevelResultOrItem::ResultItem(element) => { + write!(f, "{}", element)?; + } + } + writeln!(f)?; + } + Ok(()) + } +} + +impl BTreeMapLevelResult { + pub fn len_of_values_at_path(&self, path: &[&[u8]]) -> u16 { + let mut current = self; + + // Traverse the path + for segment in path { + match current.key_values.get(*segment) { + Some(BTreeMapLevelResultOrItem::BTreeMapLevelResult(next_level)) => { + current = next_level; + } + Some(BTreeMapLevelResultOrItem::ResultItem(_)) => { + // We've reached a ResultItem before the end of the path + return 0; + } + None => { + // Path not found + return 0; + } + } + } + + current.key_values.len() as u16 + } +} + impl QueryResultElements { /// New pub fn new() -> Self { @@ -62,7 +143,7 @@ impl QueryResultElements { } /// From elements - pub(crate) fn from_elements(elements: Vec) -> Self { + pub fn from_elements(elements: Vec) -> Self { QueryResultElements { elements } } @@ -209,6 +290,21 @@ impl QueryResultElements { map } + /// To path to key, elements btree map + pub fn to_path_to_key_elements_btree_map(self) -> BTreeMap> { + 
let mut map: BTreeMap> = BTreeMap::new(); + + for result_item in self.elements.into_iter() { + if let QueryResultElement::PathKeyElementTrioResultItem((path, key, element)) = + result_item + { + map.entry(path).or_default().insert(key, element); + } + } + + map + } + /// To last path to key, elements btree map pub fn to_last_path_to_key_elements_btree_map(self) -> BTreeMap> { let mut map: BTreeMap, BTreeMap> = BTreeMap::new(); @@ -218,9 +314,7 @@ impl QueryResultElements { result_item { if let Some(last) = path.pop() { - map.entry(last) - .or_insert_with(BTreeMap::new) - .insert(key, element); + map.entry(last).or_default().insert(key, element); } } } @@ -246,6 +340,60 @@ impl QueryResultElements { map } + /// To last path to elements btree map + /// This is useful if the key is not import + pub fn to_btree_map_level_results(self) -> BTreeMapLevelResult { + fn insert_recursive( + current_level: &mut BTreeMapLevelResult, + mut path: std::vec::IntoIter>, + key: Vec, + element: Element, + ) { + if let Some(segment) = path.next() { + let next_level = current_level.key_values.entry(segment).or_insert_with(|| { + BTreeMapLevelResultOrItem::BTreeMapLevelResult(BTreeMapLevelResult { + key_values: BTreeMap::new(), + }) + }); + + match next_level { + BTreeMapLevelResultOrItem::BTreeMapLevelResult(inner) => { + insert_recursive(inner, path, key, element); + } + BTreeMapLevelResultOrItem::ResultItem(_) => { + // This shouldn't happen in a well-formed structure, but we'll handle it + // anyway + *next_level = + BTreeMapLevelResultOrItem::BTreeMapLevelResult(BTreeMapLevelResult { + key_values: BTreeMap::new(), + }); + if let BTreeMapLevelResultOrItem::BTreeMapLevelResult(inner) = next_level { + insert_recursive(inner, path, key, element); + } + } + } + } else { + current_level + .key_values + .insert(key, BTreeMapLevelResultOrItem::ResultItem(element)); + } + } + + let mut root = BTreeMapLevelResult { + key_values: BTreeMap::new(), + }; + + for result_item in self.elements { + 
if let QueryResultElement::PathKeyElementTrioResultItem((path, key, element)) = + result_item + { + insert_recursive(&mut root, path.into_iter(), key, element); + } + } + + root + } + /// To last path to keys btree map /// This is useful if for example the element is a sum item and isn't /// important Used in Platform Drive for getting voters for multiple @@ -257,9 +405,9 @@ impl QueryResultElements { if let QueryResultElement::PathKeyElementTrioResultItem((mut path, key, _)) = result_item { - if let Some(_) = path.pop() { + if path.pop().is_some() { if let Some(last) = path.pop() { - map.entry(last).or_insert_with(Vec::new).push(key); + map.entry(last).or_default().push(key); } } } @@ -276,7 +424,7 @@ impl Default for QueryResultElements { } /// Query result element -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Eq, PartialEq)] pub enum QueryResultElement { /// Element result item ElementResultItem(Element), @@ -286,6 +434,33 @@ pub enum QueryResultElement { PathKeyElementTrioResultItem(PathKeyElementTrio), } +impl fmt::Display for QueryResultElement { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + QueryResultElement::ElementResultItem(element) => { + write!(f, "ElementResultItem({})", element) + } + QueryResultElement::KeyElementPairResultItem((key, element)) => { + write!( + f, + "KeyElementPairResultItem(key: {}, element: {})", + hex_to_ascii(key), + element + ) + } + QueryResultElement::PathKeyElementTrioResultItem((path, key, element)) => { + write!( + f, + "PathKeyElementTrioResultItem(path: {}, key: {}, element: {})", + path_hex_to_ascii(path), + hex_to_ascii(key), + element + ) + } + } + } +} + #[cfg(feature = "full")] impl QueryResultElement { /// Map element @@ -341,6 +516,23 @@ impl TryFrom for PathKeyOptionalElementTrio { } } +#[cfg(any(feature = "full", feature = "verify"))] +impl TryFrom for PathKeyOptionalElementTrio { + type Error = Error; + + fn try_from(proved_path_key_value: ProvedPathKeyOptionalValue) -> Result 
{ + let element = proved_path_key_value + .value + .map(|e| Element::deserialize(e.as_slice())) + .transpose()?; + Ok(( + proved_path_key_value.path, + proved_path_key_value.key, + element, + )) + } +} + #[cfg(feature = "full")] #[cfg(test)] mod tests { diff --git a/grovedb/src/reference_path.rs b/grovedb/src/reference_path.rs index 38c3f1473..09fc16846 100644 --- a/grovedb/src/reference_path.rs +++ b/grovedb/src/reference_path.rs @@ -86,6 +86,57 @@ pub enum ReferencePathType { SiblingReference(Vec), } +// Helper function to display paths +fn display_path(path: &[Vec]) -> String { + path.iter() + .map(hex::encode) + .collect::>() + .join("/") +} + +impl fmt::Display for ReferencePathType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ReferencePathType::AbsolutePathReference(path) => { + write!(f, "AbsolutePathReference({})", display_path(path)) + } + ReferencePathType::UpstreamRootHeightReference(height, path) => { + write!( + f, + "UpstreamRootHeightReference({}, {})", + height, + display_path(path) + ) + } + ReferencePathType::UpstreamRootHeightWithParentPathAdditionReference(height, path) => { + write!( + f, + "UpstreamRootHeightWithParentPathAdditionReference({}, {})", + height, + display_path(path) + ) + } + ReferencePathType::UpstreamFromElementHeightReference(height, path) => { + write!( + f, + "UpstreamFromElementHeightReference({}, {})", + height, + display_path(path) + ) + } + ReferencePathType::CousinReference(key) => { + write!(f, "CousinReference({})", hex::encode(key)) + } + ReferencePathType::RemovedCousinReference(path) => { + write!(f, "RemovedCousinReference({})", display_path(path)) + } + ReferencePathType::SiblingReference(key) => { + write!(f, "SiblingReference({})", hex::encode(key)) + } + } + } +} + #[cfg(any(feature = "full", feature = "verify"))] impl ReferencePathType { /// Given the reference path type and the current qualified path (path+key), @@ -129,7 +180,7 @@ pub fn 
path_from_reference_qualified_path_type>( ) -> Result>, Error> { match current_qualified_path.split_last() { None => Err(Error::CorruptedPath( - "qualified path should always have an element", + "qualified path should always have an element".to_string(), )), Some((key, path)) => { path_from_reference_path_type(reference_path_type, path, Some(key.as_ref())) @@ -168,7 +219,7 @@ pub fn path_from_reference_path_type>( no_of_elements_to_keep, mut path, ) => { - if usize::from(no_of_elements_to_keep) > current_path.len() || current_path.len() == 0 { + if usize::from(no_of_elements_to_keep) > current_path.len() || current_path.is_empty() { return Err(Error::InvalidInput( "reference stored path cannot satisfy reference constraints", )); @@ -478,7 +529,7 @@ mod tests { ); let proof = db - .prove_query(&path_query) + .prove_query(&path_query, None) .unwrap() .expect("should generate proof"); let (hash, result) = diff --git a/grovedb/src/replication.rs b/grovedb/src/replication.rs index 5f7db1f3e..b65338686 100644 --- a/grovedb/src/replication.rs +++ b/grovedb/src/replication.rs @@ -364,7 +364,7 @@ impl GroveDb { || !state_sync_info.processed_prefixes.is_empty() { return Err(Error::InternalError( - "GroveDB has already started a snapshot syncing", + "GroveDB has already started a snapshot syncing".to_string(), )); } @@ -384,7 +384,9 @@ impl GroveDb { .insert(root_prefix, root_prefix_state_sync_info); state_sync_info.app_hash = app_hash; } else { - return Err(Error::InternalError("Unable to open merk for replication")); + return Err(Error::InternalError( + "Unable to open merk for replication".to_string(), + )); } Ok(state_sync_info) @@ -424,7 +426,9 @@ impl GroveDb { replication::util_split_global_chunk_id(global_chunk_id, &state_sync_info.app_hash)?; if state_sync_info.current_prefixes.is_empty() { - return Err(Error::InternalError("GroveDB is not in syncing mode")); + return Err(Error::InternalError( + "GroveDB is not in syncing mode".to_string(), + )); } if let 
Some(subtree_state_sync) = state_sync_info.current_prefixes.remove(&chunk_prefix) { if let Ok((res, mut new_subtree_state_sync)) = @@ -453,12 +457,16 @@ impl GroveDb { // Subtree is finished. We can save it. match new_subtree_state_sync.restorer.take() { - None => Err(Error::InternalError("Unable to finalize subtree")), + None => Err(Error::InternalError( + "Unable to finalize subtree".to_string(), + )), Some(restorer) => { if (new_subtree_state_sync.num_processed_chunks > 0) && (restorer.finalize().is_err()) { - return Err(Error::InternalError("Unable to finalize Merk")); + return Err(Error::InternalError( + "Unable to finalize Merk".to_string(), + )); } state_sync_info.processed_prefixes.insert(chunk_prefix); @@ -479,16 +487,20 @@ impl GroveDb { next_chunk_ids.extend(res); Ok((next_chunk_ids, new_state_sync_info)) } else { - Err(Error::InternalError("Unable to discover Subtrees")) + Err(Error::InternalError( + "Unable to discover Subtrees".to_string(), + )) } } } } } else { - Err(Error::InternalError("Unable to process incoming chunk")) + Err(Error::InternalError( + "Unable to process incoming chunk".to_string(), + )) } } else { - Err(Error::InternalError("Invalid incoming prefix")) + Err(Error::InternalError("Invalid incoming prefix".to_string())) } } @@ -510,7 +522,7 @@ impl GroveDb { Some(restorer) => { if !state_sync_info.pending_chunks.contains(chunk_id) { return Err(Error::InternalError( - "Incoming global_chunk_id not expected", + "Incoming global_chunk_id not expected".to_string(), )); } state_sync_info.pending_chunks.remove(chunk_id); @@ -529,7 +541,7 @@ impl GroveDb { } _ => { return Err(Error::InternalError( - "Unable to process incoming chunk", + "Unable to process incoming chunk".to_string(), )); } }; @@ -543,7 +555,9 @@ impl GroveDb { } } _ => { - return Err(Error::InternalError("Invalid internal state (restorer")); + return Err(Error::InternalError( + "Invalid internal state (restorer".to_string(), + )); } } @@ -593,7 +607,9 @@ impl GroveDb { let 
root_chunk_prefix = prefix.to_vec(); res.push(root_chunk_prefix.to_vec()); } else { - return Err(Error::InternalError("Unable to open Merk for replication")); + return Err(Error::InternalError( + "Unable to open Merk for replication".to_string(), + )); } } } diff --git a/grovedb/src/tests/common.rs b/grovedb/src/tests/common.rs index 10f05b804..2fe8dfdef 100644 --- a/grovedb/src/tests/common.rs +++ b/grovedb/src/tests/common.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Common tests use grovedb_path::SubtreePath; @@ -48,7 +20,7 @@ fn deserialize_and_extract_item_bytes(raw_bytes: &[u8]) -> Result, Error let elem = Element::deserialize(raw_bytes)?; match elem { Element::Item(item, _) => Ok(item), - _ => Err(Error::CorruptedPath("expected only item type")), + _ => Err(Error::CorruptedPath("expected only item type".to_string())), } } diff --git a/grovedb/src/tests/mod.rs b/grovedb/src/tests/mod.rs index 95e0d2b1f..43a9c34de 100644 --- a/grovedb/src/tests/mod.rs +++ b/grovedb/src/tests/mod.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Tests pub mod common; @@ -47,8 +19,9 @@ use tempfile::TempDir; use self::common::EMPTY_PATH; use super::*; use crate::{ - query_result_type::QueryResultType::QueryKeyElementPairResultType, - reference_path::ReferencePathType, tests::common::compare_result_tuples, + query_result_type::{QueryResultType, QueryResultType::QueryKeyElementPairResultType}, + reference_path::ReferencePathType, + tests::common::compare_result_tuples, }; pub const TEST_LEAF: &[u8] = b"test_leaf"; @@ -157,6 +130,10 @@ pub fn make_deep_tree() -> TempGroveDb { // deeper_4 // k10,v10 // k11,v11 + // deeper_5 + // k12,v12 + // k13,v13 + // k14,v14 // Insert elements into grovedb instance let temp_db = make_test_grovedb(); @@ -339,6 +316,16 @@ pub fn make_deep_tree() -> TempGroveDb { ) .unwrap() .expect("successful subtree insert"); + temp_db + .insert( + [DEEP_LEAF, b"deep_node_2"].as_ref(), + b"deeper_5", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); // Insert level 3 nodes temp_db .insert( @@ -452,739 +439,511 @@ pub fn make_deep_tree() -> TempGroveDb { .unwrap() .expect("successful subtree insert"); temp_db -} - -#[test] -fn test_init() { - let tmp_dir = TempDir::new().unwrap(); - GroveDb::open(tmp_dir).expect("empty tree is ok"); -} - -#[test] -fn test_element_with_flags() { - let db = make_test_grovedb(); - - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("should insert subtree successfully"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"elem1", - Element::new_item(b"flagless".to_vec()), - None, - None, - ) - .unwrap() - .expect("should insert subtree successfully"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"elem2", - Element::new_item_with_flags(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())), - None, - None, - ) - .unwrap() - .expect("should insert subtree successfully"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"elem3", - 
Element::new_tree_with_flags(None, Some([1].to_vec())), - None, - None, - ) - .unwrap() - .expect("should insert subtree successfully"); - db.insert( - [TEST_LEAF, b"key1", b"elem3"].as_ref(), - b"elem4", - Element::new_reference_with_flags( - ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"key1".to_vec(), - b"elem2".to_vec(), - ]), - Some([9].to_vec()), - ), - None, - None, - ) - .unwrap() - .expect("should insert subtree successfully"); - - let element_without_flag = db - .get([TEST_LEAF, b"key1"].as_ref(), b"elem1", None) - .unwrap() - .expect("should get successfully"); - let element_with_flag = db - .get([TEST_LEAF, b"key1"].as_ref(), b"elem2", None) - .unwrap() - .expect("should get successfully"); - let tree_element_with_flag = db - .get([TEST_LEAF, b"key1"].as_ref(), b"elem3", None) - .unwrap() - .expect("should get successfully"); - let flagged_ref_follow = db - .get([TEST_LEAF, b"key1", b"elem3"].as_ref(), b"elem4", None) - .unwrap() - .expect("should get successfully"); - - let mut query = Query::new(); - query.insert_key(b"elem4".to_vec()); - let path_query = PathQuery::new( - vec![TEST_LEAF.to_vec(), b"key1".to_vec(), b"elem3".to_vec()], - SizedQuery::new(query, None, None), - ); - let (flagged_ref_no_follow, _) = db - .query_raw( - &path_query, - true, - true, - true, - QueryKeyElementPairResultType, - None, - ) - .unwrap() - .expect("should get successfully"); - - assert_eq!( - element_without_flag, - Element::Item(b"flagless".to_vec(), None) - ); - assert_eq!( - element_with_flag, - Element::Item(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())) - ); - assert_eq!(tree_element_with_flag.get_flags(), &Some([1].to_vec())); - assert_eq!( - flagged_ref_follow, - Element::Item(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())) - ); - assert_eq!( - flagged_ref_no_follow.to_key_elements()[0], - ( - b"elem4".to_vec(), - Element::Reference( - ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"key1".to_vec(), 
- b"elem2".to_vec() - ]), - None, - Some([9].to_vec()) - ) + .insert( + [DEEP_LEAF, b"deep_node_2", b"deeper_5"].as_ref(), + b"key12", + Element::new_item(b"value12".to_vec()), + None, + None, ) - ); - - // Test proofs with flags - let mut query = Query::new(); - query.insert_all(); - - let path_query = PathQuery::new( - vec![TEST_LEAF.to_vec(), b"key1".to_vec()], - SizedQuery::new(query, None, None), - ); - let proof = db - .prove_query(&path_query) - .unwrap() - .expect("should successfully create proof"); - let (root_hash, result_set) = - GroveDb::verify_query_raw(&proof, &path_query).expect("should verify proof"); - assert_eq!(root_hash, db.grove_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - assert_eq!( - Element::deserialize(&result_set[0].value).expect("should deserialize element"), - Element::Item(b"flagless".to_vec(), None) - ); - assert_eq!( - Element::deserialize(&result_set[1].value).expect("should deserialize element"), - Element::Item(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())) - ); - assert_eq!( - Element::deserialize(&result_set[2].value) - .expect("should deserialize element") - .get_flags(), - &Some([1].to_vec()) - ); -} - -#[test] -fn test_cannot_update_populated_tree_item() { - // This test shows that you cannot update a tree item - // in a way that disconnects it's root hash from that of - // the merk it points to. 
- let db = make_deep_tree(); - - let old_element = db - .get([TEST_LEAF].as_ref(), b"innertree", None) - .unwrap() - .expect("should fetch item"); - - let new_element = Element::empty_tree(); - db.insert( - [TEST_LEAF].as_ref(), - b"innertree", - new_element.clone(), - None, - None, - ) - .unwrap() - .expect_err("should not override tree"); - - let current_element = db - .get([TEST_LEAF].as_ref(), b"innertree", None) .unwrap() - .expect("should fetch item"); - - assert_eq!(current_element, old_element); - assert_ne!(current_element, new_element); -} - -#[test] -fn test_changes_propagated() { - let db = make_test_grovedb(); - let old_hash = db.root_hash(None).unwrap().unwrap(); - let element = Element::new_item(b"ayy".to_vec()); - - // Insert some nested subtrees - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 1 insert"); - - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 2 insert"); - - db.insert( - [TEST_LEAF, b"key1", b"key2"].as_ref(), - b"key3", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - - assert_eq!( - db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) - .unwrap() - .expect("successful get"), - element - ); - assert_ne!(old_hash, db.root_hash(None).unwrap().unwrap()); -} - -// TODO: Add solid test cases to this - -#[test] -fn test_references() { - let db = make_test_grovedb(); - db.insert( - [TEST_LEAF].as_ref(), - b"merk_1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"merk_1"].as_ref(), - b"key1", - Element::new_item(b"value1".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"merk_1"].as_ref(), - b"key2", - Element::new_item(b"value2".to_vec()), - None, - None, - ) - .unwrap() - 
.expect("successful subtree insert"); - - db.insert( - [TEST_LEAF].as_ref(), - b"merk_2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - // db.insert([TEST_LEAF, b"merk_2"].as_ref(), b"key2", - // Element::new_item(b"value2".to_vec()), None).expect("successful subtree - // insert"); - db.insert( - [TEST_LEAF, b"merk_2"].as_ref(), - b"key1", - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"merk_1".to_vec(), - b"key1".to_vec(), - ])), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"merk_2"].as_ref(), - b"key2", - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"merk_1".to_vec(), - b"key2".to_vec(), - ])), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - assert!(db - .get([TEST_LEAF].as_ref(), b"merk_1", None) + .expect("successful subtree insert"); + temp_db + .insert( + [DEEP_LEAF, b"deep_node_2", b"deeper_5"].as_ref(), + b"key13", + Element::new_item(b"value13".to_vec()), + None, + None, + ) .unwrap() - .is_ok()); - assert!(db - .get([TEST_LEAF].as_ref(), b"merk_2", None) + .expect("successful subtree insert"); + temp_db + .insert( + [DEEP_LEAF, b"deep_node_2", b"deeper_5"].as_ref(), + b"key14", + Element::new_item(b"value14".to_vec()), + None, + None, + ) .unwrap() - .is_ok()); + .expect("successful subtree insert"); + temp_db } -#[test] -fn test_follow_references() { - let db = make_test_grovedb(); - let element = Element::new_item(b"ayy".to_vec()); - - // Insert an item to refer to - db.insert( - [TEST_LEAF].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 1 insert"); - db.insert( - [TEST_LEAF, b"key2"].as_ref(), - b"key3", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - - // Insert a reference - db.insert( - [TEST_LEAF].as_ref(), - 
b"reference_key", - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"key2".to_vec(), - b"key3".to_vec(), - ])), - None, - None, - ) - .unwrap() - .expect("successful reference insert"); +pub fn make_deep_tree_with_sum_trees() -> TempGroveDb { + // Tree Structure + // root + // deep_leaf + // deep_node_1 + // "" -> "empty" + // a -> "storage" + // c + // 1 (sum tree) + // [0;32], 1 + // [1;32], 1 + // d + // 0,v1 + // 1 (sum tree) + // [0;32], 4 + // [1;32], 1 + // e + // 0,v4 + // 1 (sum tree) + // [0;32], 1 + // [1;32], 4 + // f + // 0,v1 + // 1 (sum tree) + // [0;32], 1 + // [1;32], 4 + // g + // 0,v4 + // 1 (sum tree) + // [3;32], 4 + // [5;32], 4 + // h -> "h" + // .. -> .. + // z -> "z" - assert_eq!( - db.get([TEST_LEAF].as_ref(), b"reference_key", None) - .unwrap() - .expect("successful get"), - element - ); -} + let temp_db = make_test_grovedb(); -#[test] -fn test_reference_must_point_to_item() { - let db = make_test_grovedb(); + // Add deep_leaf to root + temp_db + .insert(EMPTY_PATH, DEEP_LEAF, Element::empty_tree(), None, None) + .unwrap() + .expect("successful root tree leaf insert"); - let result = db + // Add deep_node_1 to deep_leaf + temp_db .insert( - [TEST_LEAF].as_ref(), - b"reference_key_1", - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"reference_key_2".to_vec(), - ])), + [DEEP_LEAF].as_ref(), + b"deep_node_1", + Element::empty_tree(), None, None, ) - .unwrap(); - - assert!(matches!(result, Err(Error::MissingReference(_)))); -} - -#[test] -fn test_too_many_indirections() { - use crate::operations::get::MAX_REFERENCE_HOPS; - let db = make_test_grovedb(); - - let keygen = |idx| format!("key{}", idx).bytes().collect::>(); - - db.insert( - [TEST_LEAF].as_ref(), - b"key0", - Element::new_item(b"oops".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful item insert"); + .unwrap() + .expect("successful subtree insert"); - for i in 
1..=(MAX_REFERENCE_HOPS) { - db.insert( - [TEST_LEAF].as_ref(), - &keygen(i), - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - keygen(i - 1), - ])), + // Add a -> "storage" to deep_node_1 + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1"].as_ref(), + b"", + Element::new_item("empty".as_bytes().to_vec()), None, None, ) .unwrap() - .expect("successful reference insert"); - } - - // Add one more reference - db.insert( - [TEST_LEAF].as_ref(), - &keygen(MAX_REFERENCE_HOPS + 1), - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - keygen(MAX_REFERENCE_HOPS), - ])), - None, - None, - ) - .unwrap() - .expect("expected insert"); - - let result = db - .get([TEST_LEAF].as_ref(), &keygen(MAX_REFERENCE_HOPS + 1), None) - .unwrap(); - - assert!(matches!(result, Err(Error::ReferenceLimit))); -} + .expect("successful item insert"); -#[test] -fn test_reference_value_affects_state() { - let db_one = make_test_grovedb(); - db_one + // Add a -> "storage" to deep_node_1 + temp_db .insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::new_item(vec![0]), + [DEEP_LEAF, b"deep_node_1"].as_ref(), + b"a", + Element::new_item("storage".as_bytes().to_vec()), None, None, ) .unwrap() - .expect("should insert item"); - db_one + .expect("successful item insert"); + + // Add c, d, e, f, g to deep_node_1 + for key in [b"c", b"d", b"e", b"f", b"g"].iter() { + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1"].as_ref(), + key.as_slice(), + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + } + + // Add sum tree to c + temp_db .insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"ref", - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"key1".to_vec(), - ])), + [DEEP_LEAF, b"deep_node_1", b"c"].as_ref(), + b"1", + Element::new_sum_tree(None), None, None, ) .unwrap() - .expect("should insert item"); + .expect("successful sum tree 
insert"); - let db_two = make_test_grovedb(); - db_two + // Add items to sum tree in c + temp_db .insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::new_item(vec![0]), + [DEEP_LEAF, b"deep_node_1", b"c", b"1"].as_ref(), + &[0; 32], + Element::SumItem(1, None), None, None, ) .unwrap() - .expect("should insert item"); - db_two + .expect("successful sum item insert"); + temp_db .insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"ref", - Element::new_reference(ReferencePathType::UpstreamRootHeightReference( - 0, - vec![TEST_LEAF.to_vec(), b"key1".to_vec()], - )), + [DEEP_LEAF, b"deep_node_1", b"c", b"1"].as_ref(), + &[1; 32], + Element::SumItem(1, None), None, None, ) .unwrap() - .expect("should insert item"); + .expect("successful sum item insert"); - assert_ne!( - db_one - .root_hash(None) + // Add items to 4, 5, 6, 7 + for (key, value) in [(b"d", b"v1"), (b"e", b"v4"), (b"f", b"v1"), (b"g", b"v4")].iter() { + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", key.as_slice()].as_ref(), + b"0", + Element::new_item(value.to_vec()), + None, + None, + ) .unwrap() - .expect("should return root hash"), - db_two - .root_hash(None) + .expect("successful item insert"); + + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", key.as_slice()].as_ref(), + b"1", + Element::new_sum_tree(None), + None, + None, + ) .unwrap() - .expect("should return toor hash") - ); -} + .expect("successful sum tree insert"); + } -#[test] -fn test_tree_structure_is_persistent() { - let tmp_dir = TempDir::new().unwrap(); - let element = Element::new_item(b"ayy".to_vec()); - // Create a scoped GroveDB - let prev_root_hash = { - let mut db = GroveDb::open(tmp_dir.path()).unwrap(); - add_test_leaves(&mut db); + // Add items to sum trees in d, e, f + for key in [b"d", b"e", b"f"].iter() { + let (value1, value2) = if *key == b"d" { (4, 1) } else { (1, 4) }; - // Insert some nested subtrees - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful 
subtree 1 insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key2", - Element::empty_tree(), + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", key.as_slice(), b"1"].as_ref(), + &[0; 32], + Element::SumItem(value1, None), + None, + None, + ) + .unwrap() + .expect("successful sum item insert"); + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", key.as_slice(), b"1"].as_ref(), + &[1; 32], + Element::SumItem(value2, None), + None, + None, + ) + .unwrap() + .expect("successful sum item insert"); + } + + // Add items to sum tree in 7 + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", b"g", b"1"].as_ref(), + &[3; 32], + Element::SumItem(4, None), None, None, ) .unwrap() - .expect("successful subtree 2 insert"); - // Insert an element into subtree - db.insert( - [TEST_LEAF, b"key1", b"key2"].as_ref(), - b"key3", - element.clone(), + .expect("successful sum item insert"); + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1", b"g", b"1"].as_ref(), + &[5; 32], + Element::SumItem(4, None), None, None, ) .unwrap() - .expect("successful value insert"); - assert_eq!( - db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) - .unwrap() - .expect("successful get 1"), - element - ); - db.root_hash(None).unwrap().unwrap() - }; - // Open a persisted GroveDB - let db = GroveDb::open(tmp_dir).unwrap(); - assert_eq!( - db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) - .unwrap() - .expect("successful get 2"), - element - ); - assert!(db - .get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key4", None) - .unwrap() - .is_err()); - assert_eq!(prev_root_hash, db.root_hash(None).unwrap().unwrap()); -} - -#[test] -fn test_root_tree_leaves_are_noted() { - let db = make_test_grovedb(); - db.check_subtree_exists_path_not_found([TEST_LEAF].as_ref().into(), None) - .unwrap() - .expect("should exist"); - db.check_subtree_exists_path_not_found([ANOTHER_TEST_LEAF].as_ref().into(), None) - .unwrap() - .expect("should exist"); -} + .expect("successful sum item insert"); -#[test] 
-fn test_proof_for_invalid_path_root_key() { - let db = make_test_grovedb(); + // Add entries for all letters from "h" to "z" + for letter in b'h'..=b'z' { + temp_db + .insert( + [DEEP_LEAF, b"deep_node_1"].as_ref(), + &[letter], + Element::new_item(vec![letter]), + None, + None, + ) + .unwrap() + .expect(&format!("successful item insert for {}", letter as char)); + } - let query = Query::new(); - let path_query = PathQuery::new_unsized(vec![b"invalid_path_key".to_vec()], query); + temp_db +} - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); +mod tests { + use super::*; - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); -} + #[test] + fn test_init() { + let tmp_dir = TempDir::new().unwrap(); + GroveDb::open(tmp_dir).expect("empty tree is ok"); + } -#[test] -fn test_proof_for_invalid_path() { - let db = make_deep_tree(); + #[test] + fn test_element_with_flags() { + let db = make_test_grovedb(); - let query = Query::new(); - let path_query = - PathQuery::new_unsized(vec![b"deep_leaf".to_vec(), b"invalid_key".to_vec()], query); + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("should insert subtree successfully"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"elem1", + Element::new_item(b"flagless".to_vec()), + None, + None, + ) + .unwrap() + .expect("should insert subtree successfully"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"elem2", + Element::new_item_with_flags(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())), + None, + None, + ) + .unwrap() + .expect("should insert subtree successfully"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"elem3", + Element::new_tree_with_flags(None, Some([1].to_vec())), + None, + None, + ) + .unwrap() + .expect("should insert subtree successfully"); + db.insert( + [TEST_LEAF, 
b"key1", b"elem3"].as_ref(), + b"elem4", + Element::new_reference_with_flags( + ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + b"key1".to_vec(), + b"elem2".to_vec(), + ]), + Some([9].to_vec()), + ), + None, + None, + ) + .unwrap() + .expect("should insert subtree successfully"); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + let element_without_flag = db + .get([TEST_LEAF, b"key1"].as_ref(), b"elem1", None) + .unwrap() + .expect("should get successfully"); + let element_with_flag = db + .get([TEST_LEAF, b"key1"].as_ref(), b"elem2", None) + .unwrap() + .expect("should get successfully"); + let tree_element_with_flag = db + .get([TEST_LEAF, b"key1"].as_ref(), b"elem3", None) + .unwrap() + .expect("should get successfully"); + let flagged_ref_follow = db + .get([TEST_LEAF, b"key1", b"elem3"].as_ref(), b"elem4", None) + .unwrap() + .expect("should get successfully"); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); + let mut query = Query::new(); + query.insert_key(b"elem4".to_vec()); + let path_query = PathQuery::new( + vec![TEST_LEAF.to_vec(), b"key1".to_vec(), b"elem3".to_vec()], + SizedQuery::new(query, None, None), + ); + let (flagged_ref_no_follow, _) = db + .query_raw( + &path_query, + true, + true, + true, + QueryKeyElementPairResultType, + None, + ) + .unwrap() + .expect("should get successfully"); - let query = Query::new(); - let path_query = PathQuery::new_unsized( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"invalid_key".to_vec(), - ], - query, - ); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); - - let query = 
Query::new(); - let path_query = PathQuery::new_unsized( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec(), - b"invalid_key".to_vec(), - ], - query, - ); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); - - let query = Query::new(); - let path_query = PathQuery::new_unsized( - vec![ - b"deep_leaf".to_vec(), - b"early_invalid_key".to_vec(), - b"deeper_1".to_vec(), - b"invalid_key".to_vec(), - ], - query, - ); + assert_eq!( + element_without_flag, + Element::Item(b"flagless".to_vec(), None) + ); + assert_eq!( + element_with_flag, + Element::Item(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())) + ); + assert_eq!(tree_element_with_flag.get_flags(), &Some([1].to_vec())); + assert_eq!( + flagged_ref_follow, + Element::Item(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())) + ); + assert_eq!( + flagged_ref_no_follow.to_key_elements()[0], + ( + b"elem4".to_vec(), + Element::Reference( + ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + b"key1".to_vec(), + b"elem2".to_vec() + ]), + None, + Some([9].to_vec()) + ) + ) + ); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + // Test proofs with flags + let mut query = Query::new(); + query.insert_all(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); -} + let path_query = PathQuery::new( + vec![TEST_LEAF.to_vec(), b"key1".to_vec()], + SizedQuery::new(query, None, None), + ); + let proof = db + .prove_query(&path_query, None) + .unwrap() + .expect("should successfully create proof"); + let (root_hash, result_set) = + GroveDb::verify_query_raw(&proof, 
&path_query).expect("should verify proof"); + assert_eq!(root_hash, db.grove_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + assert_eq!( + Element::deserialize(&result_set[0].value).expect("should deserialize element"), + Element::Item(b"flagless".to_vec(), None) + ); + assert_eq!( + Element::deserialize(&result_set[1].value).expect("should deserialize element"), + Element::Item(b"flagged".to_vec(), Some([4, 5, 6, 7, 8].to_vec())) + ); + assert_eq!( + Element::deserialize(&result_set[2].value) + .expect("should deserialize element") + .get_flags(), + &Some([1].to_vec()) + ); + } -#[test] -fn test_proof_for_non_existent_data() { - let temp_db = make_test_grovedb(); + #[test] + fn test_cannot_update_populated_tree_item() { + // This test shows that you cannot update a tree item + // in a way that disconnects it's root hash from that of + // the merk it points to. + let db = make_deep_tree(); - let mut query = Query::new(); - query.insert_key(b"key1".to_vec()); + let old_element = db + .get([TEST_LEAF].as_ref(), b"innertree", None) + .unwrap() + .expect("should fetch item"); - // path to empty subtree - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + let new_element = Element::empty_tree(); + db.insert( + [TEST_LEAF].as_ref(), + b"innertree", + new_element.clone(), + None, + None, + ) + .unwrap() + .expect_err("should not override tree"); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + let current_element = db + .get([TEST_LEAF].as_ref(), b"innertree", None) + .unwrap() + .expect("should fetch item"); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); -} + assert_eq!(current_element, old_element); + assert_ne!(current_element, new_element); + } -#[test] -fn test_path_query_proofs_without_subquery_with_reference() { - // Tree 
Structure - // root - // test_leaf - // innertree - // k1,v1 - // k2,v2 - // k3,v3 - // another_test_leaf - // innertree2 - // k3,v3 - // k4, reference to k1 in innertree - // k5, reference to k4 in innertree3 - // innertree3 - // k4,v4 + #[test] + fn test_changes_propagated() { + let db = make_test_grovedb(); + let old_hash = db.root_hash(None).unwrap().unwrap(); + let element = Element::new_item(b"ayy".to_vec()); - // Insert elements into grovedb instance - let temp_db = make_test_grovedb(); - // Insert level 1 nodes - temp_db - .insert( + // Insert some nested subtrees + db.insert( [TEST_LEAF].as_ref(), - b"innertree", + b"key1", Element::empty_tree(), None, None, ) .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"innertree2", + .expect("successful subtree 1 insert"); + + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key2", Element::empty_tree(), None, None, ) .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"innertree3", + .expect("successful subtree 2 insert"); + + db.insert( + [TEST_LEAF, b"key1", b"key2"].as_ref(), + b"key3", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + + assert_eq!( + db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) + .unwrap() + .expect("successful get"), + element + ); + assert_ne!(old_hash, db.root_hash(None).unwrap().unwrap()); + } + + // TODO: Add solid test cases to this + + #[test] + fn test_references() { + let db = make_test_grovedb(); + db.insert( + [TEST_LEAF].as_ref(), + b"merk_1", Element::empty_tree(), None, None, ) .unwrap() .expect("successful subtree insert"); - // Insert level 2 nodes - temp_db - .insert( - [TEST_LEAF, b"innertree"].as_ref(), + db.insert( + [TEST_LEAF, b"merk_1"].as_ref(), b"key1", Element::new_item(b"value1".to_vec()), None, @@ -1192,9 +951,8 @@ fn test_path_query_proofs_without_subquery_with_reference() { ) .unwrap() 
.expect("successful subtree insert"); - temp_db - .insert( - [TEST_LEAF, b"innertree"].as_ref(), + db.insert( + [TEST_LEAF, b"merk_1"].as_ref(), b"key2", Element::new_item(b"value2".to_vec()), None, @@ -1202,1767 +960,2419 @@ fn test_path_query_proofs_without_subquery_with_reference() { ) .unwrap() .expect("successful subtree insert"); - temp_db - .insert( - [TEST_LEAF, b"innertree"].as_ref(), - b"key3", - Element::new_item(b"value3".to_vec()), + + db.insert( + [TEST_LEAF].as_ref(), + b"merk_2", + Element::empty_tree(), None, None, ) .unwrap() .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), - b"key3", - Element::new_item(b"value3".to_vec()), + // db.insert([TEST_LEAF, b"merk_2"].as_ref(), b"key2", + // Element::new_item(b"value2".to_vec()), None).expect("successful subtree + // insert"); + db.insert( + [TEST_LEAF, b"merk_2"].as_ref(), + b"key1", + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + b"merk_1".to_vec(), + b"key1".to_vec(), + ])), None, None, ) .unwrap() .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), - b"key4", + db.insert( + [TEST_LEAF, b"merk_2"].as_ref(), + b"key2", Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ TEST_LEAF.to_vec(), - b"innertree".to_vec(), - b"key1".to_vec(), + b"merk_1".to_vec(), + b"key2".to_vec(), ])), None, None, ) .unwrap() .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF, b"innertree3"].as_ref(), - b"key4", - Element::new_item(b"value4".to_vec()), + assert!(db + .get([TEST_LEAF].as_ref(), b"merk_1", None) + .unwrap() + .is_ok()); + assert!(db + .get([TEST_LEAF].as_ref(), b"merk_2", None) + .unwrap() + .is_ok()); + } + + #[test] + fn test_follow_references() { + let db = make_test_grovedb(); + let element = Element::new_item(b"ayy".to_vec()); + + // Insert an item to refer to + db.insert( + [TEST_LEAF].as_ref(), + 
b"key2", + Element::empty_tree(), None, None, ) .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), - b"key5", + .expect("successful subtree 1 insert"); + db.insert( + [TEST_LEAF, b"key2"].as_ref(), + b"key3", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + + // Insert a reference + db.insert( + [TEST_LEAF].as_ref(), + b"reference_key", Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - ANOTHER_TEST_LEAF.to_vec(), - b"innertree3".to_vec(), - b"key4".to_vec(), + TEST_LEAF.to_vec(), + b"key2".to_vec(), + b"key3".to_vec(), ])), None, None, ) .unwrap() - .expect("successful subtree insert"); - - // Single key query - let mut query = Query::new(); - query.insert_range_from(b"key4".to_vec()..); - - let path_query = PathQuery::new_unsized( - vec![ANOTHER_TEST_LEAF.to_vec(), b"innertree2".to_vec()], - query, - ); - - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - assert_eq!( - hex::encode(&proof), - "010285010198ebd6dc7e1c82951c41fcfa6487711cac6a399ebb01bb979cb\ - e4a51e0b2f08d06046b6579340009000676616c75653100bf2f052b01c2b\ - b83ff3a40504d42b5b9141c582a3e0c98679189b33a24478a6f1006046b6\ - 579350009000676616c75653400f084ffdbc429a89c9b6620e7224d73c2e\ - e505eb7e6fb5eb574e1a8dc8b0d0884110158040a696e6e6572747265653\ - 200080201046b657934008ba21f835b2ff60f16b7fccfbda107bec3da0c4\ - 709357d40de223d769547ec21013a090155ea7d14038c7062d94930798f8\ - 85a19d6ebff8a87489a1debf665604711015e02cfb7d035b8f4a3631be46\ - c597510a16770c15c74331b3dc8dcb577a206e49675040a746573745f6c6\ - 5616632000e02010a696e6e657274726565320049870f2813c0c3c5c105a\ - 988c0ef1372178245152fa9a43b209a6b6d95589bdc11" - ); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - let r1 = 
Element::new_item(b"value1".to_vec()).serialize().unwrap(); - let r2 = Element::new_item(b"value4".to_vec()).serialize().unwrap(); - - compare_result_tuples( - result_set, - vec![(b"key4".to_vec(), r1), (b"key5".to_vec(), r2)], - ); -} + .expect("successful reference insert"); -#[test] -fn test_path_query_proofs_without_subquery() { - // Tree Structure - // root - // test_leaf - // innertree - // k1,v1 - // k2,v2 - // k3,v3 - // another_test_leaf - // innertree2 - // k3,v3 - // innertree3 - // k4,v4 + assert_eq!( + db.get([TEST_LEAF].as_ref(), b"reference_key", None) + .unwrap() + .expect("successful get"), + element + ); + } - // Insert elements into grovedb instance - let temp_db = make_test_grovedb(); - // Insert level 1 nodes - temp_db - .insert( + #[test] + fn test_reference_must_point_to_item() { + let db = make_test_grovedb(); + + let result = db + .insert( + [TEST_LEAF].as_ref(), + b"reference_key_1", + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + b"reference_key_2".to_vec(), + ])), + None, + None, + ) + .unwrap(); + + assert!(matches!(result, Err(Error::MissingReference(_)))); + } + + #[test] + fn test_too_many_indirections() { + use crate::operations::get::MAX_REFERENCE_HOPS; + let db = make_test_grovedb(); + + let keygen = |idx| format!("key{}", idx).bytes().collect::>(); + + db.insert( [TEST_LEAF].as_ref(), - b"innertree", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"innertree2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"innertree3", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - // Insert level 2 nodes - temp_db - .insert( - [TEST_LEAF, b"innertree"].as_ref(), - b"key1", - Element::new_item(b"value1".to_vec()), + b"key0", + 
Element::new_item(b"oops".to_vec()), None, None, ) .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [TEST_LEAF, b"innertree"].as_ref(), - b"key2", - Element::new_item(b"value2".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [TEST_LEAF, b"innertree"].as_ref(), - b"key3", - Element::new_item(b"value3".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), - b"key3", - Element::new_item(b"value3".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - temp_db - .insert( - [ANOTHER_TEST_LEAF, b"innertree3"].as_ref(), - b"key4", - Element::new_item(b"value4".to_vec()), + .expect("successful item insert"); + + for i in 1..=(MAX_REFERENCE_HOPS) { + db.insert( + [TEST_LEAF].as_ref(), + &keygen(i), + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + keygen(i - 1), + ])), + None, + None, + ) + .unwrap() + .expect("successful reference insert"); + } + + // Add one more reference + db.insert( + [TEST_LEAF].as_ref(), + &keygen(MAX_REFERENCE_HOPS + 1), + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + keygen(MAX_REFERENCE_HOPS), + ])), None, None, ) .unwrap() - .expect("successful subtree insert"); - - // Single key query - let mut query = Query::new(); - query.insert_key(b"key1".to_vec()); - - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); - - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - assert_eq!( - hex::encode(proof.as_slice()), - "01025503046b6579310009000676616c7565310002018655e18e4555b0b65\ - bbcec64c749db6b9ad84231969fb4fbe769a3093d10f2100198ebd6dc7e1\ - c82951c41fcfa6487711cac6a399ebb01bb979cbe4a51e0b2f08d1101350\ - 409696e6e65727472656500080201046b657932004910536da659a3dbdbc\ - 
f68c4a6630e72de4ba20cfc60b08b3dd45b4225a599b6015c04097465737\ - 45f6c656166000d020109696e6e65727472656500fafa16d06e8d8696dae\ - 443731ae2a4eae521e4a9a79c331c8a7e22e34c0f1a6e01b55f830550604\ - 719833d54ce2bf139aff4bb699fa4111b9741633554318792c511" - ); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - let r1 = Element::new_item(b"value1".to_vec()).serialize().unwrap(); - compare_result_tuples(result_set, vec![(b"key1".to_vec(), r1)]); - - // Range query + limit - let mut query = Query::new(); - query.insert_range_after(b"key1".to_vec()..); - let path_query = PathQuery::new( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - SizedQuery::new(query, Some(1), None), - ); - - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - let r1 = Element::new_item(b"value2".to_vec()).serialize().unwrap(); - compare_result_tuples(result_set, vec![(b"key2".to_vec(), r1)]); - - // Range query + offset + limit - let mut query = Query::new(); - query.insert_range_after(b"key1".to_vec()..); - let path_query = PathQuery::new( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - SizedQuery::new(query, Some(1), Some(1)), - ); - - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - let r1 = Element::new_item(b"value3".to_vec()).serialize().unwrap(); - compare_result_tuples(result_set, vec![(b"key3".to_vec(), r1)]); - - // Range query + direction + limit - let mut query = Query::new_with_direction(false); - query.insert_all(); - let path_query = PathQuery::new( - 
vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - SizedQuery::new(query, Some(2), None), - ); - - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - let r1 = Element::new_item(b"value3".to_vec()).serialize().unwrap(); - let r2 = Element::new_item(b"value2".to_vec()).serialize().unwrap(); - compare_result_tuples( - result_set, - vec![(b"key3".to_vec(), r1), (b"key2".to_vec(), r2)], - ); -} - -#[test] -fn test_path_query_proofs_with_default_subquery() { - let temp_db = make_deep_tree(); - - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new(); - subq.insert_all(); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - - let keys = [ - b"key1".to_vec(), - b"key2".to_vec(), - b"key3".to_vec(), - b"key4".to_vec(), - b"key5".to_vec(), - ]; - let values = [ - b"value1".to_vec(), - b"value2".to_vec(), - b"value3".to_vec(), - b"value4".to_vec(), - b"value5".to_vec(), - ]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); - - let mut query = Query::new(); - query.insert_range_after(b"innertree".to_vec()..); - - let mut subq = Query::new(); - subq.insert_all(); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, 
result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 2); - - let keys = [b"key4".to_vec(), b"key5".to_vec()]; - let values = [b"value4".to_vec(), b"value5".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); - - // range subquery - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new(); - subq.insert_range_after_to_inclusive(b"key1".to_vec()..=b"key4".to_vec()); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect( - "should - execute proof", - ); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - let keys = [b"key2".to_vec(), b"key3".to_vec(), b"key4".to_vec()]; - let values = [b"value2".to_vec(), b"value3".to_vec(), b"value4".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); - - // deep tree test - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new(); - subq.insert_all(); - - let mut sub_subquery = Query::new(); - sub_subquery.insert_all(); - - subq.set_subquery(sub_subquery); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), 
&path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 11); - - let keys = [ - b"key1".to_vec(), - b"key2".to_vec(), - b"key3".to_vec(), - b"key4".to_vec(), - b"key5".to_vec(), - b"key6".to_vec(), - b"key7".to_vec(), - b"key8".to_vec(), - b"key9".to_vec(), - b"key10".to_vec(), - b"key11".to_vec(), - ]; - let values = [ - b"value1".to_vec(), - b"value2".to_vec(), - b"value3".to_vec(), - b"value4".to_vec(), - b"value5".to_vec(), - b"value6".to_vec(), - b"value7".to_vec(), - b"value8".to_vec(), - b"value9".to_vec(), - b"value10".to_vec(), - b"value11".to_vec(), - ]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); -} - -#[test] -fn test_path_query_proofs_with_subquery_path() { - let temp_db = make_deep_tree(); - - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new(); - subq.insert_all(); - - query.set_subquery_key(b"deeper_1".to_vec()); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - let keys = [b"key1".to_vec(), b"key2".to_vec(), b"key3".to_vec()]; - let values = [b"value1".to_vec(), b"value2".to_vec(), b"value3".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); - - // test subquery path with valid n > 1 valid translation - let mut query = Query::new(); - 
query.insert_all(); - - let mut subq = Query::new(); - subq.insert_all(); - - query.set_subquery_path(vec![b"deep_node_1".to_vec(), b"deeper_1".to_vec()]); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - let keys = [b"key1".to_vec(), b"key2".to_vec(), b"key3".to_vec()]; - let values = [b"value1".to_vec(), b"value2".to_vec(), b"value3".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); - - // test subquery path with empty subquery path - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new(); - subq.insert_all(); - - query.set_subquery_path(vec![]); - query.set_subquery(subq); - - let path_query = - PathQuery::new_unsized(vec![b"deep_leaf".to_vec(), b"deep_node_1".to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 6); - - let keys = [ - b"key1".to_vec(), - b"key2".to_vec(), - b"key3".to_vec(), - b"key4".to_vec(), - b"key5".to_vec(), - b"key6".to_vec(), - ]; - let values = [ - b"value1".to_vec(), - b"value2".to_vec(), - b"value3".to_vec(), - b"value4".to_vec(), - b"value5".to_vec(), - b"value6".to_vec(), - ]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - 
compare_result_tuples(result_set, expected_result_set); - - // test subquery path with an invalid translation - // should generate a valid absence proof with an empty result set - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new(); - subq.insert_all(); - - query.set_subquery_path(vec![ - b"deep_node_1".to_vec(), - b"deeper_10".to_vec(), - b"another_invalid_key".to_vec(), - ]); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); -} + .expect("expected insert"); -#[test] -fn test_path_query_proofs_with_key_and_subquery() { - let temp_db = make_deep_tree(); - - let mut query = Query::new(); - query.insert_key(b"deep_node_1".to_vec()); - - let mut subq = Query::new(); - subq.insert_all(); + let result = db + .get([TEST_LEAF].as_ref(), &keygen(MAX_REFERENCE_HOPS + 1), None) + .unwrap(); - query.set_subquery_key(b"deeper_1".to_vec()); - query.set_subquery(subq); + assert!(matches!(result, Err(Error::ReferenceLimit))); + } - let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); + #[test] + fn test_reference_value_affects_state() { + let db_one = make_test_grovedb(); + db_one + .insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::new_item(vec![0]), + None, + None, + ) + .unwrap() + .expect("should insert item"); + db_one + .insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"ref", + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + b"key1".to_vec(), + ])), + None, + None, + ) + .unwrap() + .expect("should insert item"); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), 
&path_query).expect("should execute proof"); + let db_two = make_test_grovedb(); + db_two + .insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::new_item(vec![0]), + None, + None, + ) + .unwrap() + .expect("should insert item"); + db_two + .insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"ref", + Element::new_reference(ReferencePathType::UpstreamRootHeightReference( + 0, + vec![TEST_LEAF.to_vec(), b"key1".to_vec()], + )), + None, + None, + ) + .unwrap() + .expect("should insert item"); - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); + assert_ne!( + db_one + .root_hash(None) + .unwrap() + .expect("should return root hash"), + db_two + .root_hash(None) + .unwrap() + .expect("should return toor hash") + ); + } - let keys = [b"key1".to_vec(), b"key2".to_vec(), b"key3".to_vec()]; - let values = [b"value1".to_vec(), b"value2".to_vec(), b"value3".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); -} + #[test] + fn test_tree_structure_is_persistent() { + let tmp_dir = TempDir::new().unwrap(); + let element = Element::new_item(b"ayy".to_vec()); + // Create a scoped GroveDB + let prev_root_hash = { + let mut db = GroveDb::open(tmp_dir.path()).unwrap(); + add_test_leaves(&mut db); + + // Insert some nested subtrees + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 1 insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key2", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 2 insert"); + // Insert an element into subtree + db.insert( + [TEST_LEAF, b"key1", b"key2"].as_ref(), + b"key3", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + assert_eq!( + db.get([TEST_LEAF, b"key1", 
b"key2"].as_ref(), b"key3", None) + .unwrap() + .expect("successful get 1"), + element + ); + db.root_hash(None).unwrap().unwrap() + }; + // Open a persisted GroveDB + let db = GroveDb::open(tmp_dir).unwrap(); + assert_eq!( + db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) + .unwrap() + .expect("successful get 2"), + element + ); + assert!(db + .get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key4", None) + .unwrap() + .is_err()); + assert_eq!(prev_root_hash, db.root_hash(None).unwrap().unwrap()); + } -#[test] -fn test_path_query_proofs_with_conditional_subquery() { - let temp_db = make_deep_tree(); + #[test] + fn test_root_tree_leaves_are_noted() { + let db = make_test_grovedb(); + db.check_subtree_exists_path_not_found([TEST_LEAF].as_ref().into(), None) + .unwrap() + .expect("should exist"); + db.check_subtree_exists_path_not_found([ANOTHER_TEST_LEAF].as_ref().into(), None) + .unwrap() + .expect("should exist"); + } - let mut query = Query::new(); - query.insert_all(); + #[test] + fn test_proof_for_invalid_path_root_key() { + let db = make_test_grovedb(); - let mut subquery = Query::new(); - subquery.insert_all(); + let query = Query::new(); + let path_query = PathQuery::new_unsized(vec![b"invalid_path_key".to_vec()], query); - let mut final_subquery = Query::new(); - final_subquery.insert_all(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - subquery.add_conditional_subquery( - QueryItem::Key(b"deeper_4".to_vec()), - None, - Some(final_subquery), - ); - - query.set_subquery(subquery); - - let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - - let 
keys = [ - b"deeper_1".to_vec(), - b"deeper_2".to_vec(), - b"deeper_3".to_vec(), - b"key10".to_vec(), - b"key11".to_vec(), - ]; - assert_eq!(result_set.len(), keys.len()); - - // TODO: Is this defined behaviour - for (index, key) in keys.iter().enumerate() { - assert_eq!(&result_set[index].key, key); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); } - // Default + Conditional subquery - let mut query = Query::new(); - query.insert_all(); - - let mut subquery = Query::new(); - subquery.insert_all(); - - let mut final_conditional_subquery = Query::new(); - final_conditional_subquery.insert_all(); - - let mut final_default_subquery = Query::new(); - final_default_subquery.insert_range_inclusive(b"key3".to_vec()..=b"key6".to_vec()); + #[test] + fn test_proof_for_invalid_path() { + let db = make_deep_tree(); + + let query = Query::new(); + let path_query = + PathQuery::new_unsized(vec![b"deep_leaf".to_vec(), b"invalid_key".to_vec()], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); + + let query = Query::new(); + let path_query = PathQuery::new_unsized( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"invalid_key".to_vec(), + ], + query, + ); - subquery.add_conditional_subquery( - QueryItem::Key(b"deeper_4".to_vec()), - None, - Some(final_conditional_subquery), - ); - subquery.set_subquery(final_default_subquery); - - query.set_subquery(subquery); - - let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - 
assert_eq!(result_set.len(), 6); - - let keys = [ - b"key3".to_vec(), - b"key4".to_vec(), - b"key5".to_vec(), - b"key6".to_vec(), - b"key10".to_vec(), - b"key11".to_vec(), - ]; - let values = [ - b"value3".to_vec(), - b"value4".to_vec(), - b"value5".to_vec(), - b"value6".to_vec(), - b"value10".to_vec(), - b"value11".to_vec(), - ]; - let elements = values - .map(|x| Element::new_item(x).serialize().unwrap()) - .to_vec(); - // compare_result_sets(&elements, &result_set); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); -} + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); + + let query = Query::new(); + let path_query = PathQuery::new_unsized( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_1".to_vec(), + b"invalid_key".to_vec(), + ], + query, + ); -#[test] -fn test_path_query_proofs_with_sized_query() { - let temp_db = make_deep_tree(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); + + let query = Query::new(); + let path_query = PathQuery::new_unsized( + vec![ + b"deep_leaf".to_vec(), + b"early_invalid_key".to_vec(), + b"deeper_1".to_vec(), + b"invalid_key".to_vec(), + ], + query, + ); - let mut query = Query::new(); - query.insert_all(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - let mut subquery = Query::new(); - subquery.insert_all(); + 
assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); + } - let mut final_conditional_subquery = Query::new(); - final_conditional_subquery.insert_all(); + #[test] + fn test_proof_for_non_existent_data() { + let temp_db = make_test_grovedb(); - let mut final_default_subquery = Query::new(); - final_default_subquery.insert_range_inclusive(b"key3".to_vec()..=b"key6".to_vec()); + let mut query = Query::new(); + query.insert_key(b"key1".to_vec()); - subquery.add_conditional_subquery( - QueryItem::Key(b"deeper_4".to_vec()), - None, - Some(final_conditional_subquery), - ); - subquery.set_subquery(final_default_subquery); - - query.set_subquery(subquery); - - let path_query = PathQuery::new( - vec![DEEP_LEAF.to_vec()], - SizedQuery::new(query, Some(3), Some(1)), - ); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - let keys = [b"key4".to_vec(), b"key5".to_vec(), b"key6".to_vec()]; - let values = [b"value4".to_vec(), b"value5".to_vec(), b"value6".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); -} + // path to empty subtree + let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); -#[test] -fn test_path_query_proofs_with_direction() { - let temp_db = make_deep_tree(); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - let mut query = Query::new_with_direction(false); - query.insert_all(); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + 
assert_eq!(result_set.len(), 0); + } - let mut subquery = Query::new_with_direction(false); - subquery.insert_all(); + #[test] + fn test_path_query_proofs_without_subquery_with_reference() { + // Tree Structure + // root + // test_leaf + // innertree + // k1,v1 + // k2,v2 + // k3,v3 + // another_test_leaf + // innertree2 + // k3,v3 + // k4, reference to k1 in innertree + // k5, reference to k4 in innertree3 + // innertree3 + // k4,v4 + + // Insert elements into grovedb instance + let temp_db = make_test_grovedb(); + // Insert level 1 nodes + temp_db + .insert( + [TEST_LEAF].as_ref(), + b"innertree", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"innertree2", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"innertree3", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + // Insert level 2 nodes + temp_db + .insert( + [TEST_LEAF, b"innertree"].as_ref(), + b"key1", + Element::new_item(b"value1".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [TEST_LEAF, b"innertree"].as_ref(), + b"key2", + Element::new_item(b"value2".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [TEST_LEAF, b"innertree"].as_ref(), + b"key3", + Element::new_item(b"value3".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), + b"key3", + Element::new_item(b"value3".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), + b"key4", + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + 
b"innertree".to_vec(), + b"key1".to_vec(), + ])), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF, b"innertree3"].as_ref(), + b"key4", + Element::new_item(b"value4".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), + b"key5", + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + ANOTHER_TEST_LEAF.to_vec(), + b"innertree3".to_vec(), + b"key4".to_vec(), + ])), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); - let mut final_conditional_subquery = Query::new_with_direction(false); - final_conditional_subquery.insert_all(); + // Single key query + let mut query = Query::new(); + query.insert_range_from(b"key4".to_vec()..); - let mut final_default_subquery = Query::new_with_direction(false); - final_default_subquery.insert_range_inclusive(b"key3".to_vec()..=b"key6".to_vec()); + let path_query = PathQuery::new_unsized( + vec![ANOTHER_TEST_LEAF.to_vec(), b"innertree2".to_vec()], + query, + ); - subquery.add_conditional_subquery( - QueryItem::Key(b"deeper_4".to_vec()), - None, - Some(final_conditional_subquery), - ); - subquery.set_subquery(final_default_subquery); - - query.set_subquery(subquery); - - let path_query = PathQuery::new( - vec![DEEP_LEAF.to_vec()], - SizedQuery::new(query, Some(3), Some(1)), - ); - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - let keys = [b"key10".to_vec(), b"key6".to_vec(), b"key5".to_vec()]; - let values = [b"value10".to_vec(), b"value6".to_vec(), b"value5".to_vec()]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = 
keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); - - // combined directions - let mut query = Query::new(); - query.insert_all(); - - let mut subq = Query::new_with_direction(false); - subq.insert_all(); - - let mut sub_subquery = Query::new(); - sub_subquery.insert_all(); - - subq.set_subquery(sub_subquery); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - - let proof = temp_db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = - GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - - assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 11); - - let keys = [ - b"key4".to_vec(), - b"key5".to_vec(), - b"key6".to_vec(), - b"key1".to_vec(), - b"key2".to_vec(), - b"key3".to_vec(), - b"key10".to_vec(), - b"key11".to_vec(), - b"key7".to_vec(), - b"key8".to_vec(), - b"key9".to_vec(), - ]; - let values = [ - b"value4".to_vec(), - b"value5".to_vec(), - b"value6".to_vec(), - b"value1".to_vec(), - b"value2".to_vec(), - b"value3".to_vec(), - b"value10".to_vec(), - b"value11".to_vec(), - b"value7".to_vec(), - b"value8".to_vec(), - b"value9".to_vec(), - ]; - let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); - let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); - compare_result_tuples(result_set, expected_result_set); -} + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + assert_eq!( + hex::encode(&proof), + "005e02cfb7d035b8f4a3631be46c597510a16770c15c74331b3dc8dcb577a206e49675040a746\ + 573745f6c65616632000e02010a696e6e657274726565320049870f2813c0c3c5c105a988c0ef1\ + 372178245152fa9a43b209a6b6d95589bdc11010a746573745f6c6561663258040a696e6e65727\ + 47265653200080201046b657934008ba21f835b2ff60f16b7fccfbda107bec3da0c4709357d40d\ + 
e223d769547ec21013a090155ea7d14038c7062d94930798f885a19d6ebff8a87489a1debf6656\ + 04711010a696e6e65727472656532850198ebd6dc7e1c82951c41fcfa6487711cac6a399ebb01b\ + b979cbe4a51e0b2f08d06046b6579340009000676616c75653100bf2f052b01c2bb83ff3a40504\ + d42b5b9141c582a3e0c98679189b33a24478a6f1006046b6579350009000676616c75653400f08\ + 4ffdbc429a89c9b6620e7224d73c2ee505eb7e6fb5eb574e1a8dc8b0d0884110001" + ); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); -#[test] -fn test_checkpoint() { - let db = make_test_grovedb(); - let element1 = Element::new_item(b"ayy".to_vec()); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + let r1 = Element::new_item(b"value1".to_vec()).serialize().unwrap(); + let r2 = Element::new_item(b"value4".to_vec()).serialize().unwrap(); - db.insert(EMPTY_PATH, b"key1", Element::empty_tree(), None, None) - .unwrap() - .expect("cannot insert a subtree 1 into GroveDB"); - db.insert( - [b"key1".as_ref()].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("cannot insert a subtree 2 into GroveDB"); - db.insert( - [b"key1".as_ref(), b"key2".as_ref()].as_ref(), - b"key3", - element1.clone(), - None, - None, - ) - .unwrap() - .expect("cannot insert an item into GroveDB"); + compare_result_tuples( + result_set, + vec![(b"key4".to_vec(), r1), (b"key5".to_vec(), r2)], + ); + } - assert_eq!( - db.get([b"key1".as_ref(), b"key2".as_ref()].as_ref(), b"key3", None) + #[test] + fn test_path_query_proofs_without_subquery() { + // Tree Structure + // root + // test_leaf + // innertree + // k1,v1 + // k2,v2 + // k3,v3 + // another_test_leaf + // innertree2 + // k3,v3 + // innertree3 + // k4,v4 + + // Insert elements into grovedb instance + let temp_db = make_test_grovedb(); + // Insert level 1 nodes + temp_db + .insert( + [TEST_LEAF].as_ref(), + b"innertree", + Element::empty_tree(), + None, + None, + ) .unwrap() - .expect("cannot get from grovedb"), - 
element1 - ); - - let tempdir_parent = TempDir::new().expect("cannot open tempdir"); - let checkpoint_tempdir = tempdir_parent.path().join("checkpoint"); - db.create_checkpoint(&checkpoint_tempdir) - .expect("cannot create checkpoint"); - - let checkpoint_db = - GroveDb::open(checkpoint_tempdir).expect("cannot open grovedb from checkpoint"); - - assert_eq!( - db.get([b"key1".as_ref(), b"key2".as_ref()].as_ref(), b"key3", None) + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"innertree2", + Element::empty_tree(), + None, + None, + ) .unwrap() - .expect("cannot get from grovedb"), - element1 - ); - assert_eq!( - checkpoint_db - .get([b"key1".as_ref(), b"key2".as_ref()].as_ref(), b"key3", None) + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"innertree3", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + // Insert level 2 nodes + temp_db + .insert( + [TEST_LEAF, b"innertree"].as_ref(), + b"key1", + Element::new_item(b"value1".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [TEST_LEAF, b"innertree"].as_ref(), + b"key2", + Element::new_item(b"value2".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [TEST_LEAF, b"innertree"].as_ref(), + b"key3", + Element::new_item(b"value3".to_vec()), + None, + None, + ) .unwrap() - .expect("cannot get from checkpoint"), - element1 - ); + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF, b"innertree2"].as_ref(), + b"key3", + Element::new_item(b"value3".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + temp_db + .insert( + [ANOTHER_TEST_LEAF, b"innertree3"].as_ref(), + b"key4", + Element::new_item(b"value4".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); - let element2 = 
Element::new_item(b"ayy2".to_vec()); - let element3 = Element::new_item(b"ayy3".to_vec()); + // Single key query + let mut query = Query::new(); + query.insert_key(b"key1".to_vec()); - checkpoint_db - .insert( - [b"key1".as_ref()].as_ref(), - b"key4", - element2.clone(), - None, - None, - ) - .unwrap() - .expect("cannot insert into checkpoint"); + let path_query = + PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); - db.insert( - [b"key1".as_ref()].as_ref(), - b"key4", - element3.clone(), - None, - None, - ) - .unwrap() - .expect("cannot insert into GroveDB"); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + assert_eq!( + hex::encode(proof.as_slice()), + "005c0409746573745f6c656166000d020109696e6e65727472656500fafa16d06e8d8696dae443731\ + ae2a4eae521e4a9a79c331c8a7e22e34c0f1a6e01b55f830550604719833d54ce2bf139aff4bb699fa\ + 4111b9741633554318792c5110109746573745f6c656166350409696e6e65727472656500080201046\ + b657932004910536da659a3dbdbcf68c4a6630e72de4ba20cfc60b08b3dd45b4225a599b60109696e6\ + e6572747265655503046b6579310009000676616c7565310002018655e18e4555b0b65bbcec64c749d\ + b6b9ad84231969fb4fbe769a3093d10f2100198ebd6dc7e1c82951c41fcfa6487711cac6a399ebb01b\ + b979cbe4a51e0b2f08d110001" + ); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + let r1 = Element::new_item(b"value1".to_vec()).serialize().unwrap(); + compare_result_tuples(result_set, vec![(b"key1".to_vec(), r1)]); + + // Range query + limit + let mut query = Query::new(); + query.insert_range_after(b"key1".to_vec()..); + let path_query = PathQuery::new( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + SizedQuery::new(query, Some(1), None), + ); - assert_eq!( - checkpoint_db - .get([b"key1".as_ref()].as_ref(), b"key4", None) - .unwrap() - .expect("cannot get from checkpoint"), - element2, - ); + let proof = 
temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - assert_eq!( - db.get([b"key1".as_ref()].as_ref(), b"key4", None) - .unwrap() - .expect("cannot get from GroveDB"), - element3 - ); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + let r1 = Element::new_item(b"value2".to_vec()).serialize().unwrap(); + compare_result_tuples(result_set, vec![(b"key2".to_vec(), r1)]); - checkpoint_db - .insert( - [b"key1".as_ref()].as_ref(), - b"key5", - element3.clone(), - None, - None, - ) - .unwrap() - .expect("cannot insert into checkpoint"); + // Range query + direction + limit + let mut query = Query::new_with_direction(false); + query.insert_all(); + let path_query = PathQuery::new( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + SizedQuery::new(query, Some(2), None), + ); - db.insert([b"key1".as_ref()].as_ref(), b"key6", element3, None, None) - .unwrap() - .expect("cannot insert into GroveDB"); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - assert!(matches!( - checkpoint_db - .get([b"key1".as_ref()].as_ref(), b"key6", None) - .unwrap(), - Err(Error::PathKeyNotFound(_)) - )); - - assert!(matches!( - db.get([b"key1".as_ref()].as_ref(), b"key5", None).unwrap(), - Err(Error::PathKeyNotFound(_)) - )); -} + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + let r1 = Element::new_item(b"value3".to_vec()).serialize().unwrap(); + let r2 = Element::new_item(b"value2".to_vec()).serialize().unwrap(); + compare_result_tuples( + result_set, + vec![(b"key3".to_vec(), r1), (b"key2".to_vec(), r2)], + ); + } -#[test] -fn test_is_empty_tree() { - let db = make_test_grovedb(); + #[test] + fn test_path_query_proofs_with_default_subquery() { + let temp_db = make_deep_tree(); - // Create an empty tree 
with no elements - db.insert( - [TEST_LEAF].as_ref(), - b"innertree", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .unwrap(); + let mut query = Query::new(); + query.insert_all(); - assert!(db - .is_empty_tree([TEST_LEAF, b"innertree"].as_ref(), None) - .unwrap() - .expect("path is valid tree")); + let mut subq = Query::new(); + subq.insert_all(); + query.set_subquery(subq); - // add an element to the tree to make it non empty - db.insert( - [TEST_LEAF, b"innertree"].as_ref(), - b"key1", - Element::new_item(b"hello".to_vec()), - None, - None, - ) - .unwrap() - .unwrap(); - assert!(!db - .is_empty_tree([TEST_LEAF, b"innertree"].as_ref(), None) - .unwrap() - .expect("path is valid tree")); -} + let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); -#[test] -fn transaction_should_be_aborted_when_rollback_is_called() { - let item_key = b"key3"; + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - let db = make_test_grovedb(); - let transaction = db.start_transaction(); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); - let element1 = Element::new_item(b"ayy".to_vec()); + let keys = [ + b"key1".to_vec(), + b"key2".to_vec(), + b"key3".to_vec(), + b"key4".to_vec(), + b"key5".to_vec(), + ]; + let values = [ + b"value1".to_vec(), + b"value2".to_vec(), + b"value3".to_vec(), + b"value4".to_vec(), + b"value5".to_vec(), + ]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + + let mut query = Query::new(); + query.insert_range_after(b"innertree".to_vec()..); + + let mut subq = Query::new(); + subq.insert_all(); + query.set_subquery(subq); + + let path_query = 
PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 2); + + let keys = [b"key4".to_vec(), b"key5".to_vec()]; + let values = [b"value4".to_vec(), b"value5".to_vec()]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + + // range subquery + let mut query = Query::new(); + query.insert_all(); + + let mut subq = Query::new(); + subq.insert_range_after_to_inclusive(b"key1".to_vec()..=b"key4".to_vec()); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect( + "should + execute proof", + ); - let result = db - .insert( - [TEST_LEAF].as_ref(), - item_key, - element1, - None, - Some(&transaction), - ) - .unwrap(); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); - assert!(matches!(result, Ok(()))); + let keys = [b"key2".to_vec(), b"key3".to_vec(), b"key4".to_vec()]; + let values = [b"value2".to_vec(), b"value3".to_vec(), b"value4".to_vec()]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); - db.rollback_transaction(&transaction).unwrap(); + // deep tree test + let mut query = Query::new(); + query.insert_all(); - let result = db - .get([TEST_LEAF].as_ref(), item_key, Some(&transaction)) - 
.unwrap(); - assert!(matches!(result, Err(Error::PathKeyNotFound(_)))); -} + let mut subq = Query::new(); + subq.insert_all(); -#[test] -fn transaction_should_be_aborted() { - let db = make_test_grovedb(); - let transaction = db.start_transaction(); + let mut sub_subquery = Query::new(); + sub_subquery.insert_all(); - let item_key = b"key3"; - let element = Element::new_item(b"ayy".to_vec()); + subq.set_subquery(sub_subquery); + query.set_subquery(subq); - db.insert( - [TEST_LEAF].as_ref(), - item_key, - element, - None, - Some(&transaction), - ) - .unwrap() - .unwrap(); + let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); - drop(transaction); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); - // Transactional data shouldn't be committed to the main database - let result = db.get([TEST_LEAF].as_ref(), item_key, None).unwrap(); - assert!(matches!(result, Err(Error::PathKeyNotFound(_)))); -} + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 14); -#[test] -fn test_subtree_pairs_iterator() { - let db = make_test_grovedb(); - let element = Element::new_item(b"ayy".to_vec()); - let element2 = Element::new_item(b"lmao".to_vec()); + let keys = [ + b"key1".to_vec(), + b"key2".to_vec(), + b"key3".to_vec(), + b"key4".to_vec(), + b"key5".to_vec(), + b"key6".to_vec(), + b"key7".to_vec(), + b"key8".to_vec(), + b"key9".to_vec(), + b"key10".to_vec(), + b"key11".to_vec(), + b"key12".to_vec(), + b"key13".to_vec(), + b"key14".to_vec(), + ]; + let values = [ + b"value1".to_vec(), + b"value2".to_vec(), + b"value3".to_vec(), + b"value4".to_vec(), + b"value5".to_vec(), + b"value6".to_vec(), + b"value7".to_vec(), + b"value8".to_vec(), + b"value9".to_vec(), + b"value10".to_vec(), + b"value11".to_vec(), + b"value12".to_vec(), + b"value13".to_vec(), + b"value14".to_vec(), + ]; + let 
elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + } - // Insert some nested subtrees - db.insert( - [TEST_LEAF].as_ref(), - b"subtree1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 1 insert"); - db.insert( - [TEST_LEAF, b"subtree1"].as_ref(), - b"subtree11", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 2 insert"); - // Insert an element into subtree - db.insert( - [TEST_LEAF, b"subtree1", b"subtree11"].as_ref(), - b"key1", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - assert_eq!( - db.get( - [TEST_LEAF, b"subtree1", b"subtree11"].as_ref(), - b"key1", - None - ) - .unwrap() - .expect("successful get 1"), - element - ); - db.insert( - [TEST_LEAF, b"subtree1", b"subtree11"].as_ref(), - b"key0", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF, b"subtree1"].as_ref(), - b"subtree12", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 3 insert"); - db.insert( - [TEST_LEAF, b"subtree1"].as_ref(), - b"key1", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF, b"subtree1"].as_ref(), - b"key2", - element2.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - - // Iterate over subtree1 to see if keys of other subtrees messed up - // let mut iter = db - // .elements_iterator([TEST_LEAF, b"subtree1"].as_ref(), None) - // .expect("cannot create iterator"); - let storage_context = db - .grove_db - .db - .get_storage_context([TEST_LEAF, b"subtree1"].as_ref().into(), None) - .unwrap(); - let mut iter = Element::iterator(storage_context.raw_iter()).unwrap(); - assert_eq!( - 
iter.next_element().unwrap().unwrap(), - Some((b"key1".to_vec(), element)) - ); - assert_eq!( - iter.next_element().unwrap().unwrap(), - Some((b"key2".to_vec(), element2)) - ); - let subtree_element = iter.next_element().unwrap().unwrap().unwrap(); - assert_eq!(subtree_element.0, b"subtree11".to_vec()); - assert!(matches!(subtree_element.1, Element::Tree(..))); - let subtree_element = iter.next_element().unwrap().unwrap().unwrap(); - assert_eq!(subtree_element.0, b"subtree12".to_vec()); - assert!(matches!(subtree_element.1, Element::Tree(..))); - assert!(matches!(iter.next_element().unwrap(), Ok(None))); -} + #[test] + fn test_path_query_proofs_with_subquery_path() { + let temp_db = make_deep_tree(); -#[test] -fn test_find_subtrees() { - let element = Element::new_item(b"ayy".to_vec()); - let db = make_test_grovedb(); - // Insert some nested subtrees - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 1 insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 2 insert"); - // Insert an element into subtree - db.insert( - [TEST_LEAF, b"key1", b"key2"].as_ref(), - b"key3", - element, - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key4", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 3 insert"); - let subtrees = db - .find_subtrees(&[TEST_LEAF].as_ref().into(), None) - .unwrap() - .expect("cannot get subtrees"); - assert_eq!( - vec![ - vec![TEST_LEAF], - vec![TEST_LEAF, b"key1"], - vec![TEST_LEAF, b"key4"], - vec![TEST_LEAF, b"key1", b"key2"], - ], - subtrees - ); -} + let mut query = Query::new(); + query.insert_all(); -#[test] -fn test_root_subtree_has_root_key() { - let db = make_test_grovedb(); - let storage = db.db.get_storage_context(EMPTY_PATH, None).unwrap(); - let root_merk = 
Merk::open_base( - storage, - false, - Some(&Element::value_defined_cost_for_serialized_value), - ) - .unwrap() - .expect("expected to get root merk"); - let (_, root_key, _) = root_merk - .root_hash_key_and_sum() - .unwrap() - .expect("expected to get root hash, key and sum"); - assert!(root_key.is_some()) -} + let mut subq = Query::new(); + subq.insert_all(); + + query.set_subquery_key(b"deeper_1".to_vec()); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); + + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + + let keys = [b"key1".to_vec(), b"key2".to_vec(), b"key3".to_vec()]; + let values = [b"value1".to_vec(), b"value2".to_vec(), b"value3".to_vec()]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + + // test subquery path with valid n > 1 valid translation + let mut query = Query::new(); + query.insert_all(); + + let mut subq = Query::new(); + subq.insert_all(); + + query.set_subquery_path(vec![b"deep_node_1".to_vec(), b"deeper_1".to_vec()]); + query.set_subquery(subq); -#[test] -fn test_get_subtree() { - let db = make_test_grovedb(); - let element = Element::new_item(b"ayy".to_vec()); + let path_query = PathQuery::new_unsized(vec![], query); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); - // Returns error is subtree is not valid - { - let subtree = 
db.get([TEST_LEAF].as_ref(), b"invalid_tree", None).unwrap(); - assert!(subtree.is_err()); + let keys = [b"key1".to_vec(), b"key2".to_vec(), b"key3".to_vec()]; + let values = [b"value1".to_vec(), b"value2".to_vec(), b"value3".to_vec()]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); - // Doesn't return an error for subtree that exists but empty - let subtree = db.get(EMPTY_PATH, TEST_LEAF, None).unwrap(); - assert!(subtree.is_ok()); + // test subquery path with empty subquery path + let mut query = Query::new(); + query.insert_all(); + + let mut subq = Query::new(); + subq.insert_all(); + + query.set_subquery_path(vec![]); + query.set_subquery(subq); + + let path_query = + PathQuery::new_unsized(vec![b"deep_leaf".to_vec(), b"deep_node_1".to_vec()], query); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 6); + + let keys = [ + b"key1".to_vec(), + b"key2".to_vec(), + b"key3".to_vec(), + b"key4".to_vec(), + b"key5".to_vec(), + b"key6".to_vec(), + ]; + let values = [ + b"value1".to_vec(), + b"value2".to_vec(), + b"value3".to_vec(), + b"value4".to_vec(), + b"value5".to_vec(), + b"value6".to_vec(), + ]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + + // test subquery path with an invalid translation + // should generate a valid absence proof with an empty result set + let mut query = Query::new(); + query.insert_all(); + + let mut subq = Query::new(); + subq.insert_all(); + + 
query.set_subquery_path(vec![ + b"deep_node_1".to_vec(), + b"deeper_10".to_vec(), + b"another_invalid_key".to_vec(), + ]); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![], query); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 0); } - // Insert some nested subtrees - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 1 insert"); - - let key1_tree = db - .get(EMPTY_PATH, TEST_LEAF, None) - .unwrap() - .expect("expected to get a root tree"); - - assert!( - matches!(key1_tree, Element::Tree(Some(_), _)), - "{}", - format!( - "expected tree with root key, got {:?}", - if let Element::Tree(tree, ..) = key1_tree { - format!("{:?}", tree) - } else { - "not a tree".to_string() - } - ) - ); + #[test] + fn test_path_query_proofs_with_key_and_subquery() { + let temp_db = make_deep_tree(); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 2 insert"); + let mut query = Query::new(); + query.insert_key(b"deep_node_1".to_vec()); - // Insert an element into subtree - db.insert( - [TEST_LEAF, b"key1", b"key2"].as_ref(), - b"key3", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key4", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 3 insert"); + let mut subq = Query::new(); + subq.insert_all(); - // Retrieve subtree instance - // Check if it returns the same instance that was inserted - { - let subtree_storage = db - .grove_db - .db - .get_storage_context([TEST_LEAF, b"key1", b"key2"].as_ref().into(), None) - .unwrap(); - let 
subtree = Merk::open_layered_with_root_key( - subtree_storage, - Some(b"key3".to_vec()), - false, - Some(&Element::value_defined_cost_for_serialized_value), - ) - .unwrap() - .expect("cannot open merk"); - let result_element = Element::get(&subtree, b"key3", true).unwrap().unwrap(); - assert_eq!(result_element, Element::new_item(b"ayy".to_vec())); + query.set_subquery_key(b"deeper_1".to_vec()); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); + + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + + let keys = [b"key1".to_vec(), b"key2".to_vec(), b"key3".to_vec()]; + let values = [b"value1".to_vec(), b"value2".to_vec(), b"value3".to_vec()]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); } - // Insert a new tree with transaction - let transaction = db.start_transaction(); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"innertree", - Element::empty_tree(), - None, - Some(&transaction), - ) - .unwrap() - .expect("successful subtree insert"); + #[test] + fn test_path_query_proofs_with_conditional_subquery() { + let temp_db = make_deep_tree(); - db.insert( - [TEST_LEAF, b"key1", b"innertree"].as_ref(), - b"key4", - element, - None, - Some(&transaction), - ) - .unwrap() - .expect("successful value insert"); + let mut query = Query::new(); + query.insert_all(); + + let mut subquery = Query::new(); + subquery.insert_all(); - // Retrieve subtree instance with transaction - let subtree_storage = db - .grove_db - .db - .get_transactional_storage_context( - [TEST_LEAF, b"key1", b"innertree"].as_ref().into(), + let 
mut final_subquery = Query::new(); + final_subquery.insert_all(); + + subquery.add_conditional_subquery( + QueryItem::Key(b"deeper_4".to_vec()), None, - &transaction, - ) - .unwrap(); - let subtree = Merk::open_layered_with_root_key( - subtree_storage, - Some(b"key4".to_vec()), - false, - Some(&Element::value_defined_cost_for_serialized_value), - ) - .unwrap() - .expect("cannot open merk"); - let result_element = Element::get(&subtree, b"key4", true).unwrap().unwrap(); - assert_eq!(result_element, Element::new_item(b"ayy".to_vec())); - - // Should be able to retrieve instances created before transaction - let subtree_storage = db - .grove_db - .db - .get_storage_context([TEST_LEAF, b"key1", b"key2"].as_ref().into(), None) - .unwrap(); - let subtree = Merk::open_layered_with_root_key( - subtree_storage, - Some(b"key3".to_vec()), - false, - Some(&Element::value_defined_cost_for_serialized_value), - ) - .unwrap() - .expect("cannot open merk"); - let result_element = Element::get(&subtree, b"key3", true).unwrap().unwrap(); - assert_eq!(result_element, Element::new_item(b"ayy".to_vec())); -} + Some(final_subquery), + ); -#[test] -fn test_get_full_query() { - let db = make_test_grovedb(); + query.set_subquery(subquery); - // Insert a couple of subtrees first - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - // Insert some elements into subtree - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key3", - Element::new_item(b"ayya".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key4", - Element::new_item(b"ayyb".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key5", - 
Element::new_item(b"ayyc".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - db.insert( - [TEST_LEAF, b"key2"].as_ref(), - b"key6", - Element::new_item(b"ayyd".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful value insert"); - - // Test_Leaf - // ___________________________ - // / \ - // key1 key2 - // ___________________________ - // | | - // key4 key6 - // / \ - // key3 key5 - // - - let path1 = vec![TEST_LEAF.to_vec(), b"key1".to_vec()]; - let path2 = vec![TEST_LEAF.to_vec(), b"key2".to_vec()]; - let mut query1 = Query::new(); - let mut query2 = Query::new(); - query1.insert_range_inclusive(b"key3".to_vec()..=b"key4".to_vec()); - query2.insert_key(b"key6".to_vec()); - - let path_query1 = PathQuery::new_unsized(path1, query1); - // should get back key3, key4 - let path_query2 = PathQuery::new_unsized(path2, query2); - // should get back key6 - - assert_eq!( - db.query_many_raw( - &[&path_query1, &path_query2], - true, - true, - true, - QueryKeyElementPairResultType, - None + let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + + let keys = [ + b"deeper_1".to_vec(), + b"deeper_2".to_vec(), + b"deeper_3".to_vec(), + b"key10".to_vec(), + b"key11".to_vec(), + b"deeper_5".to_vec(), + ]; + assert_eq!(result_set.len(), keys.len()); + + // TODO: Is this defined behaviour + for (index, key) in keys.iter().enumerate() { + assert_eq!(&result_set[index].key, key); + } + + // Default + Conditional subquery + let mut query = Query::new(); + query.insert_all(); + + let mut subquery = Query::new(); + subquery.insert_all(); + + let mut final_conditional_subquery = Query::new(); + final_conditional_subquery.insert_all(); + + let mut final_default_subquery = 
Query::new(); + final_default_subquery.insert_range_inclusive(b"key3".to_vec()..=b"key6".to_vec()); + + subquery.add_conditional_subquery( + QueryItem::Key(b"deeper_4".to_vec()), + None, + Some(final_conditional_subquery), + ); + subquery.set_subquery(final_default_subquery); + + query.set_subquery(subquery); + + let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 6); + + let keys = [ + b"key3".to_vec(), + b"key4".to_vec(), + b"key5".to_vec(), + b"key6".to_vec(), + b"key10".to_vec(), + b"key11".to_vec(), + ]; + let values = [ + b"value3".to_vec(), + b"value4".to_vec(), + b"value5".to_vec(), + b"value6".to_vec(), + b"value10".to_vec(), + b"value11".to_vec(), + ]; + let elements = values + .map(|x| Element::new_item(x).serialize().unwrap()) + .to_vec(); + // compare_result_sets(&elements, &result_set); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + } + + #[test] + fn test_path_query_proofs_with_sized_query() { + let temp_db = make_deep_tree(); + + let mut query = Query::new(); + query.insert_all(); + + let mut subquery = Query::new(); + subquery.insert_all(); + + let mut final_conditional_subquery = Query::new(); + final_conditional_subquery.insert_all(); + + let mut final_default_subquery = Query::new(); + final_default_subquery.insert_range_inclusive(b"key4".to_vec()..=b"key6".to_vec()); + + subquery.add_conditional_subquery( + QueryItem::Key(b"deeper_4".to_vec()), + None, + Some(final_conditional_subquery), + ); + subquery.set_subquery(final_default_subquery); + + query.set_subquery(subquery); + + let path_query = PathQuery::new( + vec![DEEP_LEAF.to_vec()], + 
SizedQuery::new(query, Some(5), None), /* we need to add a bigger limit because of + * empty proved sub trees */ + ); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + + let keys = [b"key4".to_vec(), b"key5".to_vec(), b"key6".to_vec()]; + let values = [b"value4".to_vec(), b"value5".to_vec(), b"value6".to_vec()]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + } + + #[test] + fn test_path_query_proof_with_range_subquery_and_limit() { + let db = make_deep_tree(); + + // Create a path query with a range query, subquery, and limit + let mut main_query = Query::new(); + main_query.insert_range_after(b"deeper_3".to_vec()..); + + let mut subquery = Query::new(); + subquery.insert_all(); + + main_query.set_subquery(subquery); + + let path_query = PathQuery::new( + vec![DEEP_LEAF.to_vec(), b"deep_node_2".to_vec()], + SizedQuery::new(main_query.clone(), Some(3), None), + ); + + // Generate proof + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + + // Verify proof + let verification_result = GroveDb::verify_query_raw(&proof, &path_query); + + match verification_result { + Ok((hash, result_set)) => { + // Check if the hash matches the root hash + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + // Check if we got the correct number of results + assert_eq!(result_set.len(), 3, "Expected 3 results due to limit"); + } + Err(e) => { + panic!("Proof verification failed: {:?}", e); + } + } + + // Now test without a limit to compare + let path_query_no_limit = PathQuery::new( + vec![DEEP_LEAF.to_vec(), b"deep_node_2".to_vec()], + 
SizedQuery::new(main_query.clone(), None, None), + ); + + let proof_no_limit = db.prove_query(&path_query_no_limit, None).unwrap().unwrap(); + let verification_result_no_limit = + GroveDb::verify_query_raw(&proof_no_limit, &path_query_no_limit); + + match verification_result_no_limit { + Ok((hash, result_set)) => { + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5, "Expected 5 results without limit"); + } + Err(e) => { + panic!("Proof verification failed (no limit): {:?}", e); + } + } + } + + #[test] + fn test_path_query_proof_with_range_subquery_and_limit_with_sum_trees() { + let db = make_deep_tree_with_sum_trees(); + + // Create a path query with a range query, subquery, and limit + let mut main_query = Query::new(); + main_query.insert_key(b"a".to_vec()); + main_query.insert_range_after(b"b".to_vec()..); + + let mut subquery = Query::new(); + subquery.insert_all(); + + main_query.set_subquery(subquery); + + main_query.add_conditional_subquery(QueryItem::Key(b"a".to_vec()), None, None); + + let path_query = PathQuery::new( + vec![DEEP_LEAF.to_vec(), b"deep_node_1".to_vec()], + SizedQuery::new(main_query.clone(), Some(3), None), + ); + + let non_proved_result_elements = db + .query( + &path_query, + false, + false, + false, + QueryResultType::QueryPathKeyElementTrioResultType, + None, + ) + .unwrap() + .expect("expected query to execute") + .0; + + assert_eq!( + non_proved_result_elements.len(), + 3, + "Expected 3 results due to limit" + ); + + let key_elements = non_proved_result_elements.to_key_elements(); + + assert_eq!( + key_elements, + vec![ + (vec![97], Element::new_item("storage".as_bytes().to_vec())), + (vec![49], Element::SumTree(Some(vec![0; 32]), 2, None)), + (vec![48], Element::new_item("v1".as_bytes().to_vec())) + ] + ); + + // Generate proof + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + + // Verify proof + let (hash, result_set) = + GroveDb::verify_query_raw(&proof, 
&path_query).expect("proof verification failed"); + + // Check if the hash matches the root hash + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + // Check if we got the correct number of results + assert_eq!(result_set.len(), 3, "Expected 3 results due to limit"); + + // Now test without a limit to compare + let path_query_no_limit = PathQuery::new( + vec![DEEP_LEAF.to_vec(), b"deep_node_1".to_vec()], + SizedQuery::new(main_query.clone(), None, None), + ); + + let proof_no_limit = db.prove_query(&path_query_no_limit, None).unwrap().unwrap(); + let verification_result_no_limit = + GroveDb::verify_query_raw(&proof_no_limit, &path_query_no_limit); + + match verification_result_no_limit { + Ok((hash, result_set)) => { + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 29, "Expected 29 results without limit"); + } + Err(e) => { + panic!("Proof verification failed (no limit): {:?}", e); + } + } + } + + #[test] + fn test_path_query_proofs_with_direction() { + let temp_db = make_deep_tree(); + + // root + // deep_leaf + // deep_node_1 + // deeper_1 + // k1,v1 + // k2,v2 + // k3,v3 + // deeper_2 + // k4,v4 + // k5,v5 + // k6,v6 + // deep_node_2 + // deeper_3 + // k7,v7 + // k8,v8 + // k9,v9 + // deeper_4 + // k10,v10 + // k11,v11 + // deeper_5 + // k12,v12 + // k13,v13 + // k14,v14 + + let mut query = Query::new_with_direction(false); + query.insert_all(); + + let mut subquery = Query::new_with_direction(false); + subquery.insert_all(); + + let mut final_conditional_subquery = Query::new_with_direction(false); + final_conditional_subquery.insert_all(); + + let mut final_default_subquery = Query::new_with_direction(false); + final_default_subquery.insert_range_inclusive(b"key3".to_vec()..=b"key6".to_vec()); + + subquery.add_conditional_subquery( + QueryItem::Key(b"deeper_4".to_vec()), + None, + Some(final_conditional_subquery), + ); + subquery.set_subquery(final_default_subquery); + + query.set_subquery(subquery); + + let 
path_query = PathQuery::new( + vec![DEEP_LEAF.to_vec()], + SizedQuery::new(query, Some(6), None), /* we need 6 because of intermediate empty + * trees in proofs */ + ); + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + + let keys = [ + b"key11".to_vec(), + b"key10".to_vec(), + b"key6".to_vec(), + b"key5".to_vec(), + ]; + let values = [ + b"value11".to_vec(), + b"value10".to_vec(), + b"value6".to_vec(), + b"value5".to_vec(), + ]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + + // combined directions + let mut query = Query::new(); + query.insert_all(); + + let mut subq = Query::new_with_direction(false); + subq.insert_all(); + + let mut sub_subquery = Query::new(); + sub_subquery.insert_all(); + + subq.set_subquery(sub_subquery); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![DEEP_LEAF.to_vec()], query); + + let proof = temp_db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_raw(proof.as_slice(), &path_query).expect("should execute proof"); + + assert_eq!(hash, temp_db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 14); + + let keys = [ + b"key4".to_vec(), + b"key5".to_vec(), + b"key6".to_vec(), + b"key1".to_vec(), + b"key2".to_vec(), + b"key3".to_vec(), + b"key12".to_vec(), + b"key13".to_vec(), + b"key14".to_vec(), + b"key10".to_vec(), + b"key11".to_vec(), + b"key7".to_vec(), + b"key8".to_vec(), + b"key9".to_vec(), + ]; + let values = [ + b"value4".to_vec(), + b"value5".to_vec(), + b"value6".to_vec(), + b"value1".to_vec(), + b"value2".to_vec(), + 
b"value3".to_vec(), + b"value12".to_vec(), + b"value13".to_vec(), + b"value14".to_vec(), + b"value10".to_vec(), + b"value11".to_vec(), + b"value7".to_vec(), + b"value8".to_vec(), + b"value9".to_vec(), + ]; + let elements = values.map(|x| Element::new_item(x).serialize().unwrap()); + let expected_result_set: Vec<(Vec, Vec)> = keys.into_iter().zip(elements).collect(); + compare_result_tuples(result_set, expected_result_set); + } + + #[test] + fn test_checkpoint() { + let db = make_test_grovedb(); + let element1 = Element::new_item(b"ayy".to_vec()); + + db.insert(EMPTY_PATH, b"key1", Element::empty_tree(), None, None) + .unwrap() + .expect("cannot insert a subtree 1 into GroveDB"); + db.insert( + [b"key1".as_ref()].as_ref(), + b"key2", + Element::empty_tree(), + None, + None, ) .unwrap() - .expect("expected successful get_query") - .to_key_elements(), - vec![ - (b"key3".to_vec(), Element::new_item(b"ayya".to_vec())), - (b"key4".to_vec(), Element::new_item(b"ayyb".to_vec())), - (b"key6".to_vec(), Element::new_item(b"ayyd".to_vec())), - ] - ); -} + .expect("cannot insert a subtree 2 into GroveDB"); + db.insert( + [b"key1".as_ref(), b"key2".as_ref()].as_ref(), + b"key3", + element1.clone(), + None, + None, + ) + .unwrap() + .expect("cannot insert an item into GroveDB"); + + assert_eq!( + db.get([b"key1".as_ref(), b"key2".as_ref()].as_ref(), b"key3", None) + .unwrap() + .expect("cannot get from grovedb"), + element1 + ); + + let tempdir_parent = TempDir::new().expect("cannot open tempdir"); + let checkpoint_tempdir = tempdir_parent.path().join("checkpoint"); + db.create_checkpoint(&checkpoint_tempdir) + .expect("cannot create checkpoint"); + + let checkpoint_db = + GroveDb::open(checkpoint_tempdir).expect("cannot open grovedb from checkpoint"); + + assert_eq!( + db.get([b"key1".as_ref(), b"key2".as_ref()].as_ref(), b"key3", None) + .unwrap() + .expect("cannot get from grovedb"), + element1 + ); + assert_eq!( + checkpoint_db + .get([b"key1".as_ref(), 
b"key2".as_ref()].as_ref(), b"key3", None) + .unwrap() + .expect("cannot get from checkpoint"), + element1 + ); + + let element2 = Element::new_item(b"ayy2".to_vec()); + let element3 = Element::new_item(b"ayy3".to_vec()); + + checkpoint_db + .insert( + [b"key1".as_ref()].as_ref(), + b"key4", + element2.clone(), + None, + None, + ) + .unwrap() + .expect("cannot insert into checkpoint"); + + db.insert( + [b"key1".as_ref()].as_ref(), + b"key4", + element3.clone(), + None, + None, + ) + .unwrap() + .expect("cannot insert into GroveDB"); + + assert_eq!( + checkpoint_db + .get([b"key1".as_ref()].as_ref(), b"key4", None) + .unwrap() + .expect("cannot get from checkpoint"), + element2, + ); + + assert_eq!( + db.get([b"key1".as_ref()].as_ref(), b"key4", None) + .unwrap() + .expect("cannot get from GroveDB"), + element3 + ); + + checkpoint_db + .insert( + [b"key1".as_ref()].as_ref(), + b"key5", + element3.clone(), + None, + None, + ) + .unwrap() + .expect("cannot insert into checkpoint"); + + db.insert([b"key1".as_ref()].as_ref(), b"key6", element3, None, None) + .unwrap() + .expect("cannot insert into GroveDB"); + + assert!(matches!( + checkpoint_db + .get([b"key1".as_ref()].as_ref(), b"key6", None) + .unwrap(), + Err(Error::PathKeyNotFound(_)) + )); + + assert!(matches!( + db.get([b"key1".as_ref()].as_ref(), b"key5", None).unwrap(), + Err(Error::PathKeyNotFound(_)) + )); + } + + #[test] + fn test_is_empty_tree() { + let db = make_test_grovedb(); + + // Create an empty tree with no elements + db.insert( + [TEST_LEAF].as_ref(), + b"innertree", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .unwrap(); + + assert!(db + .is_empty_tree([TEST_LEAF, b"innertree"].as_ref(), None) + .unwrap() + .expect("path is valid tree")); + + // add an element to the tree to make it non empty + db.insert( + [TEST_LEAF, b"innertree"].as_ref(), + b"key1", + Element::new_item(b"hello".to_vec()), + None, + None, + ) + .unwrap() + .unwrap(); + assert!(!db + .is_empty_tree([TEST_LEAF, 
b"innertree"].as_ref(), None) + .unwrap() + .expect("path is valid tree")); + } + + #[test] + fn transaction_should_be_aborted_when_rollback_is_called() { + let item_key = b"key3"; + + let db = make_test_grovedb(); + let transaction = db.start_transaction(); + + let element1 = Element::new_item(b"ayy".to_vec()); + + let result = db + .insert( + [TEST_LEAF].as_ref(), + item_key, + element1, + None, + Some(&transaction), + ) + .unwrap(); + + assert!(matches!(result, Ok(()))); + + db.rollback_transaction(&transaction).unwrap(); + + let result = db + .get([TEST_LEAF].as_ref(), item_key, Some(&transaction)) + .unwrap(); + assert!(matches!(result, Err(Error::PathKeyNotFound(_)))); + } + + #[test] + fn transaction_should_be_aborted() { + let db = make_test_grovedb(); + let transaction = db.start_transaction(); + + let item_key = b"key3"; + let element = Element::new_item(b"ayy".to_vec()); + + db.insert( + [TEST_LEAF].as_ref(), + item_key, + element, + None, + Some(&transaction), + ) + .unwrap() + .unwrap(); + + drop(transaction); + + // Transactional data shouldn't be committed to the main database + let result = db.get([TEST_LEAF].as_ref(), item_key, None).unwrap(); + assert!(matches!(result, Err(Error::PathKeyNotFound(_)))); + } + + #[test] + fn test_subtree_pairs_iterator() { + let db = make_test_grovedb(); + let element = Element::new_item(b"ayy".to_vec()); + let element2 = Element::new_item(b"lmao".to_vec()); + + // Insert some nested subtrees + db.insert( + [TEST_LEAF].as_ref(), + b"subtree1", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 1 insert"); + db.insert( + [TEST_LEAF, b"subtree1"].as_ref(), + b"subtree11", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 2 insert"); + // Insert an element into subtree + db.insert( + [TEST_LEAF, b"subtree1", b"subtree11"].as_ref(), + b"key1", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + assert_eq!( + 
db.get( + [TEST_LEAF, b"subtree1", b"subtree11"].as_ref(), + b"key1", + None + ) + .unwrap() + .expect("successful get 1"), + element + ); + db.insert( + [TEST_LEAF, b"subtree1", b"subtree11"].as_ref(), + b"key0", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + db.insert( + [TEST_LEAF, b"subtree1"].as_ref(), + b"subtree12", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 3 insert"); + db.insert( + [TEST_LEAF, b"subtree1"].as_ref(), + b"key1", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + db.insert( + [TEST_LEAF, b"subtree1"].as_ref(), + b"key2", + element2.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + + // Iterate over subtree1 to see if keys of other subtrees messed up + // let mut iter = db + // .elements_iterator([TEST_LEAF, b"subtree1"].as_ref(), None) + // .expect("cannot create iterator"); + let storage_context = db + .grove_db + .db + .get_storage_context([TEST_LEAF, b"subtree1"].as_ref().into(), None) + .unwrap(); + let mut iter = Element::iterator(storage_context.raw_iter()).unwrap(); + assert_eq!( + iter.next_element().unwrap().unwrap(), + Some((b"key1".to_vec(), element)) + ); + assert_eq!( + iter.next_element().unwrap().unwrap(), + Some((b"key2".to_vec(), element2)) + ); + let subtree_element = iter.next_element().unwrap().unwrap().unwrap(); + assert_eq!(subtree_element.0, b"subtree11".to_vec()); + assert!(matches!(subtree_element.1, Element::Tree(..))); + let subtree_element = iter.next_element().unwrap().unwrap().unwrap(); + assert_eq!(subtree_element.0, b"subtree12".to_vec()); + assert!(matches!(subtree_element.1, Element::Tree(..))); + assert!(matches!(iter.next_element().unwrap(), Ok(None))); + } + + #[test] + fn test_find_subtrees() { + let element = Element::new_item(b"ayy".to_vec()); + let db = make_test_grovedb(); + // Insert some nested subtrees + db.insert( + [TEST_LEAF].as_ref(), + 
b"key1", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 1 insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key2", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 2 insert"); + // Insert an element into subtree + db.insert( + [TEST_LEAF, b"key1", b"key2"].as_ref(), + b"key3", + element, + None, + None, + ) + .unwrap() + .expect("successful value insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key4", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 3 insert"); + let subtrees = db + .find_subtrees(&[TEST_LEAF].as_ref().into(), None) + .unwrap() + .expect("cannot get subtrees"); + assert_eq!( + vec![ + vec![TEST_LEAF], + vec![TEST_LEAF, b"key1"], + vec![TEST_LEAF, b"key4"], + vec![TEST_LEAF, b"key1", b"key2"], + ], + subtrees + ); + } + + #[test] + fn test_root_subtree_has_root_key() { + let db = make_test_grovedb(); + let storage = db.db.get_storage_context(EMPTY_PATH, None).unwrap(); + let root_merk = Merk::open_base( + storage, + false, + Some(&Element::value_defined_cost_for_serialized_value), + ) + .unwrap() + .expect("expected to get root merk"); + let (_, root_key, _) = root_merk + .root_hash_key_and_sum() + .unwrap() + .expect("expected to get root hash, key and sum"); + assert!(root_key.is_some()) + } + + #[test] + fn test_get_subtree() { + let db = make_test_grovedb(); + let element = Element::new_item(b"ayy".to_vec()); + + // Returns error is subtree is not valid + { + let subtree = db.get([TEST_LEAF].as_ref(), b"invalid_tree", None).unwrap(); + assert!(subtree.is_err()); + + // Doesn't return an error for subtree that exists but empty + let subtree = db.get(EMPTY_PATH, TEST_LEAF, None).unwrap(); + assert!(subtree.is_ok()); + } + + // Insert some nested subtrees + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 1 insert"); + + let key1_tree = db 
+ .get(EMPTY_PATH, TEST_LEAF, None) + .unwrap() + .expect("expected to get a root tree"); + + assert!( + matches!(key1_tree, Element::Tree(Some(_), _)), + "{}", + format!( + "expected tree with root key, got {:?}", + if let Element::Tree(tree, ..) = key1_tree { + format!("{:?}", tree) + } else { + "not a tree".to_string() + } + ) + ); + + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key2", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 2 insert"); + + // Insert an element into subtree + db.insert( + [TEST_LEAF, b"key1", b"key2"].as_ref(), + b"key3", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key4", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 3 insert"); + + // Retrieve subtree instance + // Check if it returns the same instance that was inserted + { + let subtree_storage = db + .grove_db + .db + .get_storage_context([TEST_LEAF, b"key1", b"key2"].as_ref().into(), None) + .unwrap(); + let subtree = Merk::open_layered_with_root_key( + subtree_storage, + Some(b"key3".to_vec()), + false, + Some(&Element::value_defined_cost_for_serialized_value), + ) + .unwrap() + .expect("cannot open merk"); + let result_element = Element::get(&subtree, b"key3", true).unwrap().unwrap(); + assert_eq!(result_element, Element::new_item(b"ayy".to_vec())); + } + // Insert a new tree with transaction + let transaction = db.start_transaction(); + + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"innertree", + Element::empty_tree(), + None, + Some(&transaction), + ) + .unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF, b"key1", b"innertree"].as_ref(), + b"key4", + element, + None, + Some(&transaction), + ) + .unwrap() + .expect("successful value insert"); + + // Retrieve subtree instance with transaction + let subtree_storage = db + .grove_db + .db + .get_transactional_storage_context( + 
[TEST_LEAF, b"key1", b"innertree"].as_ref().into(), + None, + &transaction, + ) + .unwrap(); + let subtree = Merk::open_layered_with_root_key( + subtree_storage, + Some(b"key4".to_vec()), + false, + Some(&Element::value_defined_cost_for_serialized_value), + ) + .unwrap() + .expect("cannot open merk"); + let result_element = Element::get(&subtree, b"key4", true).unwrap().unwrap(); + assert_eq!(result_element, Element::new_item(b"ayy".to_vec())); + + // Should be able to retrieve instances created before transaction + let subtree_storage = db + .grove_db + .db + .get_storage_context([TEST_LEAF, b"key1", b"key2"].as_ref().into(), None) + .unwrap(); + let subtree = Merk::open_layered_with_root_key( + subtree_storage, + Some(b"key3".to_vec()), + false, + Some(&Element::value_defined_cost_for_serialized_value), + ) + .unwrap() + .expect("cannot open merk"); + let result_element = Element::get(&subtree, b"key3", true).unwrap().unwrap(); + assert_eq!(result_element, Element::new_item(b"ayy".to_vec())); + } -#[test] -fn test_aux_uses_separate_cf() { - let element = Element::new_item(b"ayy".to_vec()); - let db = make_test_grovedb(); - // Insert some nested subtrees - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 1 insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree 2 insert"); - // Insert an element into subtree - db.insert( - [TEST_LEAF, b"key1", b"key2"].as_ref(), - b"key3", - element.clone(), - None, - None, - ) - .unwrap() - .expect("successful value insert"); + #[test] + fn test_get_full_query() { + let db = make_test_grovedb(); - db.put_aux(b"key1", b"a", None, None) + // Insert a couple of subtrees first + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), 
+ b"key2", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + // Insert some elements into subtree + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key3", + Element::new_item(b"ayya".to_vec()), + None, + None, + ) .unwrap() - .expect("cannot put aux"); - db.put_aux(b"key2", b"b", None, None) + .expect("successful value insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key4", + Element::new_item(b"ayyb".to_vec()), + None, + None, + ) .unwrap() - .expect("cannot put aux"); - db.put_aux(b"key3", b"c", None, None) + .expect("successful value insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key5", + Element::new_item(b"ayyc".to_vec()), + None, + None, + ) .unwrap() - .expect("cannot put aux"); - db.delete_aux(b"key3", None, None) + .expect("successful value insert"); + db.insert( + [TEST_LEAF, b"key2"].as_ref(), + b"key6", + Element::new_item(b"ayyd".to_vec()), + None, + None, + ) .unwrap() - .expect("cannot delete from aux"); + .expect("successful value insert"); - assert_eq!( - db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) + // Test_Leaf + // ___________________________ + // / \ + // key1 key2 + // ___________________________ + // | | + // key4 key6 + // / \ + // key3 key5 + // + + let path1 = vec![TEST_LEAF.to_vec(), b"key1".to_vec()]; + let path2 = vec![TEST_LEAF.to_vec(), b"key2".to_vec()]; + let mut query1 = Query::new(); + let mut query2 = Query::new(); + query1.insert_range_inclusive(b"key3".to_vec()..=b"key4".to_vec()); + query2.insert_key(b"key6".to_vec()); + + let path_query1 = PathQuery::new_unsized(path1, query1); + // should get back key3, key4 + let path_query2 = PathQuery::new_unsized(path2, query2); + // should get back key6 + + assert_eq!( + db.query_many_raw( + &[&path_query1, &path_query2], + true, + true, + true, + QueryKeyElementPairResultType, + None + ) .unwrap() - .expect("cannot get element"), - element - ); - assert_eq!( - db.get_aux(b"key1", None) + 
.expect("expected successful get_query") + .to_key_elements(), + vec![ + (b"key3".to_vec(), Element::new_item(b"ayya".to_vec())), + (b"key4".to_vec(), Element::new_item(b"ayyb".to_vec())), + (b"key6".to_vec(), Element::new_item(b"ayyd".to_vec())), + ] + ); + } + + #[test] + fn test_aux_uses_separate_cf() { + let element = Element::new_item(b"ayy".to_vec()); + let db = make_test_grovedb(); + // Insert some nested subtrees + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 1 insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"key2", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree 2 insert"); + // Insert an element into subtree + db.insert( + [TEST_LEAF, b"key1", b"key2"].as_ref(), + b"key3", + element.clone(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + + db.put_aux(b"key1", b"a", None, None) .unwrap() - .expect("cannot get from aux"), - Some(b"a".to_vec()) - ); - assert_eq!( - db.get_aux(b"key2", None) + .expect("cannot put aux"); + db.put_aux(b"key2", b"b", None, None) .unwrap() - .expect("cannot get from aux"), - Some(b"b".to_vec()) - ); - assert_eq!( - db.get_aux(b"key3", None) + .expect("cannot put aux"); + db.put_aux(b"key3", b"c", None, None) .unwrap() - .expect("cannot get from aux"), - None - ); - assert_eq!( - db.get_aux(b"key4", None) + .expect("cannot put aux"); + db.delete_aux(b"key3", None, None) .unwrap() - .expect("cannot get from aux"), - None - ); -} + .expect("cannot delete from aux"); -#[test] -fn test_aux_with_transaction() { - let element = Element::new_item(b"ayy".to_vec()); - let aux_value = b"ayylmao".to_vec(); - let key = b"key".to_vec(); - let db = make_test_grovedb(); - let transaction = db.start_transaction(); + assert_eq!( + db.get([TEST_LEAF, b"key1", b"key2"].as_ref(), b"key3", None) + .unwrap() + .expect("cannot get element"), + element + ); + assert_eq!( + db.get_aux(b"key1", 
None) + .unwrap() + .expect("cannot get from aux"), + Some(b"a".to_vec()) + ); + assert_eq!( + db.get_aux(b"key2", None) + .unwrap() + .expect("cannot get from aux"), + Some(b"b".to_vec()) + ); + assert_eq!( + db.get_aux(b"key3", None) + .unwrap() + .expect("cannot get from aux"), + None + ); + assert_eq!( + db.get_aux(b"key4", None) + .unwrap() + .expect("cannot get from aux"), + None + ); + } - // Insert a regular data with aux data in the same transaction - db.insert( - [TEST_LEAF].as_ref(), - &key, - element, - None, - Some(&transaction), - ) - .unwrap() - .expect("unable to insert"); - db.put_aux(&key, &aux_value, None, Some(&transaction)) - .unwrap() - .expect("unable to insert aux value"); - assert_eq!( - db.get_aux(&key, Some(&transaction)) - .unwrap() - .expect("unable to get aux value"), - Some(aux_value.clone()) - ); - // Cannot reach the data outside of transaction - assert_eq!( - db.get_aux(&key, None) - .unwrap() - .expect("unable to get aux value"), - None - ); - // And should be able to get data when committed - db.commit_transaction(transaction) - .unwrap() - .expect("unable to commit transaction"); - assert_eq!( - db.get_aux(&key, None) - .unwrap() - .expect("unable to get committed aux value"), - Some(aux_value) - ); -} + #[test] + fn test_aux_with_transaction() { + let element = Element::new_item(b"ayy".to_vec()); + let aux_value = b"ayylmao".to_vec(); + let key = b"key".to_vec(); + let db = make_test_grovedb(); + let transaction = db.start_transaction(); -#[test] -fn test_root_hash() { - let db = make_test_grovedb(); - // Check hashes are different if tree is edited - let old_root_hash = db.root_hash(None).unwrap(); - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::new_item(b"ayy".to_vec()), - None, - None, - ) - .unwrap() - .expect("unable to insert an item"); - assert_ne!(old_root_hash.unwrap(), db.root_hash(None).unwrap().unwrap()); + // Insert a regular data with aux data in the same transaction + db.insert( + [TEST_LEAF].as_ref(), 
+ &key, + element, + None, + Some(&transaction), + ) + .unwrap() + .expect("unable to insert"); + db.put_aux(&key, &aux_value, None, Some(&transaction)) + .unwrap() + .expect("unable to insert aux value"); + assert_eq!( + db.get_aux(&key, Some(&transaction)) + .unwrap() + .expect("unable to get aux value"), + Some(aux_value.clone()) + ); + // Cannot reach the data outside of transaction + assert_eq!( + db.get_aux(&key, None) + .unwrap() + .expect("unable to get aux value"), + None + ); + // And should be able to get data when committed + db.commit_transaction(transaction) + .unwrap() + .expect("unable to commit transaction"); + assert_eq!( + db.get_aux(&key, None) + .unwrap() + .expect("unable to get committed aux value"), + Some(aux_value) + ); + } - // Check isolation - let transaction = db.start_transaction(); + #[test] + fn test_root_hash() { + let db = make_test_grovedb(); + // Check hashes are different if tree is edited + let old_root_hash = db.root_hash(None).unwrap(); + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::new_item(b"ayy".to_vec()), + None, + None, + ) + .unwrap() + .expect("unable to insert an item"); + assert_ne!(old_root_hash.unwrap(), db.root_hash(None).unwrap().unwrap()); - db.insert( - [TEST_LEAF].as_ref(), - b"key2", - Element::new_item(b"ayy".to_vec()), - None, - Some(&transaction), - ) - .unwrap() - .expect("unable to insert an item"); - let root_hash_outside = db.root_hash(None).unwrap().unwrap(); - assert_ne!( - db.root_hash(Some(&transaction)).unwrap().unwrap(), - root_hash_outside - ); - - assert_eq!(db.root_hash(None).unwrap().unwrap(), root_hash_outside); - db.commit_transaction(transaction).unwrap().unwrap(); - assert_ne!(db.root_hash(None).unwrap().unwrap(), root_hash_outside); -} + // Check isolation + let transaction = db.start_transaction(); -#[test] -fn test_get_non_existing_root_leaf() { - let db = make_test_grovedb(); - assert!(db.get(EMPTY_PATH, b"ayy", None).unwrap().is_err()); -} + db.insert( + 
[TEST_LEAF].as_ref(), + b"key2", + Element::new_item(b"ayy".to_vec()), + None, + Some(&transaction), + ) + .unwrap() + .expect("unable to insert an item"); + let root_hash_outside = db.root_hash(None).unwrap().unwrap(); + assert_ne!( + db.root_hash(Some(&transaction)).unwrap().unwrap(), + root_hash_outside + ); -#[test] -fn test_check_subtree_exists_function() { - let db = make_test_grovedb(); - db.insert( - [TEST_LEAF].as_ref(), - b"key_scalar", - Element::new_item(b"ayy".to_vec()), - None, - None, - ) - .unwrap() - .expect("cannot insert item"); - db.insert( - [TEST_LEAF].as_ref(), - b"key_subtree", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("cannot insert item"); + assert_eq!(db.root_hash(None).unwrap().unwrap(), root_hash_outside); + db.commit_transaction(transaction).unwrap().unwrap(); + assert_ne!(db.root_hash(None).unwrap().unwrap(), root_hash_outside); + } - // Empty tree path means root always exist - assert!(db - .check_subtree_exists_invalid_path(EMPTY_PATH, None) - .unwrap() - .is_ok()); + #[test] + fn test_get_non_existing_root_leaf() { + let db = make_test_grovedb(); + assert!(db.get(EMPTY_PATH, b"ayy", None).unwrap().is_err()); + } - // TEST_LEAF should be a tree - assert!(db - .check_subtree_exists_invalid_path([TEST_LEAF].as_ref().into(), None) + #[test] + fn test_check_subtree_exists_function() { + let db = make_test_grovedb(); + db.insert( + [TEST_LEAF].as_ref(), + b"key_scalar", + Element::new_item(b"ayy".to_vec()), + None, + None, + ) .unwrap() - .is_ok()); - - // TEST_LEAF.key_subtree should be a tree - assert!(db - .check_subtree_exists_invalid_path([TEST_LEAF, b"key_subtree"].as_ref().into(), None) + .expect("cannot insert item"); + db.insert( + [TEST_LEAF].as_ref(), + b"key_subtree", + Element::empty_tree(), + None, + None, + ) .unwrap() - .is_ok()); + .expect("cannot insert item"); - // TEST_LEAF.key_scalar should NOT be a tree - assert!(matches!( - db.check_subtree_exists_invalid_path([TEST_LEAF, 
b"key_scalar"].as_ref().into(), None) - .unwrap(), - Err(Error::InvalidPath(_)) - )); -} + // Empty tree path means root always exist + assert!(db + .check_subtree_exists_invalid_path(EMPTY_PATH, None) + .unwrap() + .is_ok()); -#[test] -fn test_tree_value_exists_method_no_tx() { - let db = make_test_grovedb(); - // Test keys in non-root tree - db.insert( - [TEST_LEAF].as_ref(), - b"key", - Element::new_item(b"ayy".to_vec()), - None, - None, - ) - .unwrap() - .expect("cannot insert item"); - assert!(db - .has_raw([TEST_LEAF].as_ref(), b"key", None) - .unwrap() - .unwrap()); - assert!(!db - .has_raw([TEST_LEAF].as_ref(), b"badkey", None) - .unwrap() - .unwrap()); + // TEST_LEAF should be a tree + assert!(db + .check_subtree_exists_invalid_path([TEST_LEAF].as_ref().into(), None) + .unwrap() + .is_ok()); + + // TEST_LEAF.key_subtree should be a tree + assert!(db + .check_subtree_exists_invalid_path([TEST_LEAF, b"key_subtree"].as_ref().into(), None) + .unwrap() + .is_ok()); + + // TEST_LEAF.key_scalar should NOT be a tree + assert!(matches!( + db.check_subtree_exists_invalid_path([TEST_LEAF, b"key_scalar"].as_ref().into(), None) + .unwrap(), + Err(Error::InvalidPath(_)) + )); + } - // Test keys for a root tree - db.insert(EMPTY_PATH, b"leaf", Element::empty_tree(), None, None) + #[test] + fn test_tree_value_exists_method_no_tx() { + let db = make_test_grovedb(); + // Test keys in non-root tree + db.insert( + [TEST_LEAF].as_ref(), + b"key", + Element::new_item(b"ayy".to_vec()), + None, + None, + ) .unwrap() .expect("cannot insert item"); + assert!(db + .has_raw([TEST_LEAF].as_ref(), b"key", None) + .unwrap() + .unwrap()); + assert!(!db + .has_raw([TEST_LEAF].as_ref(), b"badkey", None) + .unwrap() + .unwrap()); - assert!(db.has_raw(EMPTY_PATH, b"leaf", None).unwrap().unwrap()); - assert!(db.has_raw(EMPTY_PATH, TEST_LEAF, None).unwrap().unwrap()); - assert!(!db.has_raw(EMPTY_PATH, b"badleaf", None).unwrap().unwrap()); -} + // Test keys for a root tree + 
db.insert(EMPTY_PATH, b"leaf", Element::empty_tree(), None, None) + .unwrap() + .expect("cannot insert item"); -#[test] -fn test_tree_value_exists_method_tx() { - let db = make_test_grovedb(); - let tx = db.start_transaction(); - // Test keys in non-root tree - db.insert( - [TEST_LEAF].as_ref(), - b"key", - Element::new_item(b"ayy".to_vec()), - None, - Some(&tx), - ) - .unwrap() - .expect("cannot insert item"); - assert!(db - .has_raw([TEST_LEAF].as_ref(), b"key", Some(&tx)) - .unwrap() - .unwrap()); - assert!(!db - .has_raw([TEST_LEAF].as_ref(), b"key", None) - .unwrap() - .unwrap()); + assert!(db.has_raw(EMPTY_PATH, b"leaf", None).unwrap().unwrap()); + assert!(db.has_raw(EMPTY_PATH, TEST_LEAF, None).unwrap().unwrap()); + assert!(!db.has_raw(EMPTY_PATH, b"badleaf", None).unwrap().unwrap()); + } - // Test keys for a root tree - db.insert(EMPTY_PATH, b"leaf", Element::empty_tree(), None, Some(&tx)) + #[test] + fn test_tree_value_exists_method_tx() { + let db = make_test_grovedb(); + let tx = db.start_transaction(); + // Test keys in non-root tree + db.insert( + [TEST_LEAF].as_ref(), + b"key", + Element::new_item(b"ayy".to_vec()), + None, + Some(&tx), + ) .unwrap() .expect("cannot insert item"); - assert!(db.has_raw(EMPTY_PATH, b"leaf", Some(&tx)).unwrap().unwrap()); - assert!(!db.has_raw(EMPTY_PATH, b"leaf", None).unwrap().unwrap()); + assert!(db + .has_raw([TEST_LEAF].as_ref(), b"key", Some(&tx)) + .unwrap() + .unwrap()); + assert!(!db + .has_raw([TEST_LEAF].as_ref(), b"key", None) + .unwrap() + .unwrap()); - db.commit_transaction(tx) - .unwrap() - .expect("cannot commit transaction"); - assert!(db - .has_raw([TEST_LEAF].as_ref(), b"key", None) + // Test keys for a root tree + db.insert(EMPTY_PATH, b"leaf", Element::empty_tree(), None, Some(&tx)) + .unwrap() + .expect("cannot insert item"); + assert!(db.has_raw(EMPTY_PATH, b"leaf", Some(&tx)).unwrap().unwrap()); + assert!(!db.has_raw(EMPTY_PATH, b"leaf", None).unwrap().unwrap()); + + db.commit_transaction(tx) + 
.unwrap() + .expect("cannot commit transaction"); + assert!(db + .has_raw([TEST_LEAF].as_ref(), b"key", None) + .unwrap() + .unwrap()); + assert!(db.has_raw(EMPTY_PATH, b"leaf", None).unwrap().unwrap()); + } + + #[test] + fn test_storage_wipe() { + let db = make_test_grovedb(); + let _path = db._tmp_dir.path(); + + // Test keys in non-root tree + db.insert( + [TEST_LEAF].as_ref(), + b"key", + Element::new_item(b"ayy".to_vec()), + None, + None, + ) .unwrap() - .unwrap()); - assert!(db.has_raw(EMPTY_PATH, b"leaf", None).unwrap().unwrap()); -} + .expect("cannot insert item"); -#[test] -fn test_storage_wipe() { - let db = make_test_grovedb(); - let _path = db._tmp_dir.path(); + // retrieve key before wipe + let elem = db.get(&[TEST_LEAF], b"key", None).unwrap().unwrap(); + assert_eq!(elem, Element::new_item(b"ayy".to_vec())); - // Test keys in non-root tree - db.insert( - [TEST_LEAF].as_ref(), - b"key", - Element::new_item(b"ayy".to_vec()), - None, - None, - ) - .unwrap() - .expect("cannot insert item"); - - // retrieve key before wipe - let elem = db.get(&[TEST_LEAF], b"key", None).unwrap().unwrap(); - assert_eq!(elem, Element::new_item(b"ayy".to_vec())); - - // wipe the database - db.grove_db.wipe().unwrap(); - - // retrieve key after wipe - let elem_result = db.get(&[TEST_LEAF], b"key", None).unwrap(); - assert!(elem_result.is_err()); - assert!(matches!( - elem_result, - Err(Error::PathParentLayerNotFound(..)) - )); + // wipe the database + db.grove_db.wipe().unwrap(); + + // retrieve key after wipe + let elem_result = db.get(&[TEST_LEAF], b"key", None).unwrap(); + assert!(elem_result.is_err()); + assert!(matches!( + elem_result, + Err(Error::PathParentLayerNotFound(..)) + )); + } } diff --git a/grovedb/src/tests/query_tests.rs b/grovedb/src/tests/query_tests.rs index 579b2e426..e7325a6ae 100644 --- a/grovedb/src/tests/query_tests.rs +++ b/grovedb/src/tests/query_tests.rs @@ -1,130 +1,168 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// 
Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - -//! 
Query tests - -use grovedb_merk::proofs::{query::QueryItem, Query}; -use rand::Rng; -use tempfile::TempDir; - -use crate::{ - batch::GroveDbOp, - query_result_type::{PathKeyOptionalElementTrio, QueryResultType}, - reference_path::ReferencePathType, - tests::{ - common::compare_result_sets, make_deep_tree, make_test_grovedb, TempGroveDb, - ANOTHER_TEST_LEAF, TEST_LEAF, - }, - Element, GroveDb, PathQuery, SizedQuery, -}; - -fn populate_tree_for_non_unique_range_subquery(db: &TempGroveDb) { - // Insert a couple of subtrees first - for i in 1985u32..2000 { - let i_vec = i.to_be_bytes().to_vec(); - db.insert( - [TEST_LEAF].as_ref(), - &i_vec, - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - // Insert element 0 - // Insert some elements into subtree - db.insert( - [TEST_LEAF, i_vec.as_slice()].as_ref(), - b"\0", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); +mod tests { + //! Query tests + + use grovedb_merk::proofs::{query::QueryItem, Query}; + use rand::Rng; + use tempfile::TempDir; + + use crate::{ + batch::GroveDbOp, + query_result_type::{ + PathKeyOptionalElementTrio, QueryResultElement::PathKeyElementTrioResultItem, + QueryResultElements, QueryResultType, + }, + reference_path::ReferencePathType, + tests::{ + common::compare_result_sets, make_deep_tree, make_test_grovedb, TempGroveDb, + ANOTHER_TEST_LEAF, TEST_LEAF, + }, + Element, GroveDb, PathQuery, SizedQuery, + }; - for j in 100u32..150 { - let mut j_vec = i_vec.clone(); - j_vec.append(&mut j.to_be_bytes().to_vec()); + fn populate_tree_for_non_unique_range_subquery(db: &TempGroveDb) { + // Insert a couple of subtrees first + for i in 1985u32..2000 { + let i_vec = i.to_be_bytes().to_vec(); db.insert( - [TEST_LEAF, i_vec.as_slice(), b"\0"].as_ref(), - &j_vec.clone(), - Element::new_item(j_vec), + [TEST_LEAF].as_ref(), + &i_vec, + Element::empty_tree(), None, None, ) .unwrap() - .expect("successful value insert"); 
+ .expect("successful subtree insert"); + // Insert element 0 + // Insert some elements into subtree + db.insert( + [TEST_LEAF, i_vec.as_slice()].as_ref(), + b"\0", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + for j in 100u32..150 { + let mut j_vec = i_vec.clone(); + j_vec.append(&mut j.to_be_bytes().to_vec()); + db.insert( + [TEST_LEAF, i_vec.as_slice(), b"\0"].as_ref(), + &j_vec.clone(), + Element::new_item(j_vec), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + } } } -} -fn populate_tree_for_non_unique_double_range_subquery(db: &TempGroveDb) { - // Insert a couple of subtrees first - for i in 0u32..10 { - let i_vec = i.to_be_bytes().to_vec(); + fn populate_tree_for_non_unique_double_range_subquery(db: &TempGroveDb) { + // Insert a couple of subtrees first + for i in 0u32..10 { + let i_vec = i.to_be_bytes().to_vec(); + db.insert( + [TEST_LEAF].as_ref(), + &i_vec, + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + // Insert element 0 + // Insert some elements into subtree + db.insert( + [TEST_LEAF, i_vec.as_slice()].as_ref(), + b"a", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + for j in 25u32..50 { + let j_vec = j.to_be_bytes().to_vec(); + db.insert( + [TEST_LEAF, i_vec.as_slice(), b"a"].as_ref(), + &j_vec, + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + + // Insert element 0 + // Insert some elements into subtree + db.insert( + [TEST_LEAF, i_vec.as_slice(), b"a", j_vec.as_slice()].as_ref(), + b"\0", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + for k in 100u32..110 { + let k_vec = k.to_be_bytes().to_vec(); + db.insert( + [TEST_LEAF, i_vec.as_slice(), b"a", &j_vec, b"\0"].as_ref(), + &k_vec.clone(), + Element::new_item(k_vec), + None, + None, + ) + .unwrap() + .expect("successful 
value insert"); + } + } + } + } + + fn populate_tree_by_reference_for_non_unique_range_subquery(db: &TempGroveDb) { + // This subtree will be holding values db.insert( [TEST_LEAF].as_ref(), - &i_vec, + b"\0", Element::empty_tree(), None, None, ) .unwrap() .expect("successful subtree insert"); - // Insert element 0 - // Insert some elements into subtree + + // This subtree will be holding references db.insert( - [TEST_LEAF, i_vec.as_slice()].as_ref(), - b"a", + [TEST_LEAF].as_ref(), + b"1", Element::empty_tree(), None, None, ) .unwrap() .expect("successful subtree insert"); - - for j in 25u32..50 { - let j_vec = j.to_be_bytes().to_vec(); + // Insert a couple of subtrees first + for i in 1985u32..2000 { + let i_vec = i.to_be_bytes().to_vec(); db.insert( - [TEST_LEAF, i_vec.as_slice(), b"a"].as_ref(), - &j_vec, + [TEST_LEAF, b"1"].as_ref(), + &i_vec, Element::empty_tree(), None, None, ) .unwrap() - .expect("successful value insert"); - + .expect("successful subtree insert"); // Insert element 0 // Insert some elements into subtree db.insert( - [TEST_LEAF, i_vec.as_slice(), b"a", j_vec.as_slice()].as_ref(), + [TEST_LEAF, b"1", i_vec.as_slice()].as_ref(), b"\0", Element::empty_tree(), None, @@ -133,12 +171,30 @@ fn populate_tree_for_non_unique_double_range_subquery(db: &TempGroveDb) { .unwrap() .expect("successful subtree insert"); - for k in 100u32..110 { - let k_vec = k.to_be_bytes().to_vec(); + for j in 100u32..150 { + let random_key = rand::thread_rng().gen::<[u8; 32]>(); + let mut j_vec = i_vec.clone(); + j_vec.append(&mut j.to_be_bytes().to_vec()); + + // We should insert every item to the tree holding items + db.insert( + [TEST_LEAF, b"\0"].as_ref(), + &random_key, + Element::new_item(j_vec.clone()), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + db.insert( - [TEST_LEAF, i_vec.as_slice(), b"a", &j_vec, b"\0"].as_ref(), - &k_vec.clone(), - Element::new_item(k_vec), + [TEST_LEAF, b"1", i_vec.clone().as_slice(), b"\0"].as_ref(), + 
&random_key, + Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ + TEST_LEAF.to_vec(), + b"\0".to_vec(), + random_key.to_vec(), + ])), None, None, ) @@ -147,47 +203,49 @@ fn populate_tree_for_non_unique_double_range_subquery(db: &TempGroveDb) { } } } -} -fn populate_tree_by_reference_for_non_unique_range_subquery(db: &TempGroveDb) { - // This subtree will be holding values - db.insert( - [TEST_LEAF].as_ref(), - b"\0", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - // This subtree will be holding references - db.insert( - [TEST_LEAF].as_ref(), - b"1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - // Insert a couple of subtrees first - for i in 1985u32..2000 { - let i_vec = i.to_be_bytes().to_vec(); + fn populate_tree_for_unique_range_subquery(db: &TempGroveDb) { + // Insert a couple of subtrees first + for i in 1985u32..2000 { + let i_vec = i.to_be_bytes().to_vec(); + db.insert( + [TEST_LEAF].as_ref(), + &i_vec, + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF, &i_vec.clone()].as_ref(), + b"\0", + Element::new_item(i_vec), + None, + None, + ) + .unwrap() + .expect("successful value insert"); + } + } + + fn populate_tree_by_reference_for_unique_range_subquery(db: &TempGroveDb) { + // This subtree will be holding values db.insert( - [TEST_LEAF, b"1"].as_ref(), - &i_vec, + [TEST_LEAF].as_ref(), + b"\0", Element::empty_tree(), None, None, ) .unwrap() .expect("successful subtree insert"); - // Insert element 0 - // Insert some elements into subtree + + // This subtree will be holding references db.insert( - [TEST_LEAF, b"1", i_vec.as_slice()].as_ref(), - b"\0", + [TEST_LEAF].as_ref(), + b"1", Element::empty_tree(), None, None, @@ -195,29 +253,37 @@ fn populate_tree_by_reference_for_non_unique_range_subquery(db: &TempGroveDb) { .unwrap() .expect("successful subtree 
insert"); - for j in 100u32..150 { - let random_key = rand::thread_rng().gen::<[u8; 32]>(); - let mut j_vec = i_vec.clone(); - j_vec.append(&mut j.to_be_bytes().to_vec()); + for i in 1985u32..2000 { + let i_vec = i.to_be_bytes().to_vec(); + db.insert( + [TEST_LEAF, b"1"].as_ref(), + &i_vec, + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); // We should insert every item to the tree holding items db.insert( [TEST_LEAF, b"\0"].as_ref(), - &random_key, - Element::new_item(j_vec.clone()), + &i_vec, + Element::new_item(i_vec.clone()), None, None, ) .unwrap() .expect("successful value insert"); + // We should insert a reference to the item db.insert( - [TEST_LEAF, b"1", i_vec.clone().as_slice(), b"\0"].as_ref(), - &random_key, + [TEST_LEAF, b"1", i_vec.clone().as_slice()].as_ref(), + b"\0", Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ TEST_LEAF.to_vec(), b"\0".to_vec(), - random_key.to_vec(), + i_vec.clone(), ])), None, None, @@ -226,2461 +292,2519 @@ fn populate_tree_by_reference_for_non_unique_range_subquery(db: &TempGroveDb) { .expect("successful value insert"); } } -} -fn populate_tree_for_unique_range_subquery(db: &TempGroveDb) { - // Insert a couple of subtrees first - for i in 1985u32..2000 { - let i_vec = i.to_be_bytes().to_vec(); + fn populate_tree_for_unique_range_subquery_with_non_unique_null_values(db: &mut TempGroveDb) { + populate_tree_for_unique_range_subquery(db); + db.insert([TEST_LEAF].as_ref(), &[], Element::empty_tree(), None, None) + .unwrap() + .expect("successful subtree insert"); db.insert( - [TEST_LEAF].as_ref(), - &i_vec, + [TEST_LEAF, &[]].as_ref(), + b"\0", Element::empty_tree(), None, None, ) .unwrap() .expect("successful subtree insert"); + // Insert a couple of subtrees first + for i in 100u32..200 { + let i_vec = i.to_be_bytes().to_vec(); + db.insert( + [TEST_LEAF, &[], b"\0"].as_ref(), + &i_vec, + Element::new_item(i_vec.clone()), + None, + None, + ) + .unwrap() + 
.expect("successful value insert"); + } + } + fn populate_tree_for_uneven_keys(db: &TempGroveDb) { db.insert( - [TEST_LEAF, &i_vec.clone()].as_ref(), - b"\0", - Element::new_item(i_vec), + [TEST_LEAF].as_ref(), + "b".as_ref(), + Element::new_item(1u8.to_be_bytes().to_vec()), None, None, ) .unwrap() - .expect("successful value insert"); - } -} + .expect("successful subtree insert"); -fn populate_tree_by_reference_for_unique_range_subquery(db: &TempGroveDb) { - // This subtree will be holding values - db.insert( - [TEST_LEAF].as_ref(), - b"\0", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - // This subtree will be holding references - db.insert( - [TEST_LEAF].as_ref(), - b"1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - for i in 1985u32..2000 { - let i_vec = i.to_be_bytes().to_vec(); db.insert( - [TEST_LEAF, b"1"].as_ref(), - &i_vec, - Element::empty_tree(), + [TEST_LEAF].as_ref(), + "ab".as_ref(), + Element::new_item(2u8.to_be_bytes().to_vec()), None, None, ) .unwrap() .expect("successful subtree insert"); - // We should insert every item to the tree holding items db.insert( - [TEST_LEAF, b"\0"].as_ref(), - &i_vec, - Element::new_item(i_vec.clone()), + [TEST_LEAF].as_ref(), + "x".as_ref(), + Element::new_item(3u8.to_be_bytes().to_vec()), None, None, ) .unwrap() - .expect("successful value insert"); + .expect("successful subtree insert"); - // We should insert a reference to the item db.insert( - [TEST_LEAF, b"1", i_vec.clone().as_slice()].as_ref(), - b"\0", - Element::new_reference(ReferencePathType::AbsolutePathReference(vec![ - TEST_LEAF.to_vec(), - b"\0".to_vec(), - i_vec.clone(), - ])), + [TEST_LEAF].as_ref(), + &[3; 32], + Element::new_item(4u8.to_be_bytes().to_vec()), None, None, ) - .unwrap() - .expect("successful value insert"); - } -} - -fn populate_tree_for_unique_range_subquery_with_non_unique_null_values(db: &mut TempGroveDb) { - 
populate_tree_for_unique_range_subquery(db); - db.insert([TEST_LEAF].as_ref(), &[], Element::empty_tree(), None, None) .unwrap() .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, &[]].as_ref(), - b"\0", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - // Insert a couple of subtrees first - for i in 100u32..200 { - let i_vec = i.to_be_bytes().to_vec(); + db.insert( - [TEST_LEAF, &[], b"\0"].as_ref(), - &i_vec, - Element::new_item(i_vec.clone()), + [TEST_LEAF].as_ref(), + "k".as_ref(), + Element::new_item(5u8.to_be_bytes().to_vec()), None, None, ) .unwrap() - .expect("successful value insert"); + .expect("successful subtree insert"); } -} -#[test] -fn test_get_range_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + #[test] + fn test_get_correct_order() { + let db = make_test_grovedb(); + populate_tree_for_uneven_keys(&db); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range(1988_u32.to_be_bytes().to_vec()..1992_u32.to_be_bytes().to_vec()); + let path = vec![TEST_LEAF.to_vec()]; + let query = Query::new_range_full(); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + let path_query = PathQuery::new_unsized(path, query.clone()); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path_query = PathQuery::new_unsized(path, query.clone()); + assert_eq!(elements, vec![vec![4], vec![2], vec![1], vec![5], vec![3]]); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + 
populate_tree_for_non_unique_range_subquery(&db); - assert_eq!(elements.len(), 200); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range(1988_u32.to_be_bytes().to_vec()..1992_u32.to_be_bytes().to_vec()); - let mut first_value = 1988_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let mut last_value = 1991_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 200); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_query_with_unique_subquery() { - let mut db = make_test_grovedb(); - populate_tree_for_unique_range_subquery(&mut db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range(1988_u32.to_be_bytes().to_vec()..1992_u32.to_be_bytes().to_vec()); + assert_eq!(elements.len(), 200); - let subquery_key: Vec = b"\0".to_vec(); + let mut first_value = 1988_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); + let mut last_value = 1991_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let 
path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 200); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_query_with_unique_subquery() { + let mut db = make_test_grovedb(); + populate_tree_for_unique_range_subquery(&mut db); - assert_eq!(elements.len(), 4); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range(1988_u32.to_be_bytes().to_vec()..1992_u32.to_be_bytes().to_vec()); - let first_value = 1988_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); - let last_value = 1991_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 4); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_query_with_unique_subquery_on_references() { - let db = make_test_grovedb(); - populate_tree_by_reference_for_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec(), b"1".to_vec()]; - let mut query = Query::new(); - query.insert_range(1988_u32.to_be_bytes().to_vec()..1992_u32.to_be_bytes().to_vec()); + assert_eq!(elements.len(), 4); - let 
subquery_key: Vec = b"\0".to_vec(); + let first_value = 1988_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); + let last_value = 1991_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_query_with_unique_subquery_on_references() { + let db = make_test_grovedb(); + populate_tree_by_reference_for_unique_range_subquery(&db); - assert_eq!(elements.len(), 4); + let path = vec![TEST_LEAF.to_vec(), b"1".to_vec()]; + let mut query = Query::new(); + query.insert_range(1988_u32.to_be_bytes().to_vec()..1992_u32.to_be_bytes().to_vec()); - let first_value = 1988_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); - let last_value = 1991_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 4); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_query_with_unique_subquery_with_non_unique_null_values() { - let mut db = make_test_grovedb(); - populate_tree_for_unique_range_subquery_with_non_unique_null_values(&mut db); + let 
(elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_all(); + assert_eq!(elements.len(), 4); - let subquery_key: Vec = b"\0".to_vec(); + let first_value = 1988_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); + let last_value = 1991_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let mut subquery = Query::new(); - subquery.insert_all(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + compare_result_sets(&elements, &result_set); + } - query.add_conditional_subquery( - QueryItem::Key(b"".to_vec()), - Some(vec![b"\0".to_vec()]), - Some(subquery), - ); + #[test] + fn test_get_range_query_with_unique_subquery_with_non_unique_null_values() { + let mut db = make_test_grovedb(); + populate_tree_for_unique_range_subquery_with_non_unique_null_values(&mut db); - let path_query = PathQuery::new_unsized(path, query.clone()); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_all(); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + let subquery_key: Vec = b"\0".to_vec(); - assert_eq!(elements.len(), 115); + query.set_subquery_key(subquery_key); - let first_value = 100_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let mut subquery = Query::new(); + subquery.insert_all(); - let last_value = 1999_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + query.add_conditional_subquery( + QueryItem::Key(b"".to_vec()), + Some(vec![b"\0".to_vec()]), + 
Some(subquery), + ); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 115); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_query_with_unique_subquery_ignore_non_unique_null_values() { - let mut db = make_test_grovedb(); - populate_tree_for_unique_range_subquery_with_non_unique_null_values(&mut db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_all(); + assert_eq!(elements.len(), 115); - let subquery_key: Vec = b"\0".to_vec(); + let first_value = 100_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); + let last_value = 1999_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let subquery = Query::new(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 115); + compare_result_sets(&elements, &result_set); + } - query.add_conditional_subquery( - QueryItem::Key(b"".to_vec()), - Some(vec![b"\0".to_vec()]), - Some(subquery), - ); + #[test] + fn test_get_range_query_with_unique_subquery_ignore_non_unique_null_values() { + let mut db = make_test_grovedb(); + populate_tree_for_unique_range_subquery_with_non_unique_null_values(&mut db); - let path_query = PathQuery::new_unsized(path, query.clone()); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_all(); - let (elements, _) = db - 
.query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + let subquery_key: Vec = b"\0".to_vec(); - assert_eq!(elements.len(), 15); + query.set_subquery_key(subquery_key); - let first_value = 1985_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let subquery = Query::new(); - let last_value = 1999_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + // This conditional subquery expresses that we do not want to get values in "" + // tree + query.add_conditional_subquery( + QueryItem::Key(b"".to_vec()), + Some(vec![b"\0".to_vec()]), // We want to go into 0 but we don't want to get anything + Some(subquery), + ); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 15); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_inclusive_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_inclusive(1988_u32.to_be_bytes().to_vec()..=1995_u32.to_be_bytes().to_vec()); + assert_eq!(elements.len(), 15); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + let first_value = 1985_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let last_value = 1999_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = 
PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 15); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_inclusive_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); - assert_eq!(elements.len(), 400); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_inclusive( + 1988_u32.to_be_bytes().to_vec()..=1995_u32.to_be_bytes().to_vec(), + ); - let mut first_value = 1988_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let mut last_value = 1995_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 400); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_inclusive_query_with_non_unique_subquery_on_references() { - let db = make_test_grovedb(); - populate_tree_by_reference_for_non_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + 
.expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec(), b"1".to_vec()]; - let mut query = Query::new(); - query.insert_range_inclusive(1988_u32.to_be_bytes().to_vec()..=1995_u32.to_be_bytes().to_vec()); + assert_eq!(elements.len(), 400); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + let mut first_value = 1988_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let mut last_value = 1995_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 400); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); - - assert_eq!(elements.len(), 400); - - let mut first_value = 1988_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - // using contains as the elements get stored at random key locations - // hence impossible to predict the final location - // but must exist - assert!(elements.contains(&first_value)); - - let mut last_value = 1995_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert!(elements.contains(&last_value)); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - 
assert_eq!(result_set.len(), 400); - compare_result_sets(&elements, &result_set); -} + #[test] + fn test_get_range_inclusive_query_with_non_unique_subquery_on_references() { + let db = make_test_grovedb(); + populate_tree_by_reference_for_non_unique_range_subquery(&db); -#[test] -fn test_get_range_inclusive_query_with_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_unique_range_subquery(&db); + let path = vec![TEST_LEAF.to_vec(), b"1".to_vec()]; + let mut query = Query::new(); + query.insert_range_inclusive( + 1988_u32.to_be_bytes().to_vec()..=1995_u32.to_be_bytes().to_vec(), + ); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_inclusive(1988_u32.to_be_bytes().to_vec()..=1995_u32.to_be_bytes().to_vec()); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let subquery_key: Vec = b"\0".to_vec(); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - query.set_subquery_key(subquery_key); + let path_query = PathQuery::new_unsized(path, query.clone()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); + + assert_eq!(elements.len(), 400); + + let mut first_value = 1988_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + // using contains as the elements get stored at random key locations + // hence impossible to predict the final location + // but must exist + assert!(elements.contains(&first_value)); + + let mut last_value = 1995_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert!(elements.contains(&last_value)); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, 
db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 400); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_inclusive_query_with_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_unique_range_subquery(&db); - assert_eq!(elements.len(), 8); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_inclusive( + 1988_u32.to_be_bytes().to_vec()..=1995_u32.to_be_bytes().to_vec(), + ); - let first_value = 1988_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); - let last_value = 1995_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 8); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_from_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_from(1995_u32.to_be_bytes().to_vec()..); + assert_eq!(elements.len(), 8); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + let first_value = 1988_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let 
last_value = 1995_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 8); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_from_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); - assert_eq!(elements.len(), 250); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_from(1995_u32.to_be_bytes().to_vec()..); - let mut first_value = 1995_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let mut last_value = 1999_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 250); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_from_query_with_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, 
None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_from(1995_u32.to_be_bytes().to_vec()..); + assert_eq!(elements.len(), 250); - let subquery_key: Vec = b"\0".to_vec(); + let mut first_value = 1995_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); + let mut last_value = 1999_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 250); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_from_query_with_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_unique_range_subquery(&db); - assert_eq!(elements.len(), 5); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_from(1995_u32.to_be_bytes().to_vec()..); - let first_value = 1995_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); - let last_value = 1999_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - 
compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_to_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_to(..1995_u32.to_be_bytes().to_vec()); + assert_eq!(elements.len(), 5); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + let first_value = 1995_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let last_value = 1999_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_to_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); - assert_eq!(elements.len(), 500); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_to(..1995_u32.to_be_bytes().to_vec()); - let mut first_value = 1985_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + 
subquery.insert_all(); - let mut last_value = 1994_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 500); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_to_query_with_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_to(..1995_u32.to_be_bytes().to_vec()); + assert_eq!(elements.len(), 500); - let subquery_key: Vec = b"\0".to_vec(); + let mut first_value = 1985_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); + let mut last_value = 1994_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 500); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn 
test_get_range_to_query_with_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_unique_range_subquery(&db); - assert_eq!(elements.len(), 10); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_to(..1995_u32.to_be_bytes().to_vec()); - let first_value = 1985_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); - let last_value = 1994_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 10); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_to_inclusive_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_to_inclusive(..=1995_u32.to_be_bytes().to_vec()); + assert_eq!(elements.len(), 10); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + let first_value = 1985_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let last_value = 1994_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); 
+ assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 10); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_to_inclusive_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); - assert_eq!(elements.len(), 550); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_to_inclusive(..=1995_u32.to_be_bytes().to_vec()); - let mut first_value = 1985_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let mut last_value = 1995_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 550); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_to_inclusive_query_with_non_unique_subquery_and_key_out_of_bounds() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new_with_direction(false); - query.insert_range_to_inclusive(..=5000_u32.to_be_bytes().to_vec()); + assert_eq!(elements.len(), 
550); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new_with_direction(false); - subquery.insert_all(); + let mut first_value = 1985_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let mut last_value = 1995_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 550); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_to_inclusive_query_with_non_unique_subquery_and_key_out_of_bounds() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); - assert_eq!(elements.len(), 750); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new_with_direction(false); + query.insert_range_to_inclusive(..=5000_u32.to_be_bytes().to_vec()); - let mut first_value = 1999_u32.to_be_bytes().to_vec(); - first_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new_with_direction(false); + subquery.insert_all(); - let mut last_value = 1985_u32.to_be_bytes().to_vec(); - last_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let 
(hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 750); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_to_inclusive_query_with_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_to_inclusive(..=1995_u32.to_be_bytes().to_vec()); + assert_eq!(elements.len(), 750); - let subquery_key: Vec = b"\0".to_vec(); + let mut first_value = 1999_u32.to_be_bytes().to_vec(); + first_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); + let mut last_value = 1985_u32.to_be_bytes().to_vec(); + last_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 750); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_to_inclusive_query_with_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_unique_range_subquery(&db); - assert_eq!(elements.len(), 11); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + 
query.insert_range_to_inclusive(..=1995_u32.to_be_bytes().to_vec()); - let first_value = 1985_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); - let last_value = 1995_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 11); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_after_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_after(1995_u32.to_be_bytes().to_vec()..); + assert_eq!(elements.len(), 11); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + let first_value = 1985_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let last_value = 1995_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 11); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - 
.expect("expected successful get_path_query"); + #[test] + fn test_get_range_after_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); - assert_eq!(elements.len(), 200); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_after(1995_u32.to_be_bytes().to_vec()..); - let mut first_value = 1996_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let mut last_value = 1999_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 200); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_after_to_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_after_to(1995_u32.to_be_bytes().to_vec()..1997_u32.to_be_bytes().to_vec()); + assert_eq!(elements.len(), 200); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + let mut first_value = 1996_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - 
query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let mut last_value = 1999_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 200); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_after_to_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); - assert_eq!(elements.len(), 50); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_after_to( + 1995_u32.to_be_bytes().to_vec()..1997_u32.to_be_bytes().to_vec(), + ); - let mut first_value = 1996_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let mut last_value = 1996_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 50); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn 
test_get_range_after_to_inclusive_query_with_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_after_to_inclusive( - 1995_u32.to_be_bytes().to_vec()..=1997_u32.to_be_bytes().to_vec(), - ); + assert_eq!(elements.len(), 50); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + let mut first_value = 1996_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let mut last_value = 1996_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 50); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_after_to_inclusive_query_with_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); - assert_eq!(elements.len(), 100); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_after_to_inclusive( + 1995_u32.to_be_bytes().to_vec()..=1997_u32.to_be_bytes().to_vec(), + ); - let mut first_value = 1996_u32.to_be_bytes().to_vec(); - first_value.append(&mut 
100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let mut last_value = 1997_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 100); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_after_to_inclusive_query_with_non_unique_subquery_and_key_out_of_bounds() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new_with_direction(false); - query.insert_range_after_to_inclusive( - 1995_u32.to_be_bytes().to_vec()..=5000_u32.to_be_bytes().to_vec(), - ); + assert_eq!(elements.len(), 100); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new_with_direction(false); - subquery.insert_all(); + let mut first_value = 1996_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key); - query.set_subquery(subquery); + let mut last_value = 1997_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let path_query = PathQuery::new_unsized(path, query.clone()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let 
(hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 100); + compare_result_sets(&elements, &result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_after_to_inclusive_query_with_non_unique_subquery_and_key_out_of_bounds() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); - assert_eq!(elements.len(), 200); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new_with_direction(false); + query.insert_range_after_to_inclusive( + 1995_u32.to_be_bytes().to_vec()..=5000_u32.to_be_bytes().to_vec(), + ); - let mut first_value = 1999_u32.to_be_bytes().to_vec(); - first_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new_with_direction(false); + subquery.insert_all(); - let mut last_value = 1996_u32.to_be_bytes().to_vec(); - last_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key); + query.set_subquery(subquery); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 200); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_inclusive_query_with_double_non_unique_subquery() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_double_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = 
vec![TEST_LEAF.to_vec()]; - let mut query = Query::new(); - query.insert_range_inclusive((3u32).to_be_bytes().to_vec()..=(4u32).to_be_bytes().to_vec()); + assert_eq!(elements.len(), 200); - query.set_subquery_key(b"a".to_vec()); + let mut first_value = 1999_u32.to_be_bytes().to_vec(); + first_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let mut subquery = Query::new(); - subquery - .insert_range_inclusive((29u32).to_be_bytes().to_vec()..=(31u32).to_be_bytes().to_vec()); + let mut last_value = 1996_u32.to_be_bytes().to_vec(); + last_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - subquery.set_subquery_key(b"\0".to_vec()); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 200); + compare_result_sets(&elements, &result_set); + } - let mut subsubquery = Query::new(); - subsubquery.insert_all(); + #[test] + fn test_get_range_inclusive_query_with_double_non_unique_subquery() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_double_range_subquery(&db); - subquery.set_subquery(subsubquery); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new(); + query.insert_range_inclusive((3u32).to_be_bytes().to_vec()..=(4u32).to_be_bytes().to_vec()); - query.set_subquery(subquery); + query.set_subquery_key(b"a".to_vec()); - let path_query = PathQuery::new_unsized(path, query.clone()); + let mut subquery = Query::new(); + subquery.insert_range_inclusive( + (29u32).to_be_bytes().to_vec()..=(31u32).to_be_bytes().to_vec(), + ); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + subquery.set_subquery_key(b"\0".to_vec()); - assert_eq!(elements.len(), 60); + let mut 
subsubquery = Query::new(); + subsubquery.insert_all(); - let first_value = 100_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + subquery.set_subquery(subsubquery); - let last_value = 109_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery(subquery); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 60); - compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new_unsized(path, query.clone()); -#[test] -fn test_get_range_query_with_limit_and_offset() { - let db = make_test_grovedb(); - populate_tree_for_non_unique_range_subquery(&db); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let path = vec![TEST_LEAF.to_vec()]; - let mut query = Query::new_with_direction(true); - query.insert_range(1990_u32.to_be_bytes().to_vec()..1995_u32.to_be_bytes().to_vec()); + assert_eq!(elements.len(), 60); - let subquery_key: Vec = b"\0".to_vec(); - let mut subquery = Query::new(); - subquery.insert_all(); + let first_value = 100_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery.clone()); + let last_value = 109_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); - // Baseline query: no offset or limit + left to right - let path_query = PathQuery::new(path.clone(), SizedQuery::new(query.clone(), None, None)); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 60); + compare_result_sets(&elements, 
&result_set); + } - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + #[test] + fn test_get_range_query_with_limit_and_offset() { + let db = make_test_grovedb(); + populate_tree_for_non_unique_range_subquery(&db); - assert_eq!(elements.len(), 250); + let path = vec![TEST_LEAF.to_vec()]; + let mut query = Query::new_with_direction(true); + query.insert_range(1990_u32.to_be_bytes().to_vec()..1995_u32.to_be_bytes().to_vec()); - let mut first_value = 1990_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let subquery_key: Vec = b"\0".to_vec(); + let mut subquery = Query::new(); + subquery.insert_all(); - let mut last_value = 1994_u32.to_be_bytes().to_vec(); - last_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery.clone()); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 250); - compare_result_sets(&elements, &result_set); + // Baseline query: no offset or limit + left to right + let path_query = PathQuery::new(path.clone(), SizedQuery::new(query.clone(), None, None)); - subquery.left_to_right = false; + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery.clone()); + assert_eq!(elements.len(), 250); - query.left_to_right = false; + let mut first_value = 1990_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - // Baseline query: no offset or limit 
+ right to left - let path_query = PathQuery::new(path.clone(), SizedQuery::new(query.clone(), None, None)); + let mut last_value = 1994_u32.to_be_bytes().to_vec(); + last_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 250); + compare_result_sets(&elements, &result_set); - assert_eq!(elements.len(), 250); + subquery.left_to_right = false; - let mut first_value = 1994_u32.to_be_bytes().to_vec(); - first_value.append(&mut 149_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery.clone()); - let mut last_value = 1990_u32.to_be_bytes().to_vec(); - last_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.left_to_right = false; - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 250); - compare_result_sets(&elements, &result_set); + // Baseline query: no offset or limit + right to left + let path_query = PathQuery::new(path.clone(), SizedQuery::new(query.clone(), None, None)); - subquery.left_to_right = true; + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery.clone()); + assert_eq!(elements.len(), 250); - query.left_to_right = true; + let 
mut first_value = 1994_u32.to_be_bytes().to_vec(); + first_value.append(&mut 149_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - // Limit the result to just 55 elements - let path_query = PathQuery::new(path.clone(), SizedQuery::new(query.clone(), Some(55), None)); + let mut last_value = 1990_u32.to_be_bytes().to_vec(); + last_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 250); + compare_result_sets(&elements, &result_set); - assert_eq!(elements.len(), 55); + subquery.left_to_right = true; - let mut first_value = 1990_u32.to_be_bytes().to_vec(); - first_value.append(&mut 100_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery.clone()); - // Second tree 5 element [100, 101, 102, 103, 104] - let mut last_value = 1991_u32.to_be_bytes().to_vec(); - last_value.append(&mut 104_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.left_to_right = true; - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 55); - compare_result_sets(&elements, &result_set); + // Limit the result to just 55 elements + let path_query = + PathQuery::new(path.clone(), SizedQuery::new(query.clone(), Some(55), None)); - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery.clone()); + let (elements, _) = db 
+ .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - // Limit the result set to 60 elements but skip the first 14 elements - let path_query = PathQuery::new( - path.clone(), - SizedQuery::new(query.clone(), Some(60), Some(14)), - ); + assert_eq!(elements.len(), 55); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); - - assert_eq!(elements.len(), 60); - - // Skips the first 14 elements, starts from the 15th - // i.e skips [100 - 113] starts from 114 - let mut first_value = 1990_u32.to_be_bytes().to_vec(); - first_value.append(&mut 114_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); - - // Continues for 60 iterations - // Takes 36 elements from the first tree (50 - 14) - // takes the remaining 24 from the second three (60 - 36) - let mut last_value = 1991_u32.to_be_bytes().to_vec(); - last_value.append(&mut 123_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 60); - compare_result_sets(&elements, &result_set); - - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery.clone()); - - query.left_to_right = false; - - // Limit the result set to 60 element but skip first 10 elements (this time - // right to left) - let path_query = PathQuery::new( - path.clone(), - SizedQuery::new(query.clone(), Some(60), Some(10)), - ); - - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + let mut first_value = 1990_u32.to_be_bytes().to_vec(); + first_value.append(&mut 100_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], 
first_value); - assert_eq!(elements.len(), 60); + // Second tree 5 element [100, 101, 102, 103, 104] + let mut last_value = 1991_u32.to_be_bytes().to_vec(); + last_value.append(&mut 104_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - // Skips the first 10 elements from the back - // last tree and starts from the 11th before the end - // Underlying subquery is ascending - let mut first_value = 1994_u32.to_be_bytes().to_vec(); - first_value.append(&mut 110_u32.to_be_bytes().to_vec()); - assert_eq!(elements[0], first_value); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 55); + compare_result_sets(&elements, &result_set); - let mut last_value = 1993_u32.to_be_bytes().to_vec(); - last_value.append(&mut 119_u32.to_be_bytes().to_vec()); - assert_eq!(elements[elements.len() - 1], last_value); + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery.clone()); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 60); - compare_result_sets(&elements, &result_set); + // Limit the result set to 60 elements but skip the first 14 elements + let path_query = PathQuery::new( + path.clone(), + SizedQuery::new(query.clone(), Some(60), Some(14)), + ); - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - query.left_to_right = true; + assert_eq!(elements.len(), 60); - // Offset bigger than elements in range - let path_query = PathQuery::new( - path.clone(), - 
SizedQuery::new(query.clone(), None, Some(5000)), - ); + // Skips the first 14 elements, starts from the 15th + // i.e skips [100 - 113] starts from 114 + let mut first_value = 1990_u32.to_be_bytes().to_vec(); + first_value.append(&mut 114_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + // Continues for 60 iterations + // Takes 36 elements from the first tree (50 - 14) + // takes the remaining 24 from the second three (60 - 36) + let mut last_value = 1991_u32.to_be_bytes().to_vec(); + last_value.append(&mut 123_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - assert_eq!(elements.len(), 0); + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery.clone()); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); + query.left_to_right = false; - query.set_subquery_key(subquery_key.clone()); - query.set_subquery(subquery); + // Limit the result set to 60 element but skip first 10 elements (this time + // right to left) + let path_query = PathQuery::new( + path.clone(), + SizedQuery::new(query.clone(), Some(60), Some(10)), + ); - // Limit bigger than elements in range - let path_query = PathQuery::new( - path.clone(), - SizedQuery::new(query.clone(), Some(5000), None), - ); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 60); - assert_eq!(elements.len(), 250); + // Skips the first 10 elements from the back + 
// last tree and starts from the 11th before the end + // Underlying subquery is ascending + let mut first_value = 1994_u32.to_be_bytes().to_vec(); + first_value.append(&mut 110_u32.to_be_bytes().to_vec()); + assert_eq!(elements[0], first_value); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 250); + let mut last_value = 1993_u32.to_be_bytes().to_vec(); + last_value.append(&mut 119_u32.to_be_bytes().to_vec()); + assert_eq!(elements[elements.len() - 1], last_value); - // Test on unique subtree build - let db = make_test_grovedb(); - populate_tree_for_unique_range_subquery(&db); + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery.clone()); - let mut query = Query::new_with_direction(true); - query.insert_range(1990_u32.to_be_bytes().to_vec()..2000_u32.to_be_bytes().to_vec()); + query.left_to_right = true; - query.set_subquery_key(subquery_key); + // Offset bigger than elements in range + let path_query = PathQuery::new( + path.clone(), + SizedQuery::new(query.clone(), None, Some(5000)), + ); - let path_query = PathQuery::new(path, SizedQuery::new(query.clone(), Some(5), Some(2))); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 0); - assert_eq!(elements.len(), 5); + query.set_subquery_key(subquery_key.clone()); + query.set_subquery(subquery); - let first_value = 1992_u32.to_be_bytes().to_vec(); - assert_eq!(elements[0], first_value); + // Limit bigger than elements in range + let path_query = PathQuery::new( + path.clone(), + SizedQuery::new(query.clone(), Some(5000), None), + ); - let last_value 
= 1996_u32.to_be_bytes().to_vec(); - assert_eq!(elements[elements.len() - 1], last_value); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - compare_result_sets(&elements, &result_set); -} + assert_eq!(elements.len(), 250); -#[test] -fn test_correct_child_root_hash_propagation_for_parent_in_same_batch() { - let tmp_dir = TempDir::new().unwrap(); - let db = GroveDb::open(tmp_dir.path()).unwrap(); - let tree_name_slice: &[u8] = &[ - 2, 17, 40, 46, 227, 17, 179, 211, 98, 50, 130, 107, 246, 26, 147, 45, 234, 189, 245, 77, - 252, 86, 99, 107, 197, 226, 188, 54, 239, 64, 17, 37, - ]; - - let batch = vec![GroveDbOp::insert_op(vec![], vec![1], Element::empty_tree())]; - db.apply_batch(batch, None, None) - .unwrap() - .expect("should apply batch"); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 250); - let batch = vec![ - GroveDbOp::insert_op( - vec![vec![1]], - tree_name_slice.to_vec(), - Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![vec![1], tree_name_slice.to_vec()], - b"\0".to_vec(), - Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![vec![1], tree_name_slice.to_vec()], - vec![1], - Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![vec![1], tree_name_slice.to_vec(), vec![1]], - b"person".to_vec(), - Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![ - vec![1], - tree_name_slice.to_vec(), - vec![1], - b"person".to_vec(), - ], - b"\0".to_vec(), - Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![ - vec![1], 
- tree_name_slice.to_vec(), - vec![1], - b"person".to_vec(), - ], - b"firstName".to_vec(), - Element::empty_tree(), - ), - ]; - db.apply_batch(batch, None, None) - .unwrap() - .expect("should apply batch"); + // Test on unique subtree build + let db = make_test_grovedb(); + populate_tree_for_unique_range_subquery(&db); - let batch = vec![ - GroveDbOp::insert_op( - vec![ - vec![1], + let mut query = Query::new_with_direction(true); + query.insert_range(1990_u32.to_be_bytes().to_vec()..2000_u32.to_be_bytes().to_vec()); + + query.set_subquery_key(subquery_key); + + let path_query = PathQuery::new(path, SizedQuery::new(query.clone(), Some(5), Some(2))); + + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("expected successful get_path_query"); + + assert_eq!(elements.len(), 5); + + let first_value = 1992_u32.to_be_bytes().to_vec(); + assert_eq!(elements[0], first_value); + + let last_value = 1996_u32.to_be_bytes().to_vec(); + assert_eq!(elements[elements.len() - 1], last_value); + } + + #[test] + fn test_correct_child_root_hash_propagation_for_parent_in_same_batch() { + let tmp_dir = TempDir::new().unwrap(); + let db = GroveDb::open(tmp_dir.path()).unwrap(); + let tree_name_slice: &[u8] = &[ + 2, 17, 40, 46, 227, 17, 179, 211, 98, 50, 130, 107, 246, 26, 147, 45, 234, 189, 245, + 77, 252, 86, 99, 107, 197, 226, 188, 54, 239, 64, 17, 37, + ]; + + let batch = vec![GroveDbOp::insert_op(vec![], vec![1], Element::empty_tree())]; + db.apply_batch(batch, None, None) + .unwrap() + .expect("should apply batch"); + + let batch = vec![ + GroveDbOp::insert_op( + vec![vec![1]], tree_name_slice.to_vec(), - vec![1], - b"person".to_vec(), + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![vec![1], tree_name_slice.to_vec()], b"\0".to_vec(), - ], - b"person_id_1".to_vec(), - Element::new_item(vec![50]), - ), - GroveDbOp::insert_op( - vec![ - vec![1], - tree_name_slice.to_vec(), - vec![1], - b"person".to_vec(), - 
b"firstName".to_vec(), - ], - b"cammi".to_vec(), - Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![ - vec![1], - tree_name_slice.to_vec(), - vec![1], - b"person".to_vec(), - b"firstName".to_vec(), - b"cammi".to_vec(), - ], - b"\0".to_vec(), - Element::empty_tree(), - ), - GroveDbOp::insert_op( - vec![ - vec![1], - tree_name_slice.to_vec(), + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![vec![1], tree_name_slice.to_vec()], vec![1], + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![vec![1], tree_name_slice.to_vec(), vec![1]], b"person".to_vec(), + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![ + vec![1], + tree_name_slice.to_vec(), + vec![1], + b"person".to_vec(), + ], + b"\0".to_vec(), + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![ + vec![1], + tree_name_slice.to_vec(), + vec![1], + b"person".to_vec(), + ], b"firstName".to_vec(), + Element::empty_tree(), + ), + ]; + db.apply_batch(batch, None, None) + .unwrap() + .expect("should apply batch"); + + let batch = vec![ + GroveDbOp::insert_op( + vec![ + vec![1], + tree_name_slice.to_vec(), + vec![1], + b"person".to_vec(), + b"\0".to_vec(), + ], + b"person_id_1".to_vec(), + Element::new_item(vec![50]), + ), + GroveDbOp::insert_op( + vec![ + vec![1], + tree_name_slice.to_vec(), + vec![1], + b"person".to_vec(), + b"firstName".to_vec(), + ], b"cammi".to_vec(), + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![ + vec![1], + tree_name_slice.to_vec(), + vec![1], + b"person".to_vec(), + b"firstName".to_vec(), + b"cammi".to_vec(), + ], b"\0".to_vec(), - ], - b"person_ref_id".to_vec(), - Element::new_reference(ReferencePathType::UpstreamRootHeightReference( - 4, - vec![b"\0".to_vec(), b"person_id_1".to_vec()], - )), - ), - ]; - db.apply_batch(batch, None, None) - .unwrap() - .expect("should apply batch"); - - let path = vec![ - vec![1], - tree_name_slice.to_vec(), - vec![1], - b"person".to_vec(), - b"firstName".to_vec(), - ]; - let mut query = Query::new(); 
- query.insert_all(); - query.set_subquery_key(b"\0".to_vec()); - let mut subquery = Query::new(); - subquery.insert_all(); - query.set_subquery(subquery); - let path_query = PathQuery::new( - path, - SizedQuery { - query: query.clone(), - limit: Some(100), - offset: Some(0), - }, - ); + Element::empty_tree(), + ), + GroveDbOp::insert_op( + vec![ + vec![1], + tree_name_slice.to_vec(), + vec![1], + b"person".to_vec(), + b"firstName".to_vec(), + b"cammi".to_vec(), + b"\0".to_vec(), + ], + b"person_ref_id".to_vec(), + Element::new_reference(ReferencePathType::UpstreamRootHeightReference( + 4, + vec![b"\0".to_vec(), b"person_id_1".to_vec()], + )), + ), + ]; + db.apply_batch(batch, None, None) + .unwrap() + .expect("should apply batch"); - let proof = db - .prove_query(&path_query) - .unwrap() - .expect("expected successful proving"); - let (hash, _result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); -} + let path = vec![ + vec![1], + tree_name_slice.to_vec(), + vec![1], + b"person".to_vec(), + b"firstName".to_vec(), + ]; + let mut query = Query::new(); + query.insert_all(); + query.set_subquery_key(b"\0".to_vec()); + let mut subquery = Query::new(); + subquery.insert_all(); + query.set_subquery(subquery); + let path_query = PathQuery::new( + path, + SizedQuery { + query: query.clone(), + limit: Some(100), + offset: Some(0), + }, + ); -#[test] -fn test_mixed_level_proofs() { - let db = make_test_grovedb(); - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key2", - Element::new_item(vec![1]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key3", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key4", - 
Element::new_reference(ReferencePathType::SiblingReference(b"key2".to_vec())), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"k1", - Element::new_item(vec![2]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"k2", - Element::new_item(vec![3]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"k3", - Element::new_item(vec![4]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - - let mut query = Query::new(); - query.insert_all(); - let mut subquery = Query::new(); - subquery.insert_all(); - query.set_subquery(subquery); - - let path = vec![TEST_LEAF.to_vec()]; - - let path_query = PathQuery::new_unsized(path.clone(), query.clone()); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("successful get_path_query"); + let proof = db + .prove_query(&path_query, None) + .unwrap() + .expect("expected successful proving"); + let (hash, _result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + } - assert_eq!(elements.len(), 5); - assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1], vec![1]]); + #[test] + fn test_mixed_level_proofs() { + let db = make_test_grovedb(); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - compare_result_sets(&elements, &result_set); + // TEST_LEAF + // / | | \ + // key1 key2 : [1] key3 key4 : (Ref -> Key2) + // / | \ + // k1 k2 k3 + // / / / + // 2 3 4 - // Test mixed element proofs with limit and offset - let path_query = PathQuery::new_unsized(path.clone(), query.clone()); - let (elements, _) 
= db - .query_item_value(&path_query, true, true, true, None) + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::empty_tree(), + None, + None, + ) .unwrap() - .expect("successful get_path_query"); - - assert_eq!(elements.len(), 5); - assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1], vec![1]]); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - compare_result_sets(&elements, &result_set); - - // TODO: Fix noticed bug when limit and offset are both set to Some(0) + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key2", + Element::new_item(vec![1]), + None, + None, + ) + .unwrap() + .expect("successful item insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key3", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key4", + Element::new_reference(ReferencePathType::SiblingReference(b"key2".to_vec())), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); - let path_query = PathQuery::new(path.clone(), SizedQuery::new(query.clone(), Some(1), None)); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"k1", + Element::new_item(vec![2]), + None, + None, + ) .unwrap() - .expect("successful get_path_query"); - - assert_eq!(elements.len(), 1); - assert_eq!(elements, vec![vec![2]]); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 1); - compare_result_sets(&elements, &result_set); - - let path_query = PathQuery::new( - path.clone(), - SizedQuery::new(query.clone(), Some(3), 
Some(0)), - ); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) + .expect("successful item insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"k2", + Element::new_item(vec![3]), + None, + None, + ) .unwrap() - .expect("successful get_path_query"); - - assert_eq!(elements.len(), 3); - assert_eq!(elements, vec![vec![2], vec![3], vec![4]]); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - compare_result_sets(&elements, &result_set); - - let path_query = PathQuery::new( - path.clone(), - SizedQuery::new(query.clone(), Some(4), Some(0)), - ); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) + .expect("successful item insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"k3", + Element::new_item(vec![4]), + None, + None, + ) .unwrap() - .expect("successful get_path_query"); + .expect("successful item insert"); - assert_eq!(elements.len(), 4); - assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1]]); + let mut query = Query::new(); + query.insert_all(); + let mut subquery = Query::new(); + subquery.insert_all(); + query.set_subquery(subquery); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 4); - compare_result_sets(&elements, &result_set); + let path = vec![TEST_LEAF.to_vec()]; - let path_query = PathQuery::new(path, SizedQuery::new(query.clone(), Some(10), Some(4))); - let (elements, _) = db - .query_item_value(&path_query, true, true, true, None) - .unwrap() - .expect("successful get_path_query"); + let path_query = PathQuery::new_unsized(path.clone(), query.clone()); + let (elements, _) = db + 
.query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("successful get_path_query"); + + assert_eq!(elements.len(), 5); + assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1], vec![1]]); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + // println!( + // "{}", + // result_set + // .iter() + // .map(|a| a.to_string()) + // .collect::>() + // .join(" | ") + // ); + assert_eq!(result_set.len(), 5); + compare_result_sets(&elements, &result_set); + + // Test mixed element proofs with limit and offset + let path_query = PathQuery::new_unsized(path.clone(), query.clone()); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("successful get_path_query"); - assert_eq!(elements.len(), 1); - assert_eq!(elements, vec![vec![1]]); + assert_eq!(elements.len(), 5); + assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1], vec![1]]); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 1); - compare_result_sets(&elements, &result_set); -} + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); + compare_result_sets(&elements, &result_set); -#[test] -fn test_mixed_level_proofs_with_tree() { - let db = make_test_grovedb(); - db.insert( - [TEST_LEAF].as_ref(), - b"key1", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key2", - Element::empty_tree(), - None, - None, - ) - .unwrap() - 
.expect("successful subtree insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"key3", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"k1", - Element::new_item(vec![2]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"k2", - Element::new_item(vec![3]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - db.insert( - [TEST_LEAF, b"key1"].as_ref(), - b"k3", - Element::new_item(vec![4]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - db.insert( - [TEST_LEAF, b"key2"].as_ref(), - b"k1", - Element::new_item(vec![5]), - None, - None, - ) - .unwrap() - .expect("successful item insert"); - - let mut query = Query::new(); - query.insert_all(); - let mut subquery = Query::new(); - subquery.insert_all(); - query.add_conditional_subquery(QueryItem::Key(b"key1".to_vec()), None, Some(subquery)); - - let path = vec![TEST_LEAF.to_vec()]; - - let path_query = PathQuery::new_unsized(path.clone(), query.clone()); - - let (elements, _) = db - .query_raw( - &path_query, - true, - true, - true, - QueryResultType::QueryPathKeyElementTrioResultType, - None, - ) - .unwrap() - .expect("expected successful get_path_query"); + // TODO: Fix noticed bug when limit and offset are both set to Some(0) - assert_eq!(elements.len(), 5); + let path_query = + PathQuery::new(path.clone(), SizedQuery::new(query.clone(), Some(1), None)); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("successful get_path_query"); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); + assert_eq!(elements.len(), 1); + assert_eq!(elements, vec![vec![2]]); - // TODO: verify that 
the result set is exactly the same - // compare_result_sets(&elements, &result_set); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 1); + compare_result_sets(&elements, &result_set); - let path_query = PathQuery::new(path, SizedQuery::new(query.clone(), Some(1), None)); + let path_query = PathQuery::new( + path.clone(), + SizedQuery::new(query.clone(), Some(3), Some(0)), + ); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("successful get_path_query"); - let (elements, _) = db - .query_raw( - &path_query, - true, - true, - true, - QueryResultType::QueryPathKeyElementTrioResultType, - None, - ) - .unwrap() - .expect("expected successful get_path_query"); + assert_eq!(elements.len(), 3); + assert_eq!(elements, vec![vec![2], vec![3], vec![4]]); - assert_eq!(elements.len(), 1); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + compare_result_sets(&elements, &result_set); - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 1); - // TODO: verify that the result set is exactly the same - // compare_result_sets(&elements, &result_set); -} + let path_query = PathQuery::new( + path.clone(), + SizedQuery::new(query.clone(), Some(4), Some(0)), + ); + let (elements, _) = db + .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("successful get_path_query"); -#[test] -fn test_mixed_level_proofs_with_subquery_paths() { - let db = 
make_test_grovedb(); - db.insert( - [TEST_LEAF].as_ref(), - b"a", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"b", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF].as_ref(), - b"c", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF, b"a"].as_ref(), - b"d", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"a"].as_ref(), - b"e", - Element::new_item(vec![2]), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"a"].as_ref(), - b"f", - Element::new_item(vec![3]), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF, b"a", b"d"].as_ref(), - b"d", - Element::new_item(vec![6]), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF, b"b"].as_ref(), - b"g", - Element::new_item(vec![4]), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"b"].as_ref(), - b"d", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - db.insert( - [TEST_LEAF, b"b", b"d"].as_ref(), - b"i", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"b", b"d"].as_ref(), - b"j", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - db.insert( - [TEST_LEAF, b"b", b"d"].as_ref(), - b"k", - Element::empty_tree(), - None, - None, - ) - .unwrap() - .expect("successful subtree insert"); - - // if you don't have an item at the subquery path translation, you shouldn't be - // added to the result set. 
- let mut query = Query::new(); - query.insert_all(); - query.set_subquery_path(vec![b"d".to_vec()]); - - let path = vec![TEST_LEAF.to_vec()]; - - let path_query = PathQuery::new_unsized(path, query.clone()); - - // TODO: proofs seems to be more expressive than query_raw now - // let (elements, _) = db - // .query_raw( - // &path_query, - // true, - // QueryResultType::QueryPathKeyElementTrioResultType, - // None, - // ) - // .unwrap() - // .expect("expected successful get_path_query"); - // - // assert_eq!(elements.len(), 2); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 2); - - // apply path translation then query - let mut query = Query::new(); - query.insert_all(); - let mut subquery = Query::new(); - subquery.insert_all(); - query.set_subquery_path(vec![b"d".to_vec()]); - query.set_subquery(subquery); - - let path = vec![TEST_LEAF.to_vec()]; - - let path_query = PathQuery::new_unsized(path, query.clone()); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 4); - - // apply empty path translation - let mut query = Query::new(); - query.insert_all(); - let mut subquery = Query::new(); - subquery.insert_all(); - query.set_subquery_path(vec![]); - query.set_subquery(subquery); - - let path = vec![TEST_LEAF.to_vec()]; - - let path_query = PathQuery::new_unsized(path, query.clone()); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - - // use conditionals to return from more than 2 depth - let mut query = 
Query::new(); - query.insert_all(); - let mut subquery = Query::new(); - subquery.insert_all(); - let mut deeper_subquery = Query::new(); - deeper_subquery.insert_all(); - subquery.add_conditional_subquery(QueryItem::Key(b"d".to_vec()), None, Some(deeper_subquery)); - query.add_conditional_subquery(QueryItem::Key(b"a".to_vec()), None, Some(subquery.clone())); - query.add_conditional_subquery(QueryItem::Key(b"b".to_vec()), None, Some(subquery.clone())); - - let path = vec![TEST_LEAF.to_vec()]; - - let path_query = PathQuery::new_unsized(path, query.clone()); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 8); -} + assert_eq!(elements.len(), 4); + assert_eq!(elements, vec![vec![2], vec![3], vec![4], vec![1]]); -#[test] -fn test_proof_with_limit_zero() { - let db = make_deep_tree(); - let mut query = Query::new(); - query.insert_all(); - let path_query = PathQuery::new( - vec![TEST_LEAF.to_vec()], - SizedQuery::new(query, Some(0), Some(0)), - ); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 0); -} + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + compare_result_sets(&elements, &result_set); -#[test] -fn test_result_set_path_after_verification() { - let db = make_deep_tree(); - let mut query = Query::new(); - query.insert_all(); - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, 
result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - // assert the result set path - assert_eq!( - result_set[0].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[1].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[2].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - - assert_eq!(result_set[0].key, b"key1".to_vec()); - assert_eq!(result_set[1].key, b"key2".to_vec()); - assert_eq!(result_set[2].key, b"key3".to_vec()); - - // Test path tracking with subquery - let mut query = Query::new(); - query.insert_all(); - let mut subq = Query::new(); - subq.insert_all(); - query.set_subquery(subq); - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 5); - - assert_eq!( - result_set[0].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[1].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[2].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[3].path, - vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()] - ); - assert_eq!( - result_set[4].path, - vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()] - ); - - // Test path tracking with subquery path - // perform a query, do a translation, perform another query - let mut query = Query::new(); - query.insert_key(b"deep_leaf".to_vec()); - query.set_subquery_path(vec![b"deep_node_1".to_vec(), b"deeper_1".to_vec()]); - let mut subq = Query::new(); - subq.insert_all(); - query.set_subquery(subq); - let path_query = PathQuery::new_unsized(vec![], query); - - let proof = 
db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - assert_eq!( - result_set[0].path, - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec() - ] - ); - assert_eq!( - result_set[1].path, - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec() - ] - ); - assert_eq!( - result_set[2].path, - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec() - ] - ); - - assert_eq!(result_set[0].key, b"key1".to_vec()); - assert_eq!(result_set[1].key, b"key2".to_vec()); - assert_eq!(result_set[2].key, b"key3".to_vec()); - - // Test path tracking for mixed level result set - let mut query = Query::new(); - query.insert_all(); - let mut subq = Query::new(); - subq.insert_all(); - query.add_conditional_subquery(QueryItem::Key(b"innertree".to_vec()), None, Some(subq)); - - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 4); - - assert_eq!( - result_set[0].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[1].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[2].path, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!(result_set[3].path, vec![TEST_LEAF.to_vec()]); - - assert_eq!(result_set[0].key, b"key1".to_vec()); - assert_eq!(result_set[1].key, b"key2".to_vec()); - assert_eq!(result_set[2].key, b"key3".to_vec()); - assert_eq!(result_set[3].key, b"innertree4".to_vec()); -} + let path_query = PathQuery::new(path, SizedQuery::new(query.clone(), Some(10), Some(4))); + let (elements, _) = db 
+ .query_item_value(&path_query, true, true, true, None) + .unwrap() + .expect("successful get_path_query"); -#[test] -fn test_verification_with_path_key_optional_element_trio() { - let db = make_deep_tree(); - let mut query = Query::new(); - query.insert_all(); - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 3); - - assert_eq!( - result_set[0], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key1".to_vec(), - Some(Element::new_item(b"value1".to_vec())) + assert_eq!(elements.len(), 1); + assert_eq!(elements, vec![vec![1]]); + } + + #[test] + fn test_mixed_level_proofs_with_tree() { + let db = make_test_grovedb(); + db.insert( + [TEST_LEAF].as_ref(), + b"key1", + Element::empty_tree(), + None, + None, ) - ); - assert_eq!( - result_set[1], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key2".to_vec(), - Some(Element::new_item(b"value2".to_vec())) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key2", + Element::empty_tree(), + None, + None, ) - ); - assert_eq!( - result_set[2], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key3".to_vec(), - Some(Element::new_item(b"value3".to_vec())) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"key3", + Element::empty_tree(), + None, + None, ) - ); -} - -#[test] -fn test_absence_proof() { - let db = make_deep_tree(); - - // simple case, request for items k2..=k5 under inner tree - // we pass them as keys as terminal keys does not handle ranges with start or - // end len greater than 1 k2, k3 should be Some, k4, k5 should be None, k1, - // k6.. 
should not be in map - let mut query = Query::new(); - query.insert_key(b"key2".to_vec()); - query.insert_key(b"key3".to_vec()); - query.insert_key(b"key4".to_vec()); - query.insert_key(b"key5".to_vec()); - let path_query = PathQuery::new( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - SizedQuery::new(query, Some(4), None), - ); - - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query_with_absence_proof(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 4); - - assert_eq!( - result_set[0].0, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[1].0, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[2].0, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - assert_eq!( - result_set[3].0, - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] - ); - - assert_eq!(result_set[0].1, b"key2".to_vec()); - assert_eq!(result_set[1].1, b"key3".to_vec()); - assert_eq!(result_set[2].1, b"key4".to_vec()); - assert_eq!(result_set[3].1, b"key5".to_vec()); - - assert_eq!(result_set[0].2, Some(Element::new_item(b"value2".to_vec()))); - assert_eq!(result_set[1].2, Some(Element::new_item(b"value3".to_vec()))); - assert_eq!(result_set[2].2, None); - assert_eq!(result_set[3].2, None); -} + .unwrap() + .expect("successful subtree insert"); -#[test] -fn test_subset_proof_verification() { - let db = make_deep_tree(); - - // original path query - let mut query = Query::new(); - query.insert_all(); - let mut subq = Query::new(); - subq.insert_all(); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - // first we prove non-verbose - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - 
assert_eq!(result_set.len(), 5); - assert_eq!( - result_set[0], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key1".to_vec(), - Some(Element::new_item(b"value1".to_vec())) - ) - ); - assert_eq!( - result_set[1], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key2".to_vec(), - Some(Element::new_item(b"value2".to_vec())) - ) - ); - assert_eq!( - result_set[2], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key3".to_vec(), - Some(Element::new_item(b"value3".to_vec())) + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"k1", + Element::new_item(vec![2]), + None, + None, ) - ); - assert_eq!( - result_set[3], - ( - vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()], - b"key4".to_vec(), - Some(Element::new_item(b"value4".to_vec())) + .unwrap() + .expect("successful item insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"k2", + Element::new_item(vec![3]), + None, + None, ) - ); - assert_eq!( - result_set[4], - ( - vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()], - b"key5".to_vec(), - Some(Element::new_item(b"value5".to_vec())) + .unwrap() + .expect("successful item insert"); + db.insert( + [TEST_LEAF, b"key1"].as_ref(), + b"k3", + Element::new_item(vec![4]), + None, + None, ) - ); - - // prove verbose - let verbose_proof = db.prove_verbose(&path_query).unwrap().unwrap(); - assert!(verbose_proof.len() > proof.len()); - - // subset path query - let mut query = Query::new(); - query.insert_key(b"innertree".to_vec()); - let mut subq = Query::new(); - subq.insert_key(b"key1".to_vec()); - query.set_subquery(subq); - let subset_path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - let (hash, result_set) = - GroveDb::verify_subset_query(&verbose_proof, &subset_path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 1); - assert_eq!( - result_set[0], - ( - vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], - b"key1".to_vec(), - 
Some(Element::new_item(b"value1".to_vec())) + .unwrap() + .expect("successful item insert"); + db.insert( + [TEST_LEAF, b"key2"].as_ref(), + b"k1", + Element::new_item(vec![5]), + None, + None, ) - ); -} + .unwrap() + .expect("successful item insert"); -#[test] -fn test_chained_path_query_verification() { - let db = make_deep_tree(); - - let mut query = Query::new(); - query.insert_all(); - let mut subq = Query::new(); - subq.insert_all(); - let mut subsubq = Query::new(); - subsubq.insert_all(); - - subq.set_subquery(subsubq); - query.set_subquery(subq); - - let path_query = PathQuery::new_unsized(vec![b"deep_leaf".to_vec()], query); - - // first prove non verbose - let proof = db.prove_query(&path_query).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 11); - - // prove verbose - let verbose_proof = db.prove_verbose(&path_query).unwrap().unwrap(); - assert!(verbose_proof.len() > proof.len()); - - // init deeper_1 path query - let mut query = Query::new(); - query.insert_all(); - - let deeper_1_path_query = PathQuery::new_unsized( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec(), - ], - query, - ); - - // define the path query generators - let mut chained_path_queries = vec![]; - chained_path_queries.push(|_elements: Vec| { let mut query = Query::new(); query.insert_all(); + let mut subquery = Query::new(); + subquery.insert_all(); + query.add_conditional_subquery(QueryItem::Key(b"key1".to_vec()), None, Some(subquery)); - let deeper_2_path_query = PathQuery::new_unsized( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_2".to_vec(), - ], - query, - ); - Some(deeper_2_path_query) - }); - - // verify the path query chain - let (root_hash, results) = GroveDb::verify_query_with_chained_path_queries( - &verbose_proof, - &deeper_1_path_query, - chained_path_queries, - ) - 
.unwrap(); - assert_eq!(root_hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(results.len(), 2); - assert_eq!(results[0].len(), 3); - assert_eq!( - results[0][0], - ( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec() - ], - b"key1".to_vec(), - Some(Element::new_item(b"value1".to_vec())) - ) - ); - assert_eq!( - results[0][1], - ( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec() - ], - b"key2".to_vec(), - Some(Element::new_item(b"value2".to_vec())) - ) - ); - assert_eq!( - results[0][2], - ( - vec![ - b"deep_leaf".to_vec(), - b"deep_node_1".to_vec(), - b"deeper_1".to_vec() - ], - b"key3".to_vec(), - Some(Element::new_item(b"value3".to_vec())) + let path = vec![TEST_LEAF.to_vec()]; + + let path_query = PathQuery::new_unsized(path.clone(), query.clone()); + + let (elements, _) = db + .query_raw( + &path_query, + true, + true, + true, + QueryResultType::QueryPathKeyElementTrioResultType, + None, + ) + .unwrap() + .expect("expected successful get_path_query"); + + assert_eq!(elements.len(), 5); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + + // println!( + // "{}", + // result_set + // .iter() + // .map(|a| a.to_string()) + // .collect::>() + // .join(", ") + // ); + assert_eq!(result_set.len(), 5); + + // TODO: verify that the result set is exactly the same + // compare_result_sets(&elements, &result_set); + + let path_query = PathQuery::new(path, SizedQuery::new(query.clone(), Some(1), None)); + + let (elements, _) = db + .query_raw( + &path_query, + true, + true, + true, + QueryResultType::QueryPathKeyElementTrioResultType, + None, + ) + .unwrap() + .expect("expected successful get_path_query"); + + assert_eq!(elements.len(), 1); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = 
GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 1); + // TODO: verify that the result set is exactly the same + // compare_result_sets(&elements, &result_set); + } + + #[test] + fn test_mixed_level_proofs_with_subquery_paths() { + let db = make_test_grovedb(); + + // TEST_LEAF + // / | \ + // a b c + // / | \ / \ + // d e:2 f:3 g:4 d + // / / | \ + // d:6 i j k + // + + db.insert( + [TEST_LEAF].as_ref(), + b"a", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"b", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF].as_ref(), + b"c", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF, b"a"].as_ref(), + b"d", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF, b"a"].as_ref(), + b"e", + Element::new_item(vec![2]), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF, b"a"].as_ref(), + b"f", + Element::new_item(vec![3]), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF, b"a", b"d"].as_ref(), + b"d", + Element::new_item(vec![6]), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF, b"b"].as_ref(), + b"g", + Element::new_item(vec![4]), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF, b"b"].as_ref(), + b"d", + Element::empty_tree(), + None, + None, ) - ); + .unwrap() + .expect("successful subtree insert"); + + db.insert( + [TEST_LEAF, b"b", b"d"].as_ref(), + b"i", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + 
[TEST_LEAF, b"b", b"d"].as_ref(), + b"j", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + db.insert( + [TEST_LEAF, b"b", b"d"].as_ref(), + b"k", + Element::empty_tree(), + None, + None, + ) + .unwrap() + .expect("successful subtree insert"); + // // if you don't have an item at the subquery path translation, you shouldn't + // be // added to the result set. + let mut query = Query::new(); + query.insert_all(); + query.set_subquery_path(vec![b"d".to_vec()]); + + let path = vec![TEST_LEAF.to_vec()]; + + let path_query = PathQuery::new_unsized(path, query.clone()); + + let (elements, _) = db + .query_raw( + &path_query, + false, + true, + false, + QueryResultType::QueryPathKeyElementTrioResultType, + None, + ) + .unwrap() + .expect("expected successful get_path_query"); + + assert_eq!( + elements, + QueryResultElements::from_elements(vec![ + PathKeyElementTrioResultItem(( + vec![b"test_leaf".to_vec(), b"a".to_vec()], + b"d".to_vec(), + Element::Tree(Some(b"d".to_vec()), None) + )), + PathKeyElementTrioResultItem(( + vec![b"test_leaf".to_vec(), b"b".to_vec()], + b"d".to_vec(), + Element::Tree(Some(b"j".to_vec()), None) + )) + ]) + ); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + // println!( + // "{}", + // result_set + // .iter() + // .map(|a| a.to_string()) + // .collect::>() + // .join("| ") + // ); + assert_eq!(result_set.len(), 2); + + // apply path translation then query + let mut query = Query::new(); + query.insert_all(); + let mut subquery = Query::new(); + subquery.insert_all(); + query.set_subquery_path(vec![b"d".to_vec()]); + query.set_subquery(subquery); + + let path = vec![TEST_LEAF.to_vec()]; + + let path_query = PathQuery::new_unsized(path, query.clone()); + + let (elements, _) = db + .query_raw( + &path_query, + false, + true, + false, + 
QueryResultType::QueryPathKeyElementTrioResultType, + None, + ) + .unwrap() + .expect("expected successful get_path_query"); + + assert_eq!( + elements, + QueryResultElements::from_elements(vec![ + PathKeyElementTrioResultItem(( + vec![b"test_leaf".to_vec(), b"a".to_vec(), b"d".to_vec()], + b"d".to_vec(), + Element::Item(vec![6], None) + )), + PathKeyElementTrioResultItem(( + vec![b"test_leaf".to_vec(), b"b".to_vec(), b"d".to_vec()], + b"i".to_vec(), + Element::Tree(None, None) + )), + PathKeyElementTrioResultItem(( + vec![b"test_leaf".to_vec(), b"b".to_vec(), b"d".to_vec()], + b"j".to_vec(), + Element::Tree(None, None) + )), + PathKeyElementTrioResultItem(( + vec![b"test_leaf".to_vec(), b"b".to_vec(), b"d".to_vec()], + b"k".to_vec(), + Element::Tree(None, None) + )) + ]) + ); - assert_eq!(results[1].len(), 3); - assert_eq!( - results[1][0], - ( + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + + // apply empty path translation + let mut query = Query::new(); + query.insert_all(); + let mut subquery = Query::new(); + subquery.insert_all(); + query.set_subquery_path(vec![]); + query.set_subquery(subquery); + + let path = vec![TEST_LEAF.to_vec()]; + + let path_query = PathQuery::new_unsized(path, query.clone()); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); + + // use conditionals to return from more than 2 depth + let mut query = Query::new(); + query.insert_all(); + let mut subquery = Query::new(); + subquery.insert_all(); + let mut deeper_subquery = Query::new(); + deeper_subquery.insert_all(); + subquery.add_conditional_subquery( + QueryItem::Key(b"d".to_vec()), + None, + 
Some(deeper_subquery), + ); + query.add_conditional_subquery(QueryItem::Key(b"a".to_vec()), None, Some(subquery.clone())); + query.add_conditional_subquery(QueryItem::Key(b"b".to_vec()), None, Some(subquery.clone())); + + let path = vec![TEST_LEAF.to_vec()]; + + let path_query = PathQuery::new_unsized(path, query.clone()); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 8); + } + + #[test] + fn test_proof_with_limit_zero() { + let db = make_deep_tree(); + let mut query = Query::new(); + query.insert_all(); + let path_query = PathQuery::new( + vec![TEST_LEAF.to_vec()], + SizedQuery::new(query, Some(0), Some(0)), + ); + + db.prove_query(&path_query, None) + .unwrap() + .expect_err("expected error when trying to prove with limit 0"); + } + + #[test] + fn test_result_set_path_after_verification() { + let db = make_deep_tree(); + let mut query = Query::new(); + query.insert_all(); + let path_query = + PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + + // assert the result set path + assert_eq!( + result_set[0].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[1].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[2].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + + assert_eq!(result_set[0].key, b"key1".to_vec()); + assert_eq!(result_set[1].key, b"key2".to_vec()); + assert_eq!(result_set[2].key, b"key3".to_vec()); + + // Test path tracking with subquery + let mut query = Query::new(); + query.insert_all(); + let mut 
subq = Query::new(); + subq.insert_all(); + query.set_subquery(subq); + let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); + + assert_eq!( + result_set[0].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[1].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[2].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[3].path, + vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()] + ); + assert_eq!( + result_set[4].path, + vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()] + ); + + // Test path tracking with subquery path + // perform a query, do a translation, perform another query + let mut query = Query::new(); + query.insert_key(b"deep_leaf".to_vec()); + query.set_subquery_path(vec![b"deep_node_1".to_vec(), b"deeper_1".to_vec()]); + let mut subq = Query::new(); + subq.insert_all(); + query.set_subquery(subq); + let path_query = PathQuery::new_unsized(vec![], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + + assert_eq!( + result_set[0].path, vec![ b"deep_leaf".to_vec(), b"deep_node_1".to_vec(), - b"deeper_2".to_vec() - ], - b"key4".to_vec(), - Some(Element::new_item(b"value4".to_vec())) - ) - ); - assert_eq!( - results[1][1], - ( + b"deeper_1".to_vec() + ] + ); + assert_eq!( + result_set[1].path, vec![ b"deep_leaf".to_vec(), b"deep_node_1".to_vec(), - b"deeper_2".to_vec() - ], - b"key5".to_vec(), - Some(Element::new_item(b"value5".to_vec())) - ) - ); - assert_eq!( - results[1][2], - ( + 
b"deeper_1".to_vec() + ] + ); + assert_eq!( + result_set[2].path, vec![ b"deep_leaf".to_vec(), b"deep_node_1".to_vec(), - b"deeper_2".to_vec() + b"deeper_1".to_vec() + ] + ); + + assert_eq!(result_set[0].key, b"key1".to_vec()); + assert_eq!(result_set[1].key, b"key2".to_vec()); + assert_eq!(result_set[2].key, b"key3".to_vec()); + + // Test path tracking for mixed level result set + let mut query = Query::new(); + query.insert_all(); + let mut subq = Query::new(); + subq.insert_all(); + query.add_conditional_subquery(QueryItem::Key(b"innertree".to_vec()), None, Some(subq)); + + let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query_raw(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + + assert_eq!( + result_set[0].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[1].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[2].path, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!(result_set[3].path, vec![TEST_LEAF.to_vec()]); + + assert_eq!(result_set[0].key, b"key1".to_vec()); + assert_eq!(result_set[1].key, b"key2".to_vec()); + assert_eq!(result_set[2].key, b"key3".to_vec()); + assert_eq!(result_set[3].key, b"innertree4".to_vec()); + } + + #[test] + fn test_verification_with_path_key_optional_element_trio() { + let db = make_deep_tree(); + let mut query = Query::new(); + query.insert_all(); + let path_query = + PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 3); + + assert_eq!( + result_set[0], + ( + 
vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key1".to_vec(), + Some(Element::new_item(b"value1".to_vec())) + ) + ); + assert_eq!( + result_set[1], + ( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key2".to_vec(), + Some(Element::new_item(b"value2".to_vec())) + ) + ); + assert_eq!( + result_set[2], + ( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key3".to_vec(), + Some(Element::new_item(b"value3".to_vec())) + ) + ); + } + + #[test] + fn test_absence_proof() { + let db = make_deep_tree(); + + // simple case, request for items k2..=k5 under inner tree + // we pass them as keys as terminal keys does not handle ranges with start or + // end len greater than 1 k2, k3 should be Some, k4, k5 should be None, k1, + // k6.. should not be in map + let mut query = Query::new(); + query.insert_key(b"key2".to_vec()); + query.insert_key(b"key3".to_vec()); + query.insert_key(b"key4".to_vec()); + query.insert_key(b"key5".to_vec()); + let path_query = PathQuery::new( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + SizedQuery::new(query, Some(4), None), + ); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = + GroveDb::verify_query_with_absence_proof(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 4); + + assert_eq!( + result_set[0].0, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[1].0, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[2].0, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + assert_eq!( + result_set[3].0, + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()] + ); + + assert_eq!(result_set[0].1, b"key2".to_vec()); + assert_eq!(result_set[1].1, b"key3".to_vec()); + assert_eq!(result_set[2].1, b"key4".to_vec()); + assert_eq!(result_set[3].1, b"key5".to_vec()); + + assert_eq!(result_set[0].2, Some(Element::new_item(b"value2".to_vec()))); + 
assert_eq!(result_set[1].2, Some(Element::new_item(b"value3".to_vec()))); + assert_eq!(result_set[2].2, None); + assert_eq!(result_set[3].2, None); + } + + #[test] + fn test_subset_proof_verification() { + let db = make_deep_tree(); + + // original path query + let mut query = Query::new(); + query.insert_all(); + let mut subq = Query::new(); + subq.insert_all(); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 5); + assert_eq!( + result_set[0], + ( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key1".to_vec(), + Some(Element::new_item(b"value1".to_vec())) + ) + ); + assert_eq!( + result_set[1], + ( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key2".to_vec(), + Some(Element::new_item(b"value2".to_vec())) + ) + ); + assert_eq!( + result_set[2], + ( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key3".to_vec(), + Some(Element::new_item(b"value3".to_vec())) + ) + ); + assert_eq!( + result_set[3], + ( + vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()], + b"key4".to_vec(), + Some(Element::new_item(b"value4".to_vec())) + ) + ); + assert_eq!( + result_set[4], + ( + vec![TEST_LEAF.to_vec(), b"innertree4".to_vec()], + b"key5".to_vec(), + Some(Element::new_item(b"value5".to_vec())) + ) + ); + + // subset path query + let mut query = Query::new(); + query.insert_key(b"innertree".to_vec()); + let mut subq = Query::new(); + subq.insert_key(b"key1".to_vec()); + query.set_subquery(subq); + let subset_path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + let (hash, result_set) = GroveDb::verify_subset_query(&proof, &subset_path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 1); + 
assert_eq!( + result_set[0], + ( + vec![TEST_LEAF.to_vec(), b"innertree".to_vec()], + b"key1".to_vec(), + Some(Element::new_item(b"value1".to_vec())) + ) + ); + } + #[test] + fn test_chained_path_query_verification() { + let db = make_deep_tree(); + + let mut query = Query::new(); + query.insert_all(); + let mut subq = Query::new(); + subq.insert_all(); + let mut subsubq = Query::new(); + subsubq.insert_all(); + + subq.set_subquery(subsubq); + query.set_subquery(subq); + + let path_query = PathQuery::new_unsized(vec![b"deep_leaf".to_vec()], query); + + // first prove non verbose + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 14); + + // init deeper_1 path query + let mut query = Query::new(); + query.insert_all(); + + let deeper_1_path_query = PathQuery::new_unsized( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_1".to_vec(), ], - b"key6".to_vec(), - Some(Element::new_item(b"value6".to_vec())) + query, + ); + + // define the path query generators + let mut chained_path_queries = vec![]; + chained_path_queries.push(|_elements: Vec| { + let mut query = Query::new(); + query.insert_all(); + + let deeper_2_path_query = PathQuery::new_unsized( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_2".to_vec(), + ], + query, + ); + Some(deeper_2_path_query) + }); + + // verify the path query chain + let (root_hash, results) = GroveDb::verify_query_with_chained_path_queries( + &proof, + &deeper_1_path_query, + chained_path_queries, ) - ); -} + .unwrap(); + assert_eq!(root_hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(results.len(), 2); + assert_eq!(results[0].len(), 3); + assert_eq!( + results[0][0], + ( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_1".to_vec() + ], + b"key1".to_vec(), + 
Some(Element::new_item(b"value1".to_vec())) + ) + ); + assert_eq!( + results[0][1], + ( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_1".to_vec() + ], + b"key2".to_vec(), + Some(Element::new_item(b"value2".to_vec())) + ) + ); + assert_eq!( + results[0][2], + ( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_1".to_vec() + ], + b"key3".to_vec(), + Some(Element::new_item(b"value3".to_vec())) + ) + ); -#[test] -fn test_query_b_depends_on_query_a() { - // we have two trees - // one with a mapping of id to name - // another with a mapping of name to age - // we want to get the age of every one after a certain id ordered by name - let db = make_test_grovedb(); - - // TEST_LEAF contains the id to name mapping - db.insert( - [TEST_LEAF].as_ref(), - &[1], - Element::new_item(b"d".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - db.insert( - [TEST_LEAF].as_ref(), - &[2], - Element::new_item(b"b".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - db.insert( - [TEST_LEAF].as_ref(), - &[3], - Element::new_item(b"c".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - db.insert( - [TEST_LEAF].as_ref(), - &[4], - Element::new_item(b"a".to_vec()), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - - // ANOTHER_TEST_LEAF contains the name to age mapping - db.insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"a", - Element::new_item(vec![10]), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - db.insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"b", - Element::new_item(vec![30]), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - db.insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"c", - Element::new_item(vec![12]), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - db.insert( - [ANOTHER_TEST_LEAF].as_ref(), - b"d", 
- Element::new_item(vec![46]), - None, - None, - ) - .unwrap() - .expect("successful root tree leaf insert"); - - // Query: return the age of everyone greater than id 2 ordered by name - // id 2 - b - // so we want to return the age for c and d = 12, 46 respectively - // the proof generator knows that id 2 = b, but the verifier doesn't - // hence we need to generate two proofs - // prove that 2 - b then prove age after b - // the verifier has to use the result of the first proof 2 - b - // to generate the path query for the verification of the second proof - - // query name associated with id 2 - let mut query = Query::new(); - query.insert_key(vec![2]); - let mut path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); - - // first we show that this returns the correct output - let proof = db.prove_query(&path_query_one).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query(&proof, &path_query_one).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 1); - assert_eq!(result_set[0].2, Some(Element::new_item(b"b".to_vec()))); - - // next query should return the age for elements above b - let mut query = Query::new(); - query.insert_range_after(b"b".to_vec()..); - let path_query_two = PathQuery::new_unsized(vec![ANOTHER_TEST_LEAF.to_vec()], query); - - // show that we get the correct output - let proof = db.prove_query(&path_query_two).unwrap().unwrap(); - let (hash, result_set) = GroveDb::verify_query(&proof, &path_query_two).unwrap(); - assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); - assert_eq!(result_set.len(), 2); - assert_eq!(result_set[0].2, Some(Element::new_item(vec![12]))); - assert_eq!(result_set[1].2, Some(Element::new_item(vec![46]))); - - // now we merge the path queries - let mut merged_path_queries = PathQuery::merge(vec![&path_query_one, &path_query_two]).unwrap(); - merged_path_queries.query.limit = Some(3); - let proof = 
db.prove_verbose(&merged_path_queries).unwrap().unwrap(); - - // verifier only has access to the statement age > 2 - // need to first get the name associated with 2 from the proof - // then use that to construct the next path query - let mut chained_path_queries = vec![]; - chained_path_queries.push(|prev_elements: Vec| { - let mut query = Query::new(); - let name_element = prev_elements[0].2.as_ref().unwrap(); - if let Element::Item(name, ..) = name_element { - query.insert_range_after(name.to_owned()..); - Some(PathQuery::new( - vec![ANOTHER_TEST_LEAF.to_vec()], - SizedQuery::new(query, Some(2), None), - )) - } else { - None - } - }); - - // add limit to path query one - path_query_one.query.limit = Some(1); - - let (_, result_set) = GroveDb::verify_query_with_chained_path_queries( - proof.as_slice(), - &path_query_one, - chained_path_queries, - ) - .unwrap(); - assert_eq!(result_set.len(), 2); - assert_eq!(result_set[0].len(), 1); - assert_eq!(result_set[1].len(), 2); - - let age_result = result_set[1].clone(); - assert_eq!(age_result[0].2, Some(Element::new_item(vec![12]))); - assert_eq!(age_result[1].2, Some(Element::new_item(vec![46]))); -} + assert_eq!(results[1].len(), 3); + assert_eq!( + results[1][0], + ( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_2".to_vec() + ], + b"key4".to_vec(), + Some(Element::new_item(b"value4".to_vec())) + ) + ); + assert_eq!( + results[1][1], + ( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_2".to_vec() + ], + b"key5".to_vec(), + Some(Element::new_item(b"value5".to_vec())) + ) + ); + assert_eq!( + results[1][2], + ( + vec![ + b"deep_leaf".to_vec(), + b"deep_node_1".to_vec(), + b"deeper_2".to_vec() + ], + b"key6".to_vec(), + Some(Element::new_item(b"value6".to_vec())) + ) + ); + } -#[test] -fn test_prove_absent_path_with_intermediate_emtpy_tree() { - // root - // test_leaf (empty) - let grovedb = make_test_grovedb(); + #[test] + fn test_query_b_depends_on_query_a() { + // we 
have two trees + // one with a mapping of id to name + // another with a mapping of name to age + // we want to get the age of every one after a certain id ordered by name + let db = make_test_grovedb(); - // prove the absence of key "book" in ["test_leaf", "invalid"] - let mut query = Query::new(); - query.insert_key(b"book".to_vec()); - let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"invalid".to_vec()], query); + // TEST_LEAF contains the id to name mapping + db.insert( + [TEST_LEAF].as_ref(), + &[1], + Element::new_item(b"d".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + db.insert( + [TEST_LEAF].as_ref(), + &[2], + Element::new_item(b"b".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + db.insert( + [TEST_LEAF].as_ref(), + &[3], + Element::new_item(b"c".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + db.insert( + [TEST_LEAF].as_ref(), + &[4], + Element::new_item(b"a".to_vec()), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); - let proof = grovedb - .prove_query(&path_query) + // ANOTHER_TEST_LEAF contains the name to age mapping + db.insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"a", + Element::new_item(vec![10]), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + db.insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"b", + Element::new_item(vec![30]), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + db.insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"c", + Element::new_item(vec![12]), + None, + None, + ) .unwrap() - .expect("should generate proofs"); + .expect("successful root tree leaf insert"); + db.insert( + [ANOTHER_TEST_LEAF].as_ref(), + b"d", + Element::new_item(vec![46]), + None, + None, + ) + .unwrap() + .expect("successful root tree leaf insert"); + + // Query: return the age of everyone greater than id 2 ordered by name + // id 
2 - b + // so we want to return the age for c and d = 12, 46 respectively + // the proof generator knows that id 2 = b, but the verifier doesn't + // hence we need to generate two proofs + // prove that 2 - b then prove age after b + // the verifier has to use the result of the first proof 2 - b + // to generate the path query for the verification of the second proof + + // query name associated with id 2 + let mut query = Query::new(); + query.insert_key(vec![2]); + let mut path_query_one = PathQuery::new_unsized(vec![TEST_LEAF.to_vec()], query); + + // first we show that this returns the correct output + let proof = db.prove_query(&path_query_one, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query(&proof, &path_query_one).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 1); + assert_eq!(result_set[0].2, Some(Element::new_item(b"b".to_vec()))); + + // next query should return the age for elements above b + let mut query = Query::new(); + query.insert_range_after(b"b".to_vec()..); + let path_query_two = PathQuery::new_unsized(vec![ANOTHER_TEST_LEAF.to_vec()], query); + + // show that we get the correct output + let proof = db.prove_query(&path_query_two, None).unwrap().unwrap(); + let (hash, result_set) = GroveDb::verify_query(&proof, &path_query_two).unwrap(); + assert_eq!(hash, db.root_hash(None).unwrap().unwrap()); + assert_eq!(result_set.len(), 2); + assert_eq!(result_set[0].2, Some(Element::new_item(vec![12]))); + assert_eq!(result_set[1].2, Some(Element::new_item(vec![46]))); + + // now we merge the path queries + let mut merged_path_queries = + PathQuery::merge(vec![&path_query_one, &path_query_two]).unwrap(); + merged_path_queries.query.limit = Some(3); + let proof = db.prove_query(&merged_path_queries, None).unwrap().unwrap(); + + // verifier only has access to the statement age > 2 + // need to first get the name associated with 2 from the proof + // then use that to construct the 
next path query + let mut chained_path_queries = vec![]; + chained_path_queries.push(|prev_elements: Vec| { + let mut query = Query::new(); + let name_element = prev_elements[0].2.as_ref().unwrap(); + if let Element::Item(name, ..) = name_element { + query.insert_range_after(name.to_owned()..); + Some(PathQuery::new( + vec![ANOTHER_TEST_LEAF.to_vec()], + SizedQuery::new(query, Some(2), None), + )) + } else { + None + } + }); + + // add limit to path query one + path_query_one.query.limit = Some(1); - let (root_hash, result_set) = - GroveDb::verify_query(proof.as_slice(), &path_query).expect("should verify proof"); - assert_eq!(result_set.len(), 0); - assert_eq!(root_hash, grovedb.root_hash(None).unwrap().unwrap()); + let (_, result_set) = GroveDb::verify_query_with_chained_path_queries( + proof.as_slice(), + &path_query_one, + chained_path_queries, + ) + .unwrap(); + assert_eq!(result_set.len(), 2); + assert_eq!(result_set[0].len(), 1); + assert_eq!(result_set[1].len(), 2); + + let age_result = result_set[1].clone(); + assert_eq!(age_result[0].2, Some(Element::new_item(vec![12]))); + assert_eq!(age_result[1].2, Some(Element::new_item(vec![46]))); + } + + #[test] + fn test_prove_absent_path_with_intermediate_emtpy_tree() { + // root + // test_leaf (empty) + let grovedb = make_test_grovedb(); + + // prove the absence of key "book" in ["test_leaf", "invalid"] + let mut query = Query::new(); + query.insert_key(b"book".to_vec()); + let path_query = + PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"invalid".to_vec()], query); + + let proof = grovedb + .prove_query(&path_query, None) + .unwrap() + .expect("should generate proofs"); + + let (root_hash, result_set) = + GroveDb::verify_query(proof.as_slice(), &path_query).expect("should verify proof"); + assert_eq!(result_set.len(), 0); + assert_eq!(root_hash, grovedb.root_hash(None).unwrap().unwrap()); + } } diff --git a/grovedb/src/tests/sum_tree_tests.rs b/grovedb/src/tests/sum_tree_tests.rs index 6c4a75895..8f28932f6 
100644 --- a/grovedb/src/tests/sum_tree_tests.rs +++ b/grovedb/src/tests/sum_tree_tests.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Sum tree tests use grovedb_merk::{ @@ -103,7 +75,7 @@ fn test_sum_tree_behaves_like_regular_tree() { let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"key".to_vec()], query); let proof = db - .prove_query(&path_query) + .prove_query(&path_query, None) .unwrap() .expect("should generate proof"); let (root_hash, result_set) = @@ -169,7 +141,7 @@ fn test_sum_item_behaves_like_regular_item() { let path_query = PathQuery::new_unsized(vec![TEST_LEAF.to_vec(), b"sumkey".to_vec()], query); let proof = db - .prove_query(&path_query) + .prove_query(&path_query, None) .unwrap() .expect("should generate proof"); let (root_hash, result_set) = diff --git a/grovedb/src/versioning.rs b/grovedb/src/versioning.rs deleted file mode 100644 index 5a724afc9..000000000 --- a/grovedb/src/versioning.rs +++ /dev/null @@ -1,63 +0,0 @@ -use std::io::Cursor; - -use integer_encoding::{VarInt, VarIntReader}; - -use crate::Error; - -pub(crate) const PROOF_VERSION: u32 = 1; - -/// Reads a version number from the given byte slice using variable-length -/// encoding. Returns a Result containing the parsed u32 version number, or an -/// Error if the data is corrupted and could not be read. -pub fn read_proof_version(mut bytes: &[u8]) -> Result { - bytes - .read_varint() - .map_err(|_| Error::CorruptedData("could not read version info".to_string())) -} - -/// Reads a version number from the given byte slice using variable-length -/// encoding, and returns the version number as well as a slice of the remaining -/// bytes. 
-pub fn read_and_consume_proof_version(bytes: &[u8]) -> Result<(u32, &[u8]), Error> { - let mut cursor = Cursor::new(bytes); - let version_number = cursor - .read_varint() - .map_err(|_| Error::CorruptedData("sdfs".to_string()))?; - let version_length: usize = cursor.position() as usize; - Ok((version_number, &bytes[version_length..])) -} - -/// Encodes the given version number as variable-length bytes and adds it to the -/// beginning of the given Vec, returning the modified vector. -pub(crate) fn prepend_version_to_bytes(mut bytes: Vec, version: u32) -> Result, Error> { - let version_bytes = version.encode_var_vec(); - bytes.splice(..0, version_bytes); - Ok(bytes) -} - -#[cfg(test)] -mod tests { - - use crate::versioning::{ - prepend_version_to_bytes, read_and_consume_proof_version, read_proof_version, - }; - - #[test] - fn read_correct_version() { - let data = vec![1, 2, 3]; - let version = 500_u32; - - // prepend the version information to the data vector - let new_data = prepend_version_to_bytes(data, version).unwrap(); - assert_eq!(new_data, [244, 3, 1, 2, 3]); - - // show that read_version doesn't consume - assert_eq!(read_proof_version(new_data.as_slice()).unwrap(), 500); - assert_eq!(new_data, [244, 3, 1, 2, 3]); - - // show that we consume the version number and return the remaining vector - let (version_number, data_vec) = read_and_consume_proof_version(&new_data).unwrap(); - assert_eq!(version_number, 500_u32); - assert_eq!(data_vec, [1, 2, 3]); - } -} diff --git a/merk/Cargo.toml b/merk/Cargo.toml index d7864897d..2d1c65bea 100644 --- a/merk/Cargo.toml +++ b/merk/Cargo.toml @@ -19,15 +19,12 @@ indexmap = "2.2.6" grovedb-costs = { version = "1.0.0-rc.2", path = "../costs" } grovedb-visualize = { version = "1.0.0-rc.2", path = "../visualize" } grovedb-path = { version = "1.0.0-rc.2", path = "../path" } +hex = { version = "0.4.3" } [dependencies.time] version = "0.3.34" optional = true -[dependencies.hex] -version = "0.4.3" -optional = true - 
[dependencies.colored] version = "2.1.0" optional = true @@ -55,9 +52,9 @@ optional = true [features] default = ["full"] +proof_debug = [] full = ["rand", "time", - "hex", "colored", "num_cpus", "byteorder", diff --git a/merk/benches/merk.rs b/merk/benches/merk.rs index b0f9cca44..ff0fbaeff 100644 --- a/merk/benches/merk.rs +++ b/merk/benches/merk.rs @@ -408,7 +408,7 @@ pub fn prove_1m_2k_rand(c: &mut Criterion) { b.iter_with_large_drop(|| { let keys = prove_keys_per_batch[i % n_batches].clone(); - merk.prove_unchecked(keys, None, None, true) + merk.prove_unchecked(keys, None, true) .unwrap() .expect("prove failed"); i += 1; diff --git a/merk/src/lib.rs b/merk/src/lib.rs index 356bd5b8d..d746a885a 100644 --- a/merk/src/lib.rs +++ b/merk/src/lib.rs @@ -70,12 +70,8 @@ mod visualize; #[cfg(feature = "full")] pub use ed; -#[cfg(feature = "full")] -pub use error::Error; #[cfg(any(feature = "full", feature = "verify"))] -pub use proofs::query::execute_proof; -#[cfg(any(feature = "full", feature = "verify"))] -pub use proofs::query::verify_query; +pub use error::Error; #[cfg(feature = "full")] pub use tree::{ BatchEntry, Link, MerkBatch, Op, PanicSource, HASH_BLOCK_SIZE, HASH_BLOCK_SIZE_U32, diff --git a/merk/src/merk/chunks.rs b/merk/src/merk/chunks.rs index f6b1b64cf..ef94571e5 100644 --- a/merk/src/merk/chunks.rs +++ b/merk/src/merk/chunks.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in 
all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - use std::collections::VecDeque; use ed::Encode; diff --git a/merk/src/merk/mod.rs b/merk/src/merk/mod.rs index 28ce3f436..e9bab4f71 100644 --- a/merk/src/merk/mod.rs +++ b/merk/src/merk/mod.rs @@ -286,6 +286,14 @@ where }) } + /// Returns if the merk has a root tree set + pub fn has_root_key(&self) -> bool { + let tree = self.tree.take(); + let res = tree.is_some(); + self.tree.set(tree); + res + } + /// Returns the total sum value in the Merk tree pub fn sum(&self) -> Result, Error> { self.use_tree(|tree| match tree { diff --git a/merk/src/merk/prove.rs b/merk/src/merk/prove.rs index 7f2955349..99227c131 100644 --- a/merk/src/merk/prove.rs +++ b/merk/src/merk/prove.rs @@ -29,14 +29,13 @@ where &self, query: Query, limit: Option, - offset: Option, ) -> CostResult { let left_to_right = query.left_to_right; - self.prove_unchecked(query, limit, offset, left_to_right) - .map_ok(|(proof, limit, offset)| { + self.prove_unchecked(query, limit, left_to_right) + .map_ok(|(proof, limit)| { let mut bytes = Vec::with_capacity(128); encode_into(proof.iter(), &mut bytes); - ProofConstructionResult::new(bytes, limit, offset) + ProofConstructionResult::new(bytes, limit) }) } @@ -55,11 +54,10 @@ where &self, query: Query, limit: Option, - offset: Option, ) -> CostResult { let left_to_right = query.left_to_right; - self.prove_unchecked(query, limit, offset, left_to_right) - .map_ok(|(proof, limit, offset)| 
ProofWithoutEncodingResult::new(proof, limit, offset)) + self.prove_unchecked(query, limit, left_to_right) + .map_ok(|(proof, limit)| ProofWithoutEncodingResult::new(proof, limit)) } /// Creates a Merkle proof for the list of queried keys. For each key in @@ -78,7 +76,6 @@ where &self, query: I, limit: Option, - offset: Option, left_to_right: bool, ) -> CostResult where @@ -95,14 +92,46 @@ where .wrap_with_cost(Default::default()) .flat_map_ok(|tree| { let mut ref_walker = RefWalker::new(tree, self.source()); - ref_walker.create_proof(query_vec.as_slice(), limit, offset, left_to_right) + ref_walker.create_proof(query_vec.as_slice(), limit, left_to_right) }) - .map_ok(|(proof, _, limit, offset, ..)| (proof, limit, offset)) + .map_ok(|(proof, _, limit, ..)| (proof, limit)) + }) + } + + /// Creates a Merkle proof for the list of queried keys. For each key in + /// the query, if the key is found in the store then the value will be + /// proven to be in the tree. For each key in the query that does not + /// exist in the tree, its absence will be proven by including + /// boundary keys. + /// The proof returned is in an encoded format which can be verified with + /// `merk::verify`. + /// + /// This is unsafe because the keys in `query` must be sorted and unique - + /// if they are not, there will be undefined behavior. For a safe version + /// of this method which checks to ensure the batch is sorted and + /// unique, see `prove`. 
+ pub fn prove_unchecked_query_items( + &self, + query_items: &[QueryItem], + limit: Option, + left_to_right: bool, + ) -> CostResult { + self.use_tree_mut(|maybe_tree| { + maybe_tree + .ok_or(Error::CorruptedCodeExecution( + "Cannot create proof for empty tree", + )) + .wrap_with_cost(Default::default()) + .flat_map_ok(|tree| { + let mut ref_walker = RefWalker::new(tree, self.source()); + ref_walker.create_proof(query_items, limit, left_to_right) + }) + .map_ok(|(proof, _, limit, ..)| (proof, limit)) }) } } -type Proof = (LinkedList, Option, Option); +type Proof = (LinkedList, Option); /// Proof construction result pub struct ProofConstructionResult { @@ -110,18 +139,12 @@ pub struct ProofConstructionResult { pub proof: Vec, /// Limit pub limit: Option, - /// Offset - pub offset: Option, } impl ProofConstructionResult { /// New ProofConstructionResult - pub fn new(proof: Vec, limit: Option, offset: Option) -> Self { - Self { - proof, - limit, - offset, - } + pub fn new(proof: Vec, limit: Option) -> Self { + Self { proof, limit } } } @@ -131,17 +154,11 @@ pub struct ProofWithoutEncodingResult { pub proof: LinkedList, /// Limit pub limit: Option, - /// Offset - pub offset: Option, } impl ProofWithoutEncodingResult { /// New ProofWithoutEncodingResult - pub fn new(proof: LinkedList, limit: Option, offset: Option) -> Self { - Self { - proof, - limit, - offset, - } + pub fn new(proof: LinkedList, limit: Option) -> Self { + Self { proof, limit } } } diff --git a/merk/src/merk/restore.rs b/merk/src/merk/restore.rs index 9e26b1afd..c5ce12862 100644 --- a/merk/src/merk/restore.rs +++ b/merk/src/merk/restore.rs @@ -710,10 +710,7 @@ mod tests { let (chunk, _) = chunk_producer.chunk_with_index(2).unwrap(); // apply second chunk let new_chunk_ids = restorer - .process_chunk( - &traversal_instruction_as_vec_bytes(&vec![LEFT, LEFT]), - chunk, - ) + .process_chunk(&traversal_instruction_as_vec_bytes(&[LEFT, LEFT]), chunk) .unwrap(); assert_eq!(new_chunk_ids.len(), 0); // 
chunk_map should have 1 less element @@ -726,10 +723,8 @@ mod tests { // let's try to apply the second chunk again, should not work let (chunk, _) = chunk_producer.chunk_with_index(2).unwrap(); // apply second chunk - let chunk_process_result = restorer.process_chunk( - &traversal_instruction_as_vec_bytes(&vec![LEFT, LEFT]), - chunk, - ); + let chunk_process_result = + restorer.process_chunk(&traversal_instruction_as_vec_bytes(&[LEFT, LEFT]), chunk); assert!(chunk_process_result.is_err()); assert!(matches!( chunk_process_result, @@ -739,10 +734,8 @@ mod tests { // next let's get a random but expected chunk and work with that e.g. chunk 4 // but let's apply it to the wrong place let (chunk, _) = chunk_producer.chunk_with_index(4).unwrap(); - let chunk_process_result = restorer.process_chunk( - &traversal_instruction_as_vec_bytes(&vec![LEFT, RIGHT]), - chunk, - ); + let chunk_process_result = + restorer.process_chunk(&traversal_instruction_as_vec_bytes(&[LEFT, RIGHT]), chunk); assert!(chunk_process_result.is_err()); assert!(matches!( chunk_process_result, @@ -755,10 +748,7 @@ mod tests { let (chunk, _) = chunk_producer.chunk_with_index(5).unwrap(); // apply second chunk let new_chunk_ids = restorer - .process_chunk( - &traversal_instruction_as_vec_bytes(&vec![RIGHT, RIGHT]), - chunk, - ) + .process_chunk(&traversal_instruction_as_vec_bytes(&[RIGHT, RIGHT]), chunk) .unwrap(); assert_eq!(new_chunk_ids.len(), 0); // chunk_map should have 1 less element @@ -772,10 +762,7 @@ mod tests { let (chunk, _) = chunk_producer.chunk_with_index(3).unwrap(); // apply second chunk let new_chunk_ids = restorer - .process_chunk( - &traversal_instruction_as_vec_bytes(&vec![LEFT, RIGHT]), - chunk, - ) + .process_chunk(&traversal_instruction_as_vec_bytes(&[LEFT, RIGHT]), chunk) .unwrap(); assert_eq!(new_chunk_ids.len(), 0); // chunk_map should have 1 less element @@ -789,10 +776,7 @@ mod tests { let (chunk, _) = chunk_producer.chunk_with_index(4).unwrap(); // apply second chunk let 
new_chunk_ids = restorer - .process_chunk( - &traversal_instruction_as_vec_bytes(&vec![RIGHT, LEFT]), - chunk, - ) + .process_chunk(&traversal_instruction_as_vec_bytes(&[RIGHT, LEFT]), chunk) .unwrap(); assert_eq!(new_chunk_ids.len(), 0); // chunk_map should have 1 less element @@ -1035,7 +1019,7 @@ mod tests { // first restore the first chunk let (chunk, next_chunk_index) = chunk_producer.chunk_with_index(1).unwrap(); let new_chunk_ids = restorer - .process_chunk(&traversal_instruction_as_vec_bytes(&vec![]), chunk) + .process_chunk(&traversal_instruction_as_vec_bytes(&[]), chunk) .expect("should process chunk"); assert_eq!(new_chunk_ids.len(), 4); assert_eq!(next_chunk_index, Some(2)); @@ -1273,7 +1257,7 @@ mod tests { // first restore the first chunk let (chunk, next_chunk_index) = chunk_producer.chunk_with_index(1).unwrap(); let new_chunk_ids = restorer - .process_chunk(&traversal_instruction_as_vec_bytes(&vec![]), chunk) + .process_chunk(&traversal_instruction_as_vec_bytes(&[]), chunk) .expect("should process chunk"); assert_eq!(new_chunk_ids.len(), 4); assert_eq!(next_chunk_index, Some(2)); diff --git a/merk/src/proofs/chunk.rs b/merk/src/proofs/chunk.rs index 223346883..063a35754 100644 --- a/merk/src/proofs/chunk.rs +++ b/merk/src/proofs/chunk.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Chunk proofs mod binary_range; diff --git a/merk/src/proofs/chunk/util.rs b/merk/src/proofs/chunk/util.rs index 39c513b77..fab2024a0 100644 --- a/merk/src/proofs/chunk/util.rs +++ b/merk/src/proofs/chunk/util.rs @@ -573,11 +573,11 @@ mod test { #[test] fn test_traversal_instruction_as_string() { - assert_eq!(traversal_instruction_as_vec_bytes(&vec![]), vec![]); - assert_eq!(traversal_instruction_as_vec_bytes(&vec![LEFT]), vec![1u8]); - assert_eq!(traversal_instruction_as_vec_bytes(&vec![RIGHT]), vec![0u8]); + assert_eq!(traversal_instruction_as_vec_bytes(&[]), vec![]); + assert_eq!(traversal_instruction_as_vec_bytes(&[LEFT]), vec![1u8]); + assert_eq!(traversal_instruction_as_vec_bytes(&[RIGHT]), vec![0u8]); assert_eq!( - traversal_instruction_as_vec_bytes(&vec![RIGHT, LEFT, LEFT, RIGHT]), + traversal_instruction_as_vec_bytes(&[RIGHT, LEFT, LEFT, RIGHT]), vec![0u8, 1u8, 1u8, 0u8] ); } @@ -585,20 +585,20 @@ mod test { #[test] fn test_instruction_string_to_traversal_instruction() { assert_eq!( - vec_bytes_as_traversal_instruction(&vec![1u8]).unwrap(), + vec_bytes_as_traversal_instruction(&[1u8]).unwrap(), vec![LEFT] ); assert_eq!( - vec_bytes_as_traversal_instruction(&vec![0u8]).unwrap(), + vec_bytes_as_traversal_instruction(&[0u8]).unwrap(), vec![RIGHT] ); assert_eq!( - vec_bytes_as_traversal_instruction(&vec![0u8, 0u8, 1u8]).unwrap(), + vec_bytes_as_traversal_instruction(&[0u8, 0u8, 1u8]).unwrap(), vec![RIGHT, RIGHT, LEFT] ); - 
assert!(vec_bytes_as_traversal_instruction(&vec![0u8, 0u8, 2u8]).is_err()); + assert!(vec_bytes_as_traversal_instruction(&[0u8, 0u8, 2u8]).is_err()); assert_eq!( - vec_bytes_as_traversal_instruction(&vec![]).unwrap(), + vec_bytes_as_traversal_instruction(&[]).unwrap(), Vec::::new() ); } diff --git a/merk/src/proofs/encoding.rs b/merk/src/proofs/encoding.rs index d0395fe74..eb1c055b2 100644 --- a/merk/src/proofs/encoding.rs +++ b/merk/src/proofs/encoding.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Proofs encoding #[cfg(any(feature = "full", feature = "verify"))] diff --git a/merk/src/proofs/mod.rs b/merk/src/proofs/mod.rs index 1bedeec5e..45f4b2e9d 100644 --- a/merk/src/proofs/mod.rs +++ b/merk/src/proofs/mod.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk proofs #[cfg(feature = "full")] @@ -104,9 +76,58 @@ pub enum Node { KVValueHash(Vec, Vec, CryptoHash), /// Represents, the key, value, value_hash and feature_type of a tree node + /// Used by Sum trees KVValueHashFeatureType(Vec, Vec, CryptoHash, TreeFeatureType), /// Represents the key, value of some referenced node and value_hash of /// current tree node KVRefValueHash(Vec, Vec, CryptoHash), } + +use std::fmt; + +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for Node { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let node_string = match self { + Node::Hash(hash) => format!("Hash(HASH[{}])", hex::encode(hash)), + Node::KVHash(kv_hash) => format!("KVHash(HASH[{}])", hex::encode(kv_hash)), + Node::KV(key, value) => { + format!("KV({}, {})", hex_to_ascii(key), hex_to_ascii(value)) + } + Node::KVValueHash(key, value, value_hash) => format!( + "KVValueHash({}, {}, HASH[{}])", + hex_to_ascii(key), + hex_to_ascii(value), + hex::encode(value_hash) + ), + Node::KVDigest(key, value_hash) => format!( + "KVDigest({}, HASH[{}])", + hex_to_ascii(key), + hex::encode(value_hash) + ), + Node::KVRefValueHash(key, value, value_hash) => format!( + "KVRefValueHash({}, {}, HASH[{}])", + hex_to_ascii(key), + hex_to_ascii(value), + hex::encode(value_hash) + ), + Node::KVValueHashFeatureType(key, value, value_hash, feature_type) => format!( + "KVValueHashFeatureType({}, {}, HASH[{}], {:?})", + hex_to_ascii(key), + hex_to_ascii(value), + hex::encode(value_hash), + feature_type + ), + }; + write!(f, "{}", node_string) + } +} + +fn hex_to_ascii(hex_value: &[u8]) -> String { + if hex_value.len() == 1 && hex_value[0] < b"0"[0] { + hex::encode(hex_value) + } else { + String::from_utf8(hex_value.to_vec()).unwrap_or_else(|_| hex::encode(hex_value)) + } +} diff --git a/merk/src/proofs/query/map.rs b/merk/src/proofs/query/map.rs index 9eb716ed9..757403a2d 100644 --- a/merk/src/proofs/query/map.rs +++ b/merk/src/proofs/query/map.rs @@ -1,31 +1,3 @@ -// 
MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Query #![allow(unstable_name_collisions)] @@ -264,6 +236,38 @@ impl<'a> Iterator for Range<'a> { } } +#[cfg(feature = "full")] +/// `BTreeMapExtras` provides extra functionality to work with `BTreeMap` that +/// either missed or unstable +/// NOTE: We can easily remove this when the following feature will be rolled +/// out into stable rust: https://github.com/rust-lang/rust/issues/62924 +trait BTreeMapExtras { + type K; + type V; + + /// Returns `None` if `BTreeMap` is empty otherwise the first key-value pair + /// in the map. The key in this pair is the minimum key in the map. + fn first_key_value(&self) -> Option<(&Self::K, &Self::V)>; + + /// Returns `None` if `BTreeMap` is empty otherwise the last key-value pair + /// in the map. The key in this pair is the maximum key in the map. 
+ fn last_key_value(&self) -> Option<(&Self::K, &Self::V)>; +} + +#[cfg(feature = "full")] +impl BTreeMapExtras for BTreeMap { + type K = KK; + type V = VV; + + fn first_key_value(&self) -> Option<(&Self::K, &Self::V)> { + self.iter().next() + } + + fn last_key_value(&self) -> Option<(&Self::K, &Self::V)> { + self.iter().next_back() + } +} + #[cfg(feature = "full")] #[cfg(test)] mod tests { @@ -396,35 +400,3 @@ mod tests { assert_eq!(range.next().unwrap().unwrap(), (&[1][..], &[1][..])); } } - -#[cfg(feature = "full")] -/// `BTreeMapExtras` provides extra functionality to work with `BTreeMap` that -/// either missed or unstable -/// NOTE: We can easily remove this when the following feature will be rolled -/// out into stable rust: https://github.com/rust-lang/rust/issues/62924 -trait BTreeMapExtras { - type K; - type V; - - /// Returns `None` if `BTreeMap` is empty otherwise the first key-value pair - /// in the map. The key in this pair is the minimum key in the map. - fn first_key_value(&self) -> Option<(&Self::K, &Self::V)>; - - /// Returns `None` if `BTreeMap` is empty otherwise the last key-value pair - /// in the map. The key in this pair is the maximum key in the map. 
- fn last_key_value(&self) -> Option<(&Self::K, &Self::V)>; -} - -#[cfg(feature = "full")] -impl BTreeMapExtras for BTreeMap { - type K = KK; - type V = VV; - - fn first_key_value(&self) -> Option<(&Self::K, &Self::V)> { - self.iter().next() - } - - fn last_key_value(&self) -> Option<(&Self::K, &Self::V)> { - self.iter().next_back() - } -} diff --git a/merk/src/proofs/query/mod.rs b/merk/src/proofs/query/mod.rs index 29296efc4..107a1ec80 100644 --- a/merk/src/proofs/query/mod.rs +++ b/merk/src/proofs/query/mod.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Query proofs #[cfg(feature = "full")] @@ -42,11 +14,11 @@ pub mod query_item; #[cfg(any(feature = "full", feature = "verify"))] mod verify; -#[cfg(any(feature = "full", feature = "verify"))] +#[cfg(feature = "full")] use std::cmp::Ordering; -use std::{collections::HashSet, ops::RangeFull}; +use std::{collections::HashSet, fmt, ops::RangeFull}; -#[cfg(any(feature = "full", feature = "verify"))] +#[cfg(feature = "full")] use grovedb_costs::{cost_return_on_error, CostContext, CostResult, CostsExt, OperationCost}; #[cfg(any(feature = "full", feature = "verify"))] use indexmap::IndexMap; @@ -56,17 +28,20 @@ pub use map::*; pub use query_item::intersect::QueryItemIntersectionResult; #[cfg(any(feature = "full", feature = "verify"))] pub use query_item::QueryItem; +#[cfg(feature = "full")] +use verify::ProofAbsenceLimit; #[cfg(any(feature = "full", feature = "verify"))] -use verify::ProofAbsenceLimitOffset; +pub use verify::VerifyOptions; #[cfg(any(feature = "full", feature = "verify"))] -pub use verify::{execute_proof, verify_query, ProofVerificationResult, ProvedKeyValue}; +pub use verify::{ProofVerificationResult, ProvedKeyOptionalValue, ProvedKeyValue}; #[cfg(feature = "full")] use {super::Op, std::collections::LinkedList}; -#[cfg(any(feature = "full", feature = "verify"))] +#[cfg(feature = "full")] use super::Node; #[cfg(any(feature = "full", feature = "verify"))] use crate::error::Error; +use crate::proofs::hex_to_ascii; #[cfg(feature = "full")] use crate::tree::kv::ValueDefinedCostType; #[cfg(feature = "full")] @@ -109,6 +84,57 @@ pub struct Query { pub left_to_right: bool, } +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for SubqueryBranch { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "SubqueryBranch {{ ")?; + if let Some(path) = &self.subquery_path { + write!(f, "subquery_path: [")?; + for (i, path_part) in path.iter().enumerate() { + if i > 0 { + write!(f, ", ")? 
+ } + write!(f, "{}", hex_to_ascii(path_part))?; + } + write!(f, "], ")?; + } else { + write!(f, "subquery_path: None ")?; + } + if let Some(subquery) = &self.subquery { + write!(f, "subquery: {} ", subquery)?; + } else { + write!(f, "subquery: None ")?; + } + write!(f, "}}") + } +} + +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for Query { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "Query {{")?; + writeln!(f, " items: [")?; + for item in &self.items { + writeln!(f, " {},", item)?; + } + writeln!(f, " ],")?; + writeln!( + f, + " default_subquery_branch: {},", + self.default_subquery_branch + )?; + if let Some(conditional_branches) = &self.conditional_subquery_branches { + writeln!(f, " conditional_subquery_branches: {{")?; + for (item, branch) in conditional_branches { + writeln!(f, " {}: {},", item, branch)?; + } + writeln!(f, " }},")?; + } + writeln!(f, " left_to_right: {},", self.left_to_right)?; + write!(f, "}}") + } +} + #[cfg(any(feature = "full", feature = "verify"))] impl Query { /// Creates a new query which contains no items. @@ -116,6 +142,15 @@ impl Query { Self::new_with_direction(true) } + /// Creates a new query which contains all items. + pub fn new_range_full() -> Self { + Self { + items: vec![QueryItem::RangeFull(RangeFull)], + left_to_right: true, + ..Self::default() + } + } + /// Creates a new query which contains only one key. 
pub fn new_single_key(key: Vec) -> Self { Self { @@ -155,6 +190,37 @@ impl Query { } } + pub fn has_subquery_on_key(&self, key: &[u8], in_path: bool) -> bool { + if in_path || self.default_subquery_branch.subquery.is_some() { + return true; + } + if let Some(conditional_subquery_branches) = self.conditional_subquery_branches.as_ref() { + for (query_item, subquery) in conditional_subquery_branches { + if query_item.contains(key) { + return subquery.subquery.is_some(); + } + } + } + false + } + + pub fn has_subquery_or_subquery_path_on_key(&self, key: &[u8], in_path: bool) -> bool { + if in_path + || self.default_subquery_branch.subquery.is_some() + || self.default_subquery_branch.subquery_path.is_some() + { + return true; + } + if let Some(conditional_subquery_branches) = self.conditional_subquery_branches.as_ref() { + for query_item in conditional_subquery_branches.keys() { + if query_item.contains(key) { + return true; + } + } + } + false + } + /// Pushes terminal key paths and keys to `result`, no more than /// `max_results`. Returns the number of terminal keys added. /// @@ -520,10 +586,9 @@ where &mut self, query: &[QueryItem], limit: Option, - offset: Option, left_to_right: bool, - ) -> CostResult { - self.create_proof(query, limit, offset, left_to_right) + ) -> CostResult { + self.create_proof(query, limit, left_to_right) } /// Generates a proof for the list of queried keys. 
Returns a tuple @@ -537,9 +602,8 @@ where &mut self, query: &[QueryItem], limit: Option, - offset: Option, left_to_right: bool, - ) -> CostResult { + ) -> CostResult { let mut cost = OperationCost::default(); // TODO: don't copy into vec, support comparing QI to byte slice @@ -557,8 +621,6 @@ where let current_node_in_query: bool; let mut node_on_non_inclusive_bounds = false; - // becomes true if the offset exists and is non zero - let mut skip_current_node = false; let (mut left_items, mut right_items) = match search { Ok(index) => { @@ -602,90 +664,65 @@ where } }; - if offset.is_none() || offset == Some(0) { - // when the limit hits zero, the rest of the query batch should be cleared - // so empty the left, right query batch, and set the current node to not found - if let Some(current_limit) = limit { - if current_limit == 0 { - left_items = &[]; - search = Err(Default::default()); - right_items = &[]; - } + // when the limit hits zero, the rest of the query batch should be cleared + // so empty the left, right query batch, and set the current node to not found + if let Some(current_limit) = limit { + if current_limit == 0 { + left_items = &[]; + search = Err(Default::default()); + right_items = &[]; } } let proof_direction = left_to_right; // signifies what direction the DFS should go - let (mut proof, left_absence, mut new_limit, mut new_offset) = if left_to_right { + let (mut proof, left_absence, mut new_limit) = if left_to_right { cost_return_on_error!( &mut cost, - self.create_child_proof(proof_direction, left_items, limit, offset, left_to_right) + self.create_child_proof(proof_direction, left_items, limit, left_to_right) ) } else { cost_return_on_error!( &mut cost, - self.create_child_proof(proof_direction, right_items, limit, offset, left_to_right) + self.create_child_proof(proof_direction, right_items, limit, left_to_right) ) }; - if let Some(current_offset) = new_offset { - if current_offset > 0 && current_node_in_query && !node_on_non_inclusive_bounds 
{ - // reserve offset slot for current node before generating proof for right - // subtree - new_offset = Some(current_offset - 1); - skip_current_node = true; - } - } - - if !skip_current_node && (new_offset.is_none() || new_offset == Some(0)) { - if let Some(current_limit) = new_limit { - // if after generating proof for the left subtree, the limit becomes 0 - // clear the current node and clear the right batch - if current_limit == 0 { + if let Some(current_limit) = new_limit { + // if after generating proof for the left subtree, the limit becomes 0 + // clear the current node and clear the right batch + if current_limit == 0 { + if left_to_right { + right_items = &[]; + } else { + left_items = &[]; + } + search = Err(Default::default()); + } else if current_node_in_query && !node_on_non_inclusive_bounds { + // if limit is not zero, reserve a limit slot for the current node + // before generating proof for the right subtree + new_limit = Some(current_limit - 1); + // if after limit slot reservation, limit becomes 0, right query + // should be cleared + if current_limit - 1 == 0 { if left_to_right { right_items = &[]; } else { left_items = &[]; } - search = Err(Default::default()); - } else if current_node_in_query && !node_on_non_inclusive_bounds { - // if limit is not zero, reserve a limit slot for the current node - // before generating proof for the right subtree - new_limit = Some(current_limit - 1); - // if after limit slot reservation, limit becomes 0, right query - // should be cleared - if current_limit - 1 == 0 { - if left_to_right { - right_items = &[]; - } else { - left_items = &[]; - } - } } } } let proof_direction = !proof_direction; // search the opposite path on second pass - let (mut right_proof, right_absence, new_limit, new_offset) = if left_to_right { + let (mut right_proof, right_absence, new_limit) = if left_to_right { cost_return_on_error!( &mut cost, - self.create_child_proof( - proof_direction, - right_items, - new_limit, - new_offset, - 
left_to_right, - ) + self.create_child_proof(proof_direction, right_items, new_limit, left_to_right,) ) } else { cost_return_on_error!( &mut cost, - self.create_child_proof( - proof_direction, - left_items, - new_limit, - new_offset, - left_to_right, - ) + self.create_child_proof(proof_direction, left_items, new_limit, left_to_right,) ) }; @@ -693,7 +730,7 @@ where proof.push_back(match search { Ok(_) => { - if node_on_non_inclusive_bounds || skip_current_node { + if node_on_non_inclusive_bounds { if left_to_right { Op::Push(self.to_kvdigest_node()) } else { @@ -737,13 +774,7 @@ where } } - Ok(( - proof, - (left_absence.0, right_absence.1), - new_limit, - new_offset, - )) - .wrap_with_cost(cost) + Ok((proof, (left_absence.0, right_absence.1), new_limit)).wrap_with_cost(cost) } /// Similar to `create_proof`. Recurses into the child on the given side and @@ -754,16 +785,15 @@ where left: bool, query: &[QueryItem], limit: Option, - offset: Option, left_to_right: bool, - ) -> CostResult { + ) -> CostResult { if !query.is_empty() { self.walk(left, None::<&fn(&[u8]) -> Option>) .flat_map_ok(|child_opt| { if let Some(mut child) = child_opt { - child.create_proof(query, limit, offset, left_to_right) + child.create_proof(query, limit, left_to_right) } else { - Ok((LinkedList::new(), (true, true), limit, offset)) + Ok((LinkedList::new(), (true, true), limit)) .wrap_with_cost(Default::default()) } }) @@ -774,10 +804,9 @@ where } else { Op::PushInverted(link.to_hash_node()) }); - Ok((proof, (false, false), limit, offset)).wrap_with_cost(Default::default()) + Ok((proof, (false, false), limit)).wrap_with_cost(Default::default()) } else { - Ok((LinkedList::new(), (false, false), limit, offset)) - .wrap_with_cost(Default::default()) + Ok((LinkedList::new(), (false, false), limit)).wrap_with_cost(Default::default()) } } } @@ -787,32 +816,40 @@ where #[cfg(test)] mod test { + macro_rules! 
compare_result_tuples_not_optional { + ($result_set:expr, $expected_result_set:expr) => { + assert_eq!( + $expected_result_set.len(), + $result_set.len(), + "Result set lengths do not match" + ); + for i in 0..$expected_result_set.len() { + assert_eq!( + $expected_result_set[i].0, $result_set[i].key, + "Key mismatch at index {}", + i + ); + assert_eq!( + &$expected_result_set[i].1, + $result_set[i].value.as_ref().expect("expected value"), + "Value mismatch at index {}", + i + ); + } + }; + } + use super::{ super::{encoding::encode_into, *}, *, }; use crate::{ - proofs::query::{ - query_item::QueryItem::RangeAfter, - verify, - verify::{verify_query, ProvedKeyValue}, - }, + proofs::query::{query_item::QueryItem::RangeAfter, verify}, test_utils::make_tree_seq, tree::{NoopCommit, PanicSource, RefWalker, TreeNode}, TreeFeatureType::BasicMerkNode, }; - fn compare_result_tuples( - result_set: Vec, - expected_result_set: Vec<(Vec, Vec)>, - ) { - assert_eq!(expected_result_set.len(), result_set.len()); - for i in 0..expected_result_set.len() { - assert_eq!(expected_result_set[i].0, result_set[i].key); - assert_eq!(expected_result_set[i].1, result_set[i].value); - } - } - fn make_3_node_tree() -> TreeNode { let mut tree = TreeNode::new(vec![5], vec![5], None, BasicMerkNode) .unwrap() @@ -875,7 +912,6 @@ mod test { .collect::>() .as_slice(), None, - None, true, ) .unwrap() @@ -893,7 +929,8 @@ mod test { query.insert_key(key.clone()); } - let result = verify_query(bytes.as_slice(), &query, None, None, true, expected_hash) + let result = query + .verify_proof(bytes.as_slice(), None, true, expected_hash) .unwrap() .expect("verify failed"); @@ -905,7 +942,10 @@ mod test { } for (key, expected_value) in keys.iter().zip(expected_result.iter()) { - assert_eq!(values.get(key), expected_value.as_ref()); + assert_eq!( + values.get(key).and_then(|a| a.as_ref()), + expected_value.as_ref() + ); } } @@ -1105,7 +1145,7 @@ mod test { let mut walker = RefWalker::new(&mut tree, PanicSource 
{}); let (proof, absence, ..) = walker - .create_full_proof(vec![].as_slice(), None, None, true) + .create_full_proof(vec![].as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1138,16 +1178,10 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); - let res = verify_query( - bytes.as_slice(), - &Query::new(), - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); + let res = Query::new() + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); assert!(res.result_set.is_empty()); } @@ -1156,9 +1190,9 @@ mod test { let mut tree = make_3_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Key(vec![5])]; + let query_items = vec![QueryItem::Key(vec![5])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1196,20 +1230,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![5], vec![5])]); } #[test] @@ -1217,9 +1245,9 @@ mod test { let mut tree = make_3_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Key(vec![3])]; + let query_items = vec![QueryItem::Key(vec![3])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1257,20 +1285,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![3], vec![3])]); } #[test] @@ -1278,9 +1300,9 @@ mod test { let mut tree = make_3_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Key(vec![3]), QueryItem::Key(vec![7])]; + let query_items = vec![QueryItem::Key(vec![3]), QueryItem::Key(vec![7])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1322,20 +1344,17 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![3], vec![3]), (vec![7], vec![7])]); + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![3], vec![3]), (vec![7], vec![7])] + ); } #[test] @@ -1343,13 +1362,13 @@ mod test { let mut tree = make_3_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![ + let query_items = vec![ QueryItem::Key(vec![3]), QueryItem::Key(vec![5]), QueryItem::Key(vec![7]), ]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1395,22 +1414,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![3], vec![3]), (vec![5], vec![5]), (vec![7], vec![7])], + vec![(vec![3], vec![3]), (vec![5], vec![5]), (vec![7], vec![7])] ); } @@ -1419,9 +1432,9 @@ mod test { let mut tree = make_3_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Key(vec![8])]; + let query_items = vec![QueryItem::Key(vec![8])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1458,20 +1471,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![]); + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!(res.result_set, Vec::<(Vec, Vec)>::new()); } #[test] @@ -1479,9 +1486,9 @@ mod test { let mut tree = make_3_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Key(vec![6])]; + let query_items = vec![QueryItem::Key(vec![6])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1521,20 +1528,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![]); + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!(res.result_set, Vec::<(Vec<u8>, Vec<u8>)>::new()); } #[test] @@ -1614,14 +1615,14 @@ mod test { let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![ + let query_items = vec![ QueryItem::Key(vec![1]), QueryItem::Key(vec![2]), QueryItem::Key(vec![3]), QueryItem::Key(vec![4]), ]; let (proof, absence, ..)
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1715,27 +1716,21 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![1], vec![1]), (vec![2], vec![2]), (vec![3], vec![3]), (vec![4], vec![4]), - ], + ] ); } @@ -1787,11 +1782,11 @@ mod test { let mut tree = make_tree_seq(10); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Range( + let query_items = vec![QueryItem::Range( vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -1868,162 +1863,50 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), - ], + ] ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); - - // skip 1 element - let mut tree = make_tree_seq(10); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::Range( - vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( - res.result_set, - vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], - ); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip 2 elements - let mut tree = make_tree_seq(10); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::Range( - vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(0)); - - // skip all elements - let mut tree = make_tree_seq(10); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::Range( - vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(200), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(198)); // right to left test let mut tree = make_tree_seq(10); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Range( + let query_items = vec![QueryItem::Range( vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { + let mut query = Query::new_with_direction(false); + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), - ], + ] ); } @@ -2032,11 +1915,11 @@ mod test { let mut tree = make_tree_seq(10); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeInclusive( + let query_items = vec![QueryItem::RangeInclusive( vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2114,300 +1997,143 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60]), - ], + ] ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); - // skip 1 element + // right_to_left proof let mut tree = make_tree_seq(10); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeInclusive( + let query_items = vec![QueryItem::RangeInclusive( vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], )]; let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], + vec![ + (vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60]), + (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), + (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), + ] ); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); + } - // skip 2 elements - let mut tree = make_tree_seq(10); + #[test] + fn range_from_proof() { + let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeInclusive( - vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) + let query_items = vec![QueryItem::RangeFrom(vec![5]..)]; + let (proof, absence, ..) 
= walker + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); + let mut iter = proof.iter(); + assert_eq!( + iter.next(), + Some(&Op::Push(Node::Hash([ + 85, 217, 56, 226, 204, 53, 103, 145, 201, 33, 178, 80, 207, 194, 104, 128, 199, + 145, 156, 208, 152, 255, 209, 24, 140, 222, 204, 193, 211, 26, 118, 58 + ]))) + ); + assert_eq!( + iter.next(), + Some(&Op::Push(Node::KVValueHash( + vec![5], + vec![5], + [ + 116, 30, 0, 135, 25, 118, 86, 14, 12, 107, 215, 214, 133, 122, 48, 45, 180, 21, + 158, 223, 88, 148, 181, 149, 189, 65, 121, 19, 81, 118, 11, 106 + ] + ))) + ); + assert_eq!(iter.next(), Some(&Op::Parent)); + assert_eq!( + iter.next(), + Some(&Op::Push(Node::KVValueHash( + vec![7], + vec![7], + [ + 63, 193, 78, 215, 236, 222, 32, 58, 144, 66, 94, 225, 145, 233, 219, 89, 102, + 51, 109, 115, 127, 3, 152, 236, 147, 183, 100, 81, 123, 109, 244, 0 + ] + ))) + ); + assert_eq!( + iter.next(), + Some(&Op::Push(Node::KVValueHash( + vec![8], + vec![8], + [ + 205, 24, 196, 78, 21, 130, 132, 58, 44, 29, 21, 175, 68, 254, 158, 189, 49, + 158, 250, 151, 137, 22, 160, 107, 216, 238, 129, 230, 199, 251, 197, 51 + ] + ))) + ); + assert_eq!(iter.next(), Some(&Op::Parent)); + assert_eq!(iter.next(), Some(&Op::Child)); + assert!(iter.next().is_none()); + assert_eq!(absence, (false, true)); + let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60])], - ); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip all 
elements - let mut tree = make_tree_seq(10); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeInclusive( - vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(200), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(197)); - - // right_to_left proof - let mut tree = make_tree_seq(10); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeInclusive( - vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, false) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - - compare_result_tuples( - res.result_set, - vec![ - (vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60]), - (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), - (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), - ], - ); - - let mut tree = make_tree_seq(10); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeInclusive( - vec![0, 0, 0, 0, 0, 0, 0, 5]..=vec![0, 0, 0, 0, 0, 0, 0, 7], - )]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, Some(2), false) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - None, - Some(2), - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - - compare_result_tuples( - res.result_set, - vec![(vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60])], - ); - assert_eq!(res.limit, None); - assert_eq!(res.offset, Some(0)); - } - - #[test] - fn range_from_proof() { - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; - let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, true) - .unwrap() - .expect("create_proof errored"); - - let mut iter = proof.iter(); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::Hash([ - 85, 217, 56, 226, 204, 53, 103, 145, 201, 33, 178, 80, 207, 194, 104, 128, 199, - 145, 156, 208, 152, 255, 209, 24, 140, 222, 204, 193, 211, 26, 118, 58 - ]))) - ); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVValueHash( - vec![5], - vec![5], - [ - 116, 30, 0, 135, 25, 118, 86, 14, 12, 107, 215, 214, 133, 122, 48, 45, 180, 21, - 158, 223, 88, 148, 181, 149, 189, 65, 121, 19, 81, 118, 11, 106 - ] - ))) - ); - assert_eq!(iter.next(), Some(&Op::Parent)); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVValueHash( - vec![7], - vec![7], - [ - 63, 193, 78, 215, 236, 222, 32, 58, 144, 66, 94, 225, 145, 233, 219, 89, 102, - 51, 109, 115, 127, 3, 152, 236, 147, 183, 100, 81, 123, 109, 244, 0 - ] - ))) - ); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVValueHash( - vec![8], - vec![8], - [ - 205, 24, 196, 78, 21, 130, 132, 58, 44, 29, 21, 175, 68, 254, 158, 189, 49, - 158, 250, 151, 137, 22, 160, 107, 216, 238, 129, 230, 199, 251, 197, 51 - ] - 
))) - ); - assert_eq!(iter.next(), Some(&Op::Parent)); - assert_eq!(iter.next(), Some(&Op::Child)); - assert!(iter.next().is_none()); - assert_eq!(absence, (false, true)); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( - res.result_set, - vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])], + vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])] ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); // Limit result set to 1 item let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; + let query_items = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::Key(vec![5])]; + let equivalent_query_items = vec![QueryItem::Key(vec![5])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2417,40 +2143,33 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; + let query_items = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(query_items.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![ + let equivalent_query_items = vec![ QueryItem::Key(vec![5]), QueryItem::Key(vec![6]), QueryItem::Key(vec![7]), ]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2460,36 +2179,32 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![7], vec![7])]); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![5], vec![5]), (vec![7], vec![7])] + ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 100 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; + let query_items = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(query_items.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; + let equivalent_query_items = vec![QueryItem::RangeFrom(vec![5]..)]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2499,123 +2214,26 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(100), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])], + vec![(vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8])] ); assert_eq!(res.limit, Some(97)); - assert_eq!(res.offset, None); - - // skip 1 element - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip 2 elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![8], vec![8])]); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip all elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(200), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(197)); // right_to_left test let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; + let query_items = vec![QueryItem::RangeFrom(vec![5]..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -2624,54 +2242,17 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])], + vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])] ); - - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFrom(vec![5]..)]; - let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), Some(1), false) - .unwrap() - .expect("create_proof errored"); - - assert_eq!(absence, (true, false)); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - Some(1), - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![7], vec![7]), (vec![5], vec![5])]); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); } #[test] @@ -2679,9 +2260,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; + let query_items = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2758,44 +2339,37 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![2], vec![2]), (vec![3], vec![3]), (vec![4], vec![4]), (vec![5], vec![5]), - ], + ] ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); // Limit result set to 1 item let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; + let query_items = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![2])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![2])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2805,36 +2379,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; + let query_items = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(query_items.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![3])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![3])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -2844,167 +2411,66 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![2], vec![2]), (vec![3], vec![3])] + ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 100 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; + let query_items = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(query_items.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeTo(..vec![6])]; + let equivalent_query_items = vec![QueryItem::RangeTo(..vec![6])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) - .unwrap() - .expect("create_proof errored"); - - assert_eq!(proof, equivalent_proof); - assert_eq!(absence, equivalent_absence); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(100), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( - res.result_set, - vec![ - (vec![2], vec![2]), - (vec![3], vec![3]), - (vec![4], vec![4]), - (vec![5], vec![5]), - ], - ); - assert_eq!(res.limit, Some(96)); - assert_eq!(res.offset, None); - - // skip 1 element - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip 2 elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip all elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) - .unwrap() - .expect("create_proof errored"); + assert_eq!(proof, equivalent_proof); + assert_eq!(absence, equivalent_absence); let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(200), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(196)); + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![ + (vec![2], vec![2]), + (vec![3], vec![3]), + (vec![4], vec![4]), + (vec![5], vec![5]), + ] + ); + assert_eq!(res.limit, Some(96)); // right_to_left proof let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; + let query_items = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -3013,35 +2479,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![5], vec![5]), (vec![4], vec![4]), (vec![3], vec![3]), (vec![2], vec![2]), - ], + ] ); let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeTo(..vec![6])]; + let query_items = vec![QueryItem::RangeTo(..vec![6])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(2), None, false) + .create_full_proof(query_items.as_slice(), Some(2), false) .unwrap() .expect("create_proof errored"); @@ -3050,22 +2510,18 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); + let res = query + .verify_proof(bytes.as_slice(), Some(2), false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![5], vec![5]), (vec![4], vec![4])] + ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); } #[test] @@ -3073,9 +2529,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3152,44 +2608,37 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![2], vec![2]), (vec![3], vec![3]), (vec![4], vec![4]), (vec![5], vec![5]), - ], + ] ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); // Limit result set to 1 item let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![2])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![2])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3199,36 +2648,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(query_items.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![3])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![3])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3238,36 +2680,32 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![2], vec![2]), (vec![3], vec![3])] + ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 100 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(query_items.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3277,128 +2715,31 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(100), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![2], vec![2]), (vec![3], vec![3]), (vec![4], vec![4]), (vec![5], vec![5]), - ], + ] ); assert_eq!(res.limit, Some(96)); - assert_eq!(res.offset, None); - - // skip 1 element - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![3], vec![3])]); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip 2 elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip all elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(200), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(196)); // right_to_left proof let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -3407,35 +2748,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![5], vec![5]), (vec![4], vec![4]), (vec![3], vec![3]), (vec![2], vec![2]), - ], + ] ); let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeToInclusive(..=vec![6])]; + let query_items = vec![QueryItem::RangeToInclusive(..=vec![6])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), false) + .create_full_proof(query_items.as_slice(), Some(1), false) .unwrap() .expect("create_proof errored"); @@ -3444,22 +2779,15 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(1), - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); + let res = query + .verify_proof(bytes.as_slice(), Some(1), false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![5], vec![5])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); } #[test] @@ -3467,9 +2795,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![RangeAfter(vec![3]..)]; + let query_items = vec![RangeAfter(vec![3]..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3546,44 +2874,37 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8]), - ], + ] ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); // Limit result set to 1 item let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; + let query_items = vec![QueryItem::RangeAfter(vec![3]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3593,36 +2914,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; + let query_items = vec![QueryItem::RangeAfter(vec![3]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(query_items.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3632,36 +2946,32 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![4], vec![4]), (vec![5], vec![5])] + ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 100 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; + let query_items = vec![QueryItem::RangeAfter(vec![3]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(query_items.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; + let equivalent_query_items = vec![QueryItem::RangeAfter(vec![3]..)]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3671,128 +2981,31 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(100), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8]), - ], + ] ); assert_eq!(res.limit, Some(96)); - assert_eq!(res.offset, None); - - // skip 1 element - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip 2 elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip all elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfter(vec![3]..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(200), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(196)); // right_to_left proof let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![RangeAfter(vec![3]..)]; + let query_items = vec![RangeAfter(vec![3]..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -3801,35 +3014,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5]), (vec![4], vec![4]), - ], + ] ); let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![RangeAfter(vec![3]..)]; + let query_items = vec![RangeAfter(vec![3]..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(3), None, false) + .create_full_proof(query_items.as_slice(), Some(3), false) .unwrap() .expect("create_proof errored"); @@ -3838,25 +3045,18 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(3), - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), Some(3), false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])], + vec![(vec![8], vec![8]), (vec![7], vec![7]), (vec![5], vec![5])] ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); } #[test] @@ -3864,9 +3064,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3938,36 +3138,32 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![4], vec![4]), (vec![5], vec![5])] + ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); // Limit result set to 1 item let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -3977,36 +3173,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(query_items.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4016,36 +3205,32 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![4], vec![4]), (vec![5], vec![5])] + ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 100 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(query_items.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let equivalent_query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4055,120 +3240,26 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(100), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); - assert_eq!(res.limit, Some(98)); - assert_eq!(res.offset, None); - - // skip 1 element - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip 2 elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(0)); - - // skip all elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(200), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(198)); + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![4], vec![4]), (vec![5], vec![5])] + ); + assert_eq!(res.limit, Some(98)); // right_to_left let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -4177,27 +3268,24 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![5], vec![5]), (vec![4], vec![4])] + ); let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; + let query_items = vec![QueryItem::RangeAfterTo(vec![3]..vec![7])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(300), Some(1), false) + .create_full_proof(query_items.as_slice(), Some(300), false) .unwrap() .expect("create_proof errored"); @@ -4206,22 +3294,18 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(300), - Some(1), - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); - assert_eq!(res.limit, Some(299)); - assert_eq!(res.offset, Some(0)); + let res = query + .verify_proof(bytes.as_slice(), Some(300), false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![5], vec![5]), (vec![4], vec![4])] + ); + assert_eq!(res.limit, Some(298)); } #[test] @@ -4229,9 +3313,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; + let query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4302,39 +3386,32 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])], + vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])] ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); // Limit result set to 1 item let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; + let query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![4])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4344,36 +3421,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![4], vec![4])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; + let query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(query_items.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![5])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4383,36 +3453,32 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![4], vec![4]), (vec![5], vec![5])] + ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 100 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; + let query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(query_items.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; + let equivalent_query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4422,149 +3488,45 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(100), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])], + vec![(vec![4], vec![4]), (vec![5], vec![5]), (vec![7], vec![7])] ); assert_eq!(res.limit, Some(97)); - assert_eq!(res.offset, None); - // skip 1 element + // right_to_left proof let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(1), true) + let query_items = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; + let (proof, absence, ..) 
= walker + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5])]); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip 2 elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) - .unwrap() - .expect("create_proof errored"); + assert_eq!(absence, (false, false)); let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![7], vec![7])]); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip all elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(200), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(197)); - - // right_to_left proof - // let mut tree = make_6_node_tree(); - // let mut walker = RefWalker::new(&mut tree, PanicSource {}); - // - // let queryitems = - // vec![QueryItem::RangeAfterToInclusive(vec![3]..=vec![7])]; - // let (proof, absence, ..) = walker - // .create_full_proof(queryitems.as_slice(), None, None, false) - // .unwrap() - // .expect("create_proof errored"); - // - // assert_eq!(absence, (false, false)); - // - // let mut bytes = vec![]; - // encode_into(proof.iter(), &mut bytes); - // let mut query = Query::new(); - // for item in queryitems { - // query.insert_item(item); - // } - // let res = verify_query( - // bytes.as_slice(), - // &query, - // None, - // None, - // false, - // tree.hash().unwrap(), - // ) - // .unwrap() - // .unwrap(); - // compare_result_tuples( - // res.result_set, - // vec![(vec![7], vec![7]), (vec![5], vec![5]), (vec![4], vec![4])], - // ); + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![7], vec![7]), (vec![5], vec![5]), (vec![4], vec![4])] + ); } #[test] @@ -4572,9 +3534,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFull(..)]; + let query_items = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4657,20 +3619,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![2], vec![2]), @@ -4679,24 +3635,23 @@ mod test { (vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8]), - ], + ] ); assert_eq!(res.limit, None); - assert_eq!(res.offset, None); // Limit result set to 1 item let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFull(..)]; + let query_items = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![2])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![2])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4706,36 +3661,29 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 2 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFull(..)]; + let query_items = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(2), None, true) + .create_full_proof(query_items.as_slice(), Some(2), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeToInclusive(..=vec![3])]; + let equivalent_query_items = vec![QueryItem::RangeToInclusive(..=vec![3])]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4745,36 +3693,32 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2]), (vec![3], vec![3])]); + let res = query + .verify_proof(bytes.as_slice(), Some(2), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![2], vec![2]), (vec![3], vec![3])] + ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, None); // Limit result set to 100 items let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFull(..)]; + let query_items = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(100), None, true) + .create_full_proof(query_items.as_slice(), Some(100), true) .unwrap() .expect("create_proof errored"); - let equivalent_queryitems = vec![QueryItem::RangeFull(..)]; + let equivalent_query_items = vec![QueryItem::RangeFull(..)]; let (equivalent_proof, equivalent_absence, ..) 
= walker - .create_full_proof(equivalent_queryitems.as_slice(), None, None, true) + .create_full_proof(equivalent_query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -4784,20 +3728,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(100), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), Some(100), true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![2], vec![2]), @@ -4806,111 +3744,17 @@ mod test { (vec![5], vec![5]), (vec![7], vec![7]), (vec![8], vec![8]), - ], + ] ); assert_eq!(res.limit, Some(94)); - assert_eq!(res.offset, None); - - // skip 1 element - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFull(..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(3), Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(3), - Some(1), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( - res.result_set, - vec![(vec![3], vec![3]), (vec![4], vec![4]), (vec![5], vec![5])], - ); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip 2 elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFull(..)]; - let (proof, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(2), Some(2), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4]), (vec![5], vec![5])]); - assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); - - // skip all elements - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFull(..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(200), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(200), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![]); - assert_eq!(res.limit, Some(1)); - assert_eq!(res.offset, Some(194)); // right_to_left proof let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFull(..)]; + let query_items = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -4919,20 +3763,14 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![8], vec![8]), @@ -4941,15 +3779,15 @@ mod test { (vec![4], vec![4]), (vec![3], vec![3]), (vec![2], vec![2]), - ], + ] ); let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFull(..)]; + let query_items = vec![QueryItem::RangeFull(..)]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), Some(2), Some(2), false) + .create_full_proof(query_items.as_slice(), Some(2), false) .unwrap() .expect("create_proof errored"); @@ -4958,22 +3796,18 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(2), - Some(2), - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![5], vec![5]), (vec![4], vec![4])]); + let res = query + .verify_proof(bytes.as_slice(), Some(2), false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( + res.result_set, + vec![(vec![8], vec![8]), (vec![7], vec![7])] + ); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); } #[test] @@ -4981,15 +3815,14 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFrom(vec![2]..)]; - let (proof, _, limit, offset) = walker - .create_full_proof(queryitems.as_slice(), Some(1), None, true) + let query_items = vec![QueryItem::RangeFrom(vec![2]..)]; + let (proof, _, limit) = walker + .create_full_proof(query_items.as_slice(), Some(1), true) .unwrap() .expect("create_proof errored"); // TODO: Add this test for other range types assert_eq!(limit, Some(0)); - assert_eq!(offset, None); let mut iter = proof.iter(); assert_eq!( @@ -5040,106 +3873,15 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![2], vec![2])]); - assert_eq!(res.limit, Some(0)); 
- assert_eq!(res.offset, None); - } - - #[test] - fn proof_with_offset() { - let mut tree = make_6_node_tree(); - let mut walker = RefWalker::new(&mut tree, PanicSource {}); - - let queryitems = vec![QueryItem::RangeFrom(vec![2]..)]; - let (proof, ..) = walker - .create_full_proof(queryitems.as_slice(), Some(1), Some(2), true) + let res = query + .verify_proof(bytes.as_slice(), Some(1), true, tree.hash().unwrap()) .unwrap() - .expect("create_proof errored"); - - let mut iter = proof.iter(); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVDigest( - vec![2], - [ - 183, 215, 112, 4, 15, 120, 14, 157, 239, 246, 188, 3, 138, 190, 166, 110, 16, - 139, 136, 208, 152, 209, 109, 36, 205, 116, 134, 235, 103, 16, 96, 178 - ] - ))) - ); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVDigest( - vec![3], - [ - 210, 173, 26, 11, 185, 253, 244, 69, 11, 216, 113, 81, 192, 139, 153, 104, 205, - 4, 107, 218, 102, 84, 170, 189, 186, 36, 48, 176, 169, 129, 231, 144 - ] - ))) - ); - assert_eq!(iter.next(), Some(&Op::Parent)); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVValueHash( - vec![4], - vec![4], - [ - 198, 129, 51, 156, 134, 199, 7, 21, 172, 89, 146, 71, 4, 16, 82, 205, 89, 51, - 227, 215, 139, 195, 237, 202, 159, 191, 209, 172, 156, 38, 239, 192 - ] - ))) - ); - assert_eq!(iter.next(), Some(&Op::Child)); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::KVHash([ - 61, 233, 169, 61, 231, 15, 78, 53, 219, 99, 131, 45, 44, 165, 68, 87, 7, 52, 238, - 68, 142, 211, 110, 161, 111, 220, 108, 11, 17, 31, 88, 197 - ]))) - ); - assert_eq!(iter.next(), Some(&Op::Parent)); - assert_eq!( - iter.next(), - Some(&Op::Push(Node::Hash([ - 133, 188, 175, 131, 60, 89, 221, 135, 133, 53, 205, 110, 58, 56, 128, 58, 1, 227, - 75, 122, 83, 20, 125, 44, 149, 44, 62, 130, 252, 134, 105, 200 - ]))) - ); - assert_eq!(iter.next(), Some(&Op::Child)); - assert!(iter.next().is_none()); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - 
for item in queryitems { - query.insert_item(item); - } - let res = verify_query( - bytes.as_slice(), - &query, - Some(1), - Some(2), - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples(res.result_set, vec![(vec![4], vec![4])]); + .unwrap(); + compare_result_tuples_not_optional!(res.result_set, vec![(vec![2], vec![2])]); assert_eq!(res.limit, Some(0)); - assert_eq!(res.offset, Some(0)); } #[test] @@ -5147,9 +3889,9 @@ mod test { let mut tree = make_6_node_tree(); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::RangeFrom(vec![3]..)]; + let query_items = vec![QueryItem::RangeFrom(vec![3]..)]; let (proof, absence, ..) = walker - .create_full_proof(queryitems.as_slice(), None, None, false) + .create_full_proof(query_items.as_slice(), None, false) .unwrap() .expect("create_proof errored"); @@ -5227,21 +3969,15 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); - let mut query = Query::new(); - for item in queryitems { + let mut query = Query::new_with_direction(false); + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - false, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, false, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![8], vec![8]), @@ -5249,7 +3985,7 @@ mod test { (vec![5], vec![5]), (vec![4], vec![4]), (vec![3], vec![3]), - ], + ] ); } @@ -5258,11 +3994,11 @@ mod test { let mut tree = make_tree_seq(10); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![QueryItem::Range( + let query_items = vec![QueryItem::Range( vec![0, 0, 0, 0, 0, 0, 0, 5]..vec![0, 0, 0, 0, 0, 0, 0, 6, 5], )]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -5339,25 +4075,19 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), - ], + ] ); } @@ -5366,12 +4096,12 @@ mod test { let mut tree = make_tree_seq(10); let mut walker = RefWalker::new(&mut tree, PanicSource {}); - let queryitems = vec![ + let query_items = vec![ // 7 is not inclusive QueryItem::Range(vec![0, 0, 0, 0, 0, 0, 0, 5, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7]), ]; let (proof, absence, ..) 
= walker - .create_full_proof(queryitems.as_slice(), None, None, true) + .create_full_proof(query_items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -5447,22 +4177,16 @@ mod test { let mut bytes = vec![]; encode_into(proof.iter(), &mut bytes); let mut query = Query::new(); - for item in queryitems { + for item in query_items { query.insert_item(item); } - let res = verify_query( - bytes.as_slice(), - &query, - None, - None, - true, - tree.hash().unwrap(), - ) - .unwrap() - .unwrap(); - compare_result_tuples( + let res = query + .verify_proof(bytes.as_slice(), None, true, tree.hash().unwrap()) + .unwrap() + .unwrap(); + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], + vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])] ); } @@ -5477,7 +4201,7 @@ mod test { query.insert_all(); let (proof, ..) = walker - .create_full_proof(query.items.as_slice(), None, None, true) + .create_full_proof(query.items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -5488,14 +4212,15 @@ mod test { let mut query = Query::new(); query.insert_key(vec![0, 0, 0, 0, 0, 0, 0, 6]); - let res = verify_query(bytes.as_slice(), &query, None, None, true, expected_hash) + let res = query + .verify_proof(bytes.as_slice(), None, true, expected_hash) .unwrap() .unwrap(); assert_eq!(res.result_set.len(), 1); - compare_result_tuples( + compare_result_tuples_not_optional!( res.result_set, - vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])], + vec![(vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60])] ); // 1..10 prove (2..=5, 7..10) subset (3..=4, 7..=8) @@ -5503,7 +4228,7 @@ mod test { query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 2]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); query.insert_range(vec![0, 0, 0, 0, 0, 0, 0, 7]..vec![0, 0, 0, 0, 0, 0, 0, 10]); let (proof, ..) 
= walker - .create_full_proof(query.items.as_slice(), None, None, true) + .create_full_proof(query.items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -5513,19 +4238,20 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 3]..=vec![0, 0, 0, 0, 0, 0, 0, 4]); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 7]..=vec![0, 0, 0, 0, 0, 0, 0, 8]); - let res = verify_query(bytes.as_slice(), &query, None, None, true, expected_hash) + let res = query + .verify_proof(bytes.as_slice(), None, true, expected_hash) .unwrap() .unwrap(); assert_eq!(res.result_set.len(), 4); - compare_result_tuples( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 3], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 4], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 8], vec![123; 60]), - ], + ] ); // 1..10 prove (2..=5, 6..10) subset (4..=8) @@ -5533,7 +4259,7 @@ mod test { query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 2]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); query.insert_range(vec![0, 0, 0, 0, 0, 0, 0, 6]..vec![0, 0, 0, 0, 0, 0, 0, 10]); let (proof, ..) 
= walker - .create_full_proof(query.items.as_slice(), None, None, true) + .create_full_proof(query.items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -5542,12 +4268,13 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 4]..=vec![0, 0, 0, 0, 0, 0, 0, 8]); - let res = verify_query(bytes.as_slice(), &query, None, None, true, expected_hash) + let res = query + .verify_proof(bytes.as_slice(), None, true, expected_hash) .unwrap() .unwrap(); assert_eq!(res.result_set.len(), 5); - compare_result_tuples( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 4], vec![123; 60]), @@ -5555,7 +4282,7 @@ mod test { (vec![0, 0, 0, 0, 0, 0, 0, 6], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 7], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 8], vec![123; 60]), - ], + ] ); // 1..10 prove (1..=3, 2..=5) subset (1..=5) @@ -5563,7 +4290,7 @@ mod test { query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 1]..=vec![0, 0, 0, 0, 0, 0, 0, 3]); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 2]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); let (proof, ..) 
= walker - .create_full_proof(query.items.as_slice(), None, None, true) + .create_full_proof(query.items.as_slice(), None, true) .unwrap() .expect("create_proof errored"); @@ -5572,12 +4299,13 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 1]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); - let res = verify_query(bytes.as_slice(), &query, None, None, true, expected_hash) + let res = query + .verify_proof(bytes.as_slice(), None, true, expected_hash) .unwrap() .unwrap(); assert_eq!(res.result_set.len(), 5); - compare_result_tuples( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 1], vec![123; 60]), @@ -5585,14 +4313,14 @@ mod test { (vec![0, 0, 0, 0, 0, 0, 0, 3], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 4], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), - ], + ] ); // 1..10 prove full (..) limit to 5, subset (1..=5) let mut query = Query::new(); query.insert_range_from(vec![0, 0, 0, 0, 0, 0, 0, 1]..); let (proof, ..) = walker - .create_full_proof(query.items.as_slice(), Some(5), None, true) + .create_full_proof(query.items.as_slice(), Some(5), true) .unwrap() .expect("create_proof errored"); @@ -5601,12 +4329,13 @@ mod test { let mut query = Query::new(); query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 1]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); - let res = verify_query(bytes.as_slice(), &query, Some(5), None, true, expected_hash) + let res = query + .verify_proof(bytes.as_slice(), Some(5), true, expected_hash) .unwrap() .unwrap(); assert_eq!(res.result_set.len(), 5); - compare_result_tuples( + compare_result_tuples_not_optional!( res.result_set, vec![ (vec![0, 0, 0, 0, 0, 0, 0, 1], vec![123; 60]), @@ -5614,35 +4343,7 @@ mod test { (vec![0, 0, 0, 0, 0, 0, 0, 3], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 4], vec![123; 60]), (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), - ], - ); - - // 1..10 prove full (..) 
limit to 5, subset (1..=5) - let mut query = Query::new(); - query.insert_range_from(vec![0, 0, 0, 0, 0, 0, 0, 1]..); - let (proof, ..) = walker - .create_full_proof(query.items.as_slice(), None, Some(1), true) - .unwrap() - .expect("create_proof errored"); - - let mut bytes = vec![]; - encode_into(proof.iter(), &mut bytes); - - let mut query = Query::new(); - query.insert_range_inclusive(vec![0, 0, 0, 0, 0, 0, 0, 1]..=vec![0, 0, 0, 0, 0, 0, 0, 5]); - let res = verify_query(bytes.as_slice(), &query, None, Some(1), true, expected_hash) - .unwrap() - .unwrap(); - - assert_eq!(res.result_set.len(), 4); - compare_result_tuples( - res.result_set, - vec![ - (vec![0, 0, 0, 0, 0, 0, 0, 2], vec![123; 60]), - (vec![0, 0, 0, 0, 0, 0, 0, 3], vec![123; 60]), - (vec![0, 0, 0, 0, 0, 0, 0, 4], vec![123; 60]), - (vec![0, 0, 0, 0, 0, 0, 0, 5], vec![123; 60]), - ], + ] ); } @@ -5665,7 +4366,7 @@ mod test { let mut query = Query::new(); query.insert_range_from(vec![0, 0, 0, 0, 0, 0, 0, 1]..); let (proof, ..) 
= walker - .create_full_proof(query.items.as_slice(), Some(3), None, true) + .create_full_proof(query.items.as_slice(), Some(3), true) .unwrap() .expect("create_proof errored"); @@ -5675,38 +4376,35 @@ mod test { // Try to query 4 let mut query = Query::new(); query.insert_key(vec![0, 0, 0, 0, 0, 0, 0, 4]); - assert!( - verify_query(bytes.as_slice(), &query, Some(3), None, true, expected_hash) - .unwrap() - .is_err() - ); + assert!(query + .verify_proof(bytes.as_slice(), Some(3), true, expected_hash) + .unwrap() + .is_err()); // if limit offset parameters are different from generation then proof // verification returns an error Try superset proof with increased limit let mut query = Query::new(); query.insert_range_from(vec![0, 0, 0, 0, 0, 0, 0, 1]..); - assert!( - verify_query(bytes.as_slice(), &query, Some(4), None, true, expected_hash) - .unwrap() - .is_err() - ); + assert!(query + .verify_proof(bytes.as_slice(), Some(4), true, expected_hash) + .unwrap() + .is_err()); // Try superset proof with less limit let mut query = Query::new(); query.insert_range_from(vec![0, 0, 0, 0, 0, 0, 0, 1]..); - assert!( - verify_query(bytes.as_slice(), &query, Some(2), None, true, expected_hash) - .unwrap() - .is_err() - ); + assert!(query + .verify_proof(bytes.as_slice(), Some(2), true, expected_hash) + .unwrap() + .is_err()); } #[test] fn query_from_vec() { - let queryitems = vec![QueryItem::Range( + let query_items = vec![QueryItem::Range( vec![0, 0, 0, 0, 0, 0, 0, 5, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )]; - let query = Query::from(queryitems); + let query = Query::from(query_items); let mut expected = Vec::new(); expected.push(QueryItem::Range( @@ -5722,7 +4420,7 @@ mod test { vec![0, 0, 0, 0, 0, 0, 5, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )); let query_vec: Vec = query.into(); - let expected = vec![QueryItem::Range( + let expected = [QueryItem::Range( vec![0, 0, 0, 0, 0, 0, 5, 5]..vec![0, 0, 0, 0, 0, 0, 0, 7], )]; assert_eq!( @@ -5737,8 +4435,8 @@ mod test { #[test] fn 
query_item_from_vec_u8() { - let queryitems: Vec = vec![42]; - let query = QueryItem::from(queryitems); + let query_items: Vec = vec![42]; + let query = QueryItem::from(query_items); let expected = QueryItem::Key(vec![42]); assert_eq!(query, expected); @@ -5755,7 +4453,7 @@ mod test { let mut walker = RefWalker::new(&mut tree, PanicSource {}); let (proof, ..) = walker - .create_full_proof(vec![QueryItem::Key(vec![5])].as_slice(), None, None, true) + .create_full_proof(vec![QueryItem::Key(vec![5])].as_slice(), None, true) .unwrap() .expect("failed to create proof"); let mut bytes = vec![]; @@ -5780,7 +4478,7 @@ mod test { let mut walker = RefWalker::new(&mut tree, PanicSource {}); let (proof, ..) = walker - .create_full_proof(vec![QueryItem::Key(vec![5])].as_slice(), None, None, true) + .create_full_proof(vec![QueryItem::Key(vec![5])].as_slice(), None, true) .unwrap() .expect("failed to create proof"); let mut bytes = vec![]; @@ -5806,7 +4504,6 @@ mod test { .collect::>() .as_slice(), None, - None, true, ) .unwrap() @@ -5819,7 +4516,8 @@ mod test { query.insert_key(key.clone()); } - let _result = verify_query(bytes.as_slice(), &query, None, None, true, [42; 32]) + let _result = query + .verify_proof(bytes.as_slice(), None, true, [42; 32]) .unwrap() .expect("verify failed"); } diff --git a/merk/src/proofs/query/query_item/mod.rs b/merk/src/proofs/query/query_item/mod.rs index 63f3cc0ac..7c81a27e4 100644 --- a/merk/src/proofs/query/query_item/mod.rs +++ b/merk/src/proofs/query/query_item/mod.rs @@ -5,17 +5,19 @@ mod merge; use std::{ cmp, cmp::Ordering, + fmt, hash::Hash, ops::{Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive}, }; -#[cfg(any(feature = "full", feature = "verify"))] +#[cfg(feature = "full")] use grovedb_costs::{CostContext, CostsExt, OperationCost}; #[cfg(feature = "full")] use grovedb_storage::RawIterator; #[cfg(any(feature = "full", feature = "verify"))] use crate::error::Error; +use crate::proofs::hex_to_ascii; #[cfg(any(feature 
= "full", feature = "verify"))] /// A `QueryItem` represents a key or range of keys to be included in a proof. @@ -33,6 +35,50 @@ pub enum QueryItem { RangeAfterToInclusive(RangeInclusive>), } +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for QueryItem { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + QueryItem::Key(key) => write!(f, "Key({})", hex_to_ascii(key)), + QueryItem::Range(range) => write!( + f, + "Range({} .. {})", + hex_to_ascii(&range.start), + hex_to_ascii(&range.end) + ), + QueryItem::RangeInclusive(range) => write!( + f, + "RangeInclusive({} ..= {})", + hex_to_ascii(range.start()), + hex_to_ascii(range.end()) + ), + QueryItem::RangeFull(_) => write!(f, "RangeFull"), + QueryItem::RangeFrom(range) => { + write!(f, "RangeFrom({} ..)", hex_to_ascii(&range.start)) + } + QueryItem::RangeTo(range) => write!(f, "RangeTo(.. {})", hex_to_ascii(&range.end)), + QueryItem::RangeToInclusive(range) => { + write!(f, "RangeToInclusive(..= {})", hex_to_ascii(&range.end)) + } + QueryItem::RangeAfter(range) => { + write!(f, "RangeAfter({} <..)", hex_to_ascii(&range.start)) + } + QueryItem::RangeAfterTo(range) => write!( + f, + "RangeAfterTo({} <.. 
{})", + hex_to_ascii(&range.start), + hex_to_ascii(&range.end) + ), + QueryItem::RangeAfterToInclusive(range) => write!( + f, + "RangeAfterToInclusive({} <..= {})", + hex_to_ascii(range.start()), + hex_to_ascii(range.end()) + ), + } + } +} + #[cfg(any(feature = "full", feature = "verify"))] impl Hash for QueryItem { fn hash(&self, state: &mut H) { @@ -381,7 +427,7 @@ impl QueryItem { } #[cfg(any(feature = "full", feature = "verify"))] - fn compare(a: &[u8], b: &[u8]) -> cmp::Ordering { + pub fn compare(a: &[u8], b: &[u8]) -> cmp::Ordering { for (ai, bi) in a.iter().zip(b.iter()) { match ai.cmp(bi) { Ordering::Equal => continue, diff --git a/merk/src/proofs/query/verify.rs b/merk/src/proofs/query/verify.rs index 39ff471ad..e1d565114 100644 --- a/merk/src/proofs/query/verify.rs +++ b/merk/src/proofs/query/verify.rs @@ -1,18 +1,23 @@ +#[cfg(feature = "full")] use std::collections::LinkedList; +use std::fmt; use grovedb_costs::{cost_return_on_error, CostResult, CostsExt, OperationCost}; #[cfg(feature = "full")] -use crate::proofs::query::{Map, MapBuilder}; +use crate::proofs::{ + query::{Map, MapBuilder}, + Op, +}; use crate::{ error::Error, - proofs::{tree::execute, Decoder, Node, Op, Query}, + proofs::{hex_to_ascii, tree::execute, Decoder, Node, Query}, tree::value_hash, CryptoHash as MerkHash, CryptoHash, }; -#[cfg(any(feature = "full", feature = "verify"))] -pub type ProofAbsenceLimitOffset = (LinkedList, (bool, bool), Option, Option); +#[cfg(feature = "full")] +pub type ProofAbsenceLimit = (LinkedList, (bool, bool), Option); #[cfg(feature = "full")] /// Verify proof against expected hash @@ -37,266 +42,406 @@ pub fn verify(bytes: &[u8], expected_hash: MerkHash) -> CostResult { }) } -#[cfg(any(feature = "full", feature = "verify"))] -/// Verifies the encoded proof with the given query -/// -/// Every key in `keys` is checked to either have a key/value pair in the proof, -/// or to have its absence in the tree proven. 
-/// -/// Returns `Err` if the proof is invalid, or a list of proven values associated -/// with `keys`. For example, if `keys` contains keys `A` and `B`, the returned -/// list will contain 2 elements, the value of `A` and the value of `B`. Keys -/// proven to be absent in the tree will have an entry of `None`, keys that have -/// a proven value will have an entry of `Some(value)`. -pub fn execute_proof( - bytes: &[u8], - query: &Query, - limit: Option, - offset: Option, - left_to_right: bool, -) -> CostResult<(MerkHash, ProofVerificationResult), Error> { - let mut cost = OperationCost::default(); - - let mut output = Vec::with_capacity(query.len()); - let mut last_push = None; - let mut query = query.directional_iter(left_to_right).peekable(); - let mut in_range = false; - let mut current_limit = limit; - let mut current_offset = offset; +#[derive(Copy, Clone, Debug)] +pub struct VerifyOptions { + /// When set to true, this will give back absence proofs for any query items + /// that are keys. This means QueryItem::Key(), and not the ranges. + pub absence_proofs_for_non_existing_searched_keys: bool, + /// Verifies that we have all the data. Todo: verify that this works + /// properly + pub verify_proof_succinctness: bool, + /// Should return empty trees in the result? 
+ pub include_empty_trees_in_result: bool, +} - let ops = Decoder::new(bytes); +impl Default for VerifyOptions { + fn default() -> Self { + VerifyOptions { + absence_proofs_for_non_existing_searched_keys: true, + verify_proof_succinctness: true, + include_empty_trees_in_result: false, + } + } +} - let root_wrapped = execute(ops, true, |node| { - let mut execute_node = |key: &Vec, - value: Option<&Vec>, - value_hash: CryptoHash| - -> Result<_, Error> { - while let Some(item) = query.peek() { - // get next item in query - let query_item = *item; - let (lower_bound, start_non_inclusive) = query_item.lower_bound(); - let (upper_bound, end_inclusive) = query_item.upper_bound(); - - // terminate if we encounter a node before the current query item. - // this means a node less than the current query item for left to right. - // and a node greater than the current query item for right to left. - let terminate = if left_to_right { - // if the query item is lower unbounded, then a node cannot be less than it. - // checks that the lower bound of the query item not greater than the key - // if they are equal make sure the start is inclusive - !query_item.lower_unbounded() - && ((lower_bound.expect("confirmed not unbounded") > key.as_slice()) - || (start_non_inclusive - && lower_bound.expect("confirmed not unbounded") == key.as_slice())) +impl Query { + #[cfg(any(feature = "full", feature = "verify"))] + /// Verifies the encoded proof with the given query + /// + /// Every key in `keys` is checked to either have a key/value pair in the + /// proof, or to have its absence in the tree proven. + /// + /// Returns `Err` if the proof is invalid, or a list of proven values + /// associated with `keys`. For example, if `keys` contains keys `A` and + /// `B`, the returned list will contain 2 elements, the value of `A` and + /// the value of `B`. 
Keys proven to be absent in the tree will have an + /// entry of `None`, keys that have a proven value will have an entry of + /// `Some(value)`. + pub fn execute_proof( + &self, + bytes: &[u8], + limit: Option, + left_to_right: bool, + ) -> CostResult<(MerkHash, ProofVerificationResult), Error> { + #[cfg(feature = "proof_debug")] + { + println!( + "executing proof with limit {:?} going {} using query {}", + limit, + if left_to_right { + "left to right" } else { - !query_item.upper_unbounded() - && ((upper_bound.expect("confirmed not unbounded") < key.as_slice()) - || (!end_inclusive - && upper_bound.expect("confirmed not unbounded") == key.as_slice())) - }; - if terminate { - break; - } + "right to left" + }, + self + ); + } + let mut cost = OperationCost::default(); - if !in_range { - // this is the first data we have encountered for this query item - if left_to_right { - // ensure lower bound of query item is proven - match last_push { - // lower bound is proven - we have an exact match - // ignoring the case when the lower bound is unbounded - // as it's not possible the get an exact key match for - // an unbounded value - _ if Some(key.as_slice()) == query_item.lower_bound().0 => {} - - // lower bound is proven - this is the leftmost node - // in the tree - None => {} - - // lower bound is proven - the preceding tree node - // is lower than the bound - Some(Node::KV(..)) => {} - Some(Node::KVDigest(..)) => {} - Some(Node::KVRefValueHash(..)) => {} - Some(Node::KVValueHash(..)) => {} - - // cannot verify lower bound - we have an abridged - // tree so we cannot tell what the preceding key was - Some(_) => { - return Err(Error::InvalidProofError( - "Cannot verify lower bound of queried range".to_string(), - )); - } - } + let mut output = Vec::with_capacity(self.len()); + let mut last_push = None; + let mut query = self.directional_iter(left_to_right).peekable(); + let mut in_range = false; + let original_limit = limit; + let mut current_limit = limit; + + let ops 
= Decoder::new(bytes); + + let root_wrapped = execute(ops, true, |node| { + let mut execute_node = |key: &Vec, + value: Option<&Vec>, + value_hash: CryptoHash| + -> Result<_, Error> { + while let Some(item) = query.peek() { + // get next item in query + let query_item = *item; + let (lower_bound, start_non_inclusive) = query_item.lower_bound(); + let (upper_bound, end_inclusive) = query_item.upper_bound(); + + // terminate if we encounter a node before the current query item. + // this means a node less than the current query item for left to right. + // and a node greater than the current query item for right to left. + let terminate = if left_to_right { + // if the query item is lower unbounded, then a node cannot be less than it. + // checks that the lower bound of the query item not greater than the key + // if they are equal make sure the start is inclusive + !query_item.lower_unbounded() + && ((lower_bound.expect("confirmed not unbounded") > key.as_slice()) + || (start_non_inclusive + && lower_bound.expect("confirmed not unbounded") + == key.as_slice())) } else { - // ensure upper bound of query item is proven - match last_push { - // upper bound is proven - we have an exact match - // ignoring the case when the upper bound is unbounded - // as it's not possible the get an exact key match for - // an unbounded value - _ if Some(key.as_slice()) == query_item.upper_bound().0 => {} - - // lower bound is proven - this is the rightmost node - // in the tree - None => {} - - // upper bound is proven - the preceding tree node - // is greater than the bound - Some(Node::KV(..)) => {} - Some(Node::KVDigest(..)) => {} - Some(Node::KVRefValueHash(..)) => {} - Some(Node::KVValueHash(..)) => {} - - // cannot verify upper bound - we have an abridged - // tree so we cannot tell what the previous key was - Some(_) => { - return Err(Error::InvalidProofError( - "Cannot verify upper bound of queried range".to_string(), - )); + !query_item.upper_unbounded() + && 
((upper_bound.expect("confirmed not unbounded") < key.as_slice()) + || (!end_inclusive + && upper_bound.expect("confirmed not unbounded") + == key.as_slice())) + }; + if terminate { + break; + } + + if !in_range { + // this is the first data we have encountered for this query item + if left_to_right { + // ensure lower bound of query item is proven + match last_push { + // lower bound is proven - we have an exact match + // ignoring the case when the lower bound is unbounded + // as it's not possible the get an exact key match for + // an unbounded value + _ if Some(key.as_slice()) == query_item.lower_bound().0 => {} + + // lower bound is proven - this is the leftmost node + // in the tree + None => {} + + // lower bound is proven - the preceding tree node + // is lower than the bound + Some(Node::KV(..)) => {} + Some(Node::KVDigest(..)) => {} + Some(Node::KVRefValueHash(..)) => {} + Some(Node::KVValueHash(..)) => {} + + // cannot verify lower bound - we have an abridged + // tree so we cannot tell what the preceding key was + Some(_) => { + return Err(Error::InvalidProofError( + "Cannot verify lower bound of queried range".to_string(), + )); + } + } + } else { + // ensure upper bound of query item is proven + match last_push { + // upper bound is proven - we have an exact match + // ignoring the case when the upper bound is unbounded + // as it's not possible the get an exact key match for + // an unbounded value + _ if Some(key.as_slice()) == query_item.upper_bound().0 => {} + + // lower bound is proven - this is the rightmost node + // in the tree + None => {} + + // upper bound is proven - the preceding tree node + // is greater than the bound + Some(Node::KV(..)) => {} + Some(Node::KVDigest(..)) => {} + Some(Node::KVRefValueHash(..)) => {} + Some(Node::KVValueHash(..)) => {} + + // cannot verify upper bound - we have an abridged + // tree so we cannot tell what the previous key was + Some(_) => { + return Err(Error::InvalidProofError( + "Cannot verify upper 
bound of queried range".to_string(), + )); + } } } } - } - if left_to_right { - if query_item.upper_bound().0.is_some() - && Some(key.as_slice()) >= query_item.upper_bound().0 + if left_to_right { + if query_item.upper_bound().0.is_some() + && Some(key.as_slice()) >= query_item.upper_bound().0 + { + // at or past upper bound of range (or this was an exact + // match on a single-key queryitem), advance to next query + // item + query.next(); + in_range = false; + } else { + // have not reached upper bound, we expect more values + // to be proven in the range (and all pushes should be + // unabridged until we reach end of range) + in_range = true; + } + } else if query_item.lower_bound().0.is_some() + && Some(key.as_slice()) <= query_item.lower_bound().0 { - // at or past upper bound of range (or this was an exact + // at or before lower bound of range (or this was an exact // match on a single-key queryitem), advance to next query // item query.next(); in_range = false; } else { - // have not reached upper bound, we expect more values + // have not reached lower bound, we expect more values // to be proven in the range (and all pushes should be // unabridged until we reach end of range) in_range = true; } - } else if query_item.lower_bound().0.is_some() - && Some(key.as_slice()) <= query_item.lower_bound().0 - { - // at or before lower bound of range (or this was an exact - // match on a single-key queryitem), advance to next query - // item - query.next(); - in_range = false; - } else { - // have not reached lower bound, we expect more values - // to be proven in the range (and all pushes should be - // unabridged until we reach end of range) - in_range = true; - } - // this push matches the queried item - if query_item.contains(key) { - // if there are still offset slots, and node is of type kvdigest - // reduce the offset counter - // also, verify that a kv node was not pushed before offset is exhausted - if let Some(offset) = current_offset { - if offset > 0 && 
value.is_none() { - current_offset = Some(offset - 1); + // this push matches the queried item + if query_item.contains(key) { + if let Some(val) = value { + if let Some(limit) = current_limit { + if limit == 0 { + return Err(Error::InvalidProofError(format!( + "Proof returns more data than limit {:?}", + original_limit + ))); + } else { + current_limit = Some(limit - 1); + if current_limit == Some(0) { + in_range = false; + } + } + } + #[cfg(feature = "proof_debug")] + { + println!( + "pushing {}", + ProvedKeyOptionalValue { + key: key.clone(), + value: Some(val.clone()), + proof: value_hash, + } + ); + } + // add data to output + output.push(ProvedKeyOptionalValue { + key: key.clone(), + value: Some(val.clone()), + proof: value_hash, + }); + + // continue to next push break; - } else if offset > 0 && value.is_some() { - // inserting a kv node before exhausting offset + } else { return Err(Error::InvalidProofError( - "Proof returns data before offset is exhausted".to_string(), + "Proof is missing data for query".to_string(), )); } } + {} + // continue to next queried item + } + Ok(()) + }; - // offset is equal to zero or none - if let Some(val) = value { - if let Some(limit) = current_limit { - if limit == 0 { - return Err(Error::InvalidProofError( - "Proof returns more data than limit".to_string(), - )); - } else { - current_limit = Some(limit - 1); - if current_limit == Some(0) { - in_range = false; - } - } - } - // add data to output - output.push(ProvedKeyValue { - key: key.clone(), - value: val.clone(), - proof: value_hash, - }); - - // continue to next push - break; - } else { - return Err(Error::InvalidProofError( - "Proof is missing data for query".to_string(), - )); + match node { + Node::KV(key, value) => { + #[cfg(feature = "proof_debug")] + { + println!("Processing KV node"); + } + execute_node(key, Some(value), value_hash(value).unwrap())?; + } + Node::KVValueHash(key, value, value_hash) => { + #[cfg(feature = "proof_debug")] + { + 
println!("Processing KVValueHash node"); + } + execute_node(key, Some(value), *value_hash)?; + } + Node::KVDigest(key, value_hash) => { + #[cfg(feature = "proof_debug")] + { + println!("Processing KVDigest node"); + } + execute_node(key, None, *value_hash)?; + } + Node::KVRefValueHash(key, value, value_hash) => { + #[cfg(feature = "proof_debug")] + { + println!("Processing KVRefValueHash node"); + } + execute_node(key, Some(value), *value_hash)?; + } + Node::Hash(_) | Node::KVHash(_) | Node::KVValueHashFeatureType(..) => { + if in_range { + return Err(Error::InvalidProofError(format!( + "Proof is missing data for query range. Encountered unexpected node \ + type: {}", + node + ))); } } - {} - // continue to next queried item } - Ok(()) - }; - if let Node::KV(key, value) = node { - execute_node(key, Some(value), value_hash(value).unwrap())?; - } else if let Node::KVValueHash(key, value, value_hash) = node { - execute_node(key, Some(value), *value_hash)?; - } else if let Node::KVDigest(key, value_hash) = node { - execute_node(key, None, *value_hash)?; - } else if let Node::KVRefValueHash(key, value, value_hash) = node { - execute_node(key, Some(value), *value_hash)?; - } else if in_range { - // we encountered a queried range but the proof was abridged (saw a - // non-KV push), we are missing some part of the range - return Err(Error::InvalidProofError( - "Proof is missing data for query for range".to_string(), - )); - } + last_push = Some(node.clone()); - last_push = Some(node.clone()); + Ok(()) + }); - Ok(()) - }); + let root = cost_return_on_error!(&mut cost, root_wrapped); - let root = cost_return_on_error!(&mut cost, root_wrapped); + // we have remaining query items, check absence proof against right edge of + // tree + if query.peek().is_some() { + if current_limit == Some(0) { + } else { + match last_push { + // last node in tree was less than queried item + Some(Node::KV(..)) => {} + Some(Node::KVDigest(..)) => {} + Some(Node::KVRefValueHash(..)) => {} + 
Some(Node::KVValueHash(..)) => {} - // we have remaining query items, check absence proof against right edge of - // tree - if query.peek().is_some() { - if current_limit == Some(0) { - } else { - match last_push { - // last node in tree was less than queried item - Some(Node::KV(..)) => {} - Some(Node::KVDigest(..)) => {} - Some(Node::KVRefValueHash(..)) => {} - Some(Node::KVValueHash(..)) => {} - - // proof contains abridged data so we cannot verify absence of - // remaining query items - _ => { - return Err(Error::InvalidProofError( - "Proof is missing data for query".to_string(), - )) - .wrap_with_cost(cost) + // proof contains abridged data so we cannot verify absence of + // remaining query items + _ => { + return Err(Error::InvalidProofError( + "Proof is missing data for query".to_string(), + )) + .wrap_with_cost(cost) + } } } } + + Ok(( + root.hash().unwrap_add_cost(&mut cost), + ProofVerificationResult { + result_set: output, + limit: current_limit, + }, + )) + .wrap_with_cost(cost) } - Ok(( - root.hash().unwrap_add_cost(&mut cost), - ProofVerificationResult { - result_set: output, - limit: current_limit, - offset: current_offset, - }, - )) - .wrap_with_cost(cost) + #[cfg(any(feature = "full", feature = "verify"))] + /// Verifies the encoded proof with the given query and expected hash + pub fn verify_proof( + &self, + bytes: &[u8], + limit: Option, + left_to_right: bool, + expected_hash: MerkHash, + ) -> CostResult { + self.execute_proof(bytes, limit, left_to_right) + .map_ok(|(root_hash, verification_result)| { + if root_hash == expected_hash { + Ok(verification_result) + } else { + Err(Error::InvalidProofError(format!( + "Proof did not match expected hash\n\tExpected: \ + {expected_hash:?}\n\tActual: {root_hash:?}" + ))) + } + }) + .flatten() + } +} + +#[cfg(any(feature = "full", feature = "verify"))] +#[derive(PartialEq, Eq, Debug)] +/// Proved key-value +pub struct ProvedKeyOptionalValue { + /// Key + pub key: Vec, + /// Value + pub value: Option>, + 
/// Proof + pub proof: CryptoHash, +} + +impl From for ProvedKeyOptionalValue { + fn from(value: ProvedKeyValue) -> Self { + let ProvedKeyValue { key, value, proof } = value; + + ProvedKeyOptionalValue { + key, + value: Some(value), + proof, + } + } +} + +impl TryFrom for ProvedKeyValue { + type Error = Error; + + fn try_from(value: ProvedKeyOptionalValue) -> Result { + let ProvedKeyOptionalValue { key, value, proof } = value; + let value = value.ok_or(Error::InvalidProofError(format!( + "expected {}", + hex_to_ascii(&key) + )))?; + Ok(ProvedKeyValue { key, value, proof }) + } +} + +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for ProvedKeyOptionalValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let key_string = if self.key.len() == 1 && self.key[0] < b"0"[0] { + hex::encode(&self.key) + } else { + String::from_utf8(self.key.clone()).unwrap_or_else(|_| hex::encode(&self.key)) + }; + write!( + f, + "ProvedKeyOptionalValue {{ key: {}, value: {}, proof: {} }}", + key_string, + if let Some(value) = &self.value { + hex::encode(value) + } else { + "None".to_string() + }, + hex::encode(self.proof) + ) + } } #[cfg(any(feature = "full", feature = "verify"))] @@ -311,38 +456,39 @@ pub struct ProvedKeyValue { pub proof: CryptoHash, } +#[cfg(any(feature = "full", feature = "verify"))] +impl fmt::Display for ProvedKeyValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "ProvedKeyValue {{ key: {}, value: {}, proof: {} }}", + String::from_utf8(self.key.clone()).unwrap_or_else(|_| hex::encode(&self.key)), + hex::encode(&self.value), + hex::encode(self.proof) + ) + } +} + #[cfg(any(feature = "full", feature = "verify"))] #[derive(PartialEq, Eq, Debug)] /// Proof verification result pub struct ProofVerificationResult { /// Result set - pub result_set: Vec, + pub result_set: Vec, /// Limit pub limit: Option, - /// Offset - pub offset: Option, } #[cfg(any(feature = "full", feature = "verify"))] -/// 
Verifies the encoded proof with the given query and expected hash -pub fn verify_query( - bytes: &[u8], - query: &Query, - limit: Option, - offset: Option, - left_to_right: bool, - expected_hash: MerkHash, -) -> CostResult { - execute_proof(bytes, query, limit, offset, left_to_right) - .map_ok(|(root_hash, verification_result)| { - if root_hash == expected_hash { - Ok(verification_result) - } else { - Err(Error::InvalidProofError(format!( - "Proof did not match expected hash\n\tExpected: {expected_hash:?}\n\tActual: \ - {root_hash:?}" - ))) - } - }) - .flatten() +impl fmt::Display for ProofVerificationResult { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(f, "ProofVerificationResult {{")?; + writeln!(f, " result_set: [")?; + for (index, proved_key_value) in self.result_set.iter().enumerate() { + writeln!(f, " {}: {},", index, proved_key_value)?; + } + writeln!(f, " ],")?; + writeln!(f, " limit: {:?}", self.limit)?; + write!(f, "}}") + } } diff --git a/merk/src/proofs/tree.rs b/merk/src/proofs/tree.rs index b3bf9cf1c..16655a6dc 100644 --- a/merk/src/proofs/tree.rs +++ b/merk/src/proofs/tree.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Tree proofs #[cfg(feature = "full")] diff --git a/merk/src/test_utils/mod.rs b/merk/src/test_utils/mod.rs index 49a492e2f..d5d766735 100644 --- a/merk/src/test_utils/mod.rs +++ b/merk/src/test_utils/mod.rs @@ -164,7 +164,6 @@ pub fn apply_to_memonly( }) .unwrap() .expect("commit failed"); - println!("{:?}", &tree); assert_tree_invariants(&tree); tree }) diff --git a/merk/src/tree/commit.rs b/merk/src/tree/commit.rs index 24c1d996d..31b0df5c6 100644 --- a/merk/src/tree/commit.rs +++ b/merk/src/tree/commit.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Merk tree commit #[cfg(feature = "full")] diff --git a/merk/src/tree/encoding.rs b/merk/src/tree/encoding.rs index 29307246e..3a97c8956 100644 --- a/merk/src/tree/encoding.rs +++ b/merk/src/tree/encoding.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree encoding #[cfg(feature = "full")] diff --git a/merk/src/tree/fuzz_tests.rs b/merk/src/tree/fuzz_tests.rs index 631918ff8..2f3067d1e 100644 --- a/merk/src/tree/fuzz_tests.rs +++ b/merk/src/tree/fuzz_tests.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Fuzz tests #![cfg(tests)] diff --git a/merk/src/tree/hash.rs b/merk/src/tree/hash.rs index d6d45c9f2..e23566a9a 100644 --- a/merk/src/tree/hash.rs +++ b/merk/src/tree/hash.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree hash #[cfg(any(feature = "full", feature = "verify"))] diff --git a/merk/src/tree/iter.rs b/merk/src/tree/iter.rs index 6ca58df72..03cca6eaf 100644 --- a/merk/src/tree/iter.rs +++ b/merk/src/tree/iter.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree iterator #[cfg(feature = "full")] diff --git a/merk/src/tree/kv.rs b/merk/src/tree/kv.rs index ff020abc5..b10733fc3 100644 --- a/merk/src/tree/kv.rs +++ b/merk/src/tree/kv.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree key-values #[cfg(feature = "full")] diff --git a/merk/src/tree/link.rs b/merk/src/tree/link.rs index fa0d15639..f445dd11b 100644 --- a/merk/src/tree/link.rs +++ b/merk/src/tree/link.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree link #[cfg(feature = "full")] diff --git a/merk/src/tree/mod.rs b/merk/src/tree/mod.rs index 401b87226..9a29dc8e7 100644 --- a/merk/src/tree/mod.rs +++ b/merk/src/tree/mod.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk trees #[cfg(feature = "full")] diff --git a/merk/src/tree/ops.rs b/merk/src/tree/ops.rs index da481d0a7..738a89df4 100644 --- a/merk/src/tree/ops.rs +++ b/merk/src/tree/ops.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree ops #[cfg(feature = "full")] diff --git a/merk/src/tree/tree_feature_type.rs b/merk/src/tree/tree_feature_type.rs index c1fceed3d..c47fb0d60 100644 --- a/merk/src/tree/tree_feature_type.rs +++ b/merk/src/tree/tree_feature_type.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree feature type #[cfg(any(feature = "full", feature = "verify"))] diff --git a/merk/src/tree/walk/fetch.rs b/merk/src/tree/walk/fetch.rs index 08b66d999..e99df5bd9 100644 --- a/merk/src/tree/walk/fetch.rs +++ b/merk/src/tree/walk/fetch.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Walk #[cfg(feature = "full")] diff --git a/merk/src/tree/walk/mod.rs b/merk/src/tree/walk/mod.rs index e54018146..adf2a07dc 100644 --- a/merk/src/tree/walk/mod.rs +++ b/merk/src/tree/walk/mod.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! 
Merk tree walk #[cfg(feature = "full")] diff --git a/merk/src/tree/walk/ref_walker.rs b/merk/src/tree/walk/ref_walker.rs index d9fb1bcd0..17f4e6c4a 100644 --- a/merk/src/tree/walk/ref_walker.rs +++ b/merk/src/tree/walk/ref_walker.rs @@ -1,31 +1,3 @@ -// MIT LICENSE -// -// Copyright (c) 2021 Dash Core Group -// -// Permission is hereby granted, free of charge, to any -// person obtaining a copy of this software and associated -// documentation files (the "Software"), to deal in the -// Software without restriction, including without -// limitation the rights to use, copy, modify, merge, -// publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software -// is furnished to do so, subject to the following -// conditions: -// -// The above copyright notice and this permission notice -// shall be included in all copies or substantial portions -// of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -// DEALINGS IN THE SOFTWARE. - //! Merk reference walker #[cfg(feature = "full")] diff --git a/tutorials/src/bin/proofs.rs b/tutorials/src/bin/proofs.rs index 173b700d9..d56abbda7 100644 --- a/tutorials/src/bin/proofs.rs +++ b/tutorials/src/bin/proofs.rs @@ -33,7 +33,7 @@ fn main() { .expect("expected successful get_path_query"); // Generate proof. - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); // Get hash from query proof and print to terminal along with GroveDB root hash. 
let (hash, _result_set) = GroveDb::verify_query(&proof, &path_query).unwrap(); diff --git a/tutorials/src/bin/replication.rs b/tutorials/src/bin/replication.rs index bfdc1782c..6b5f0626b 100644 --- a/tutorials/src/bin/replication.rs +++ b/tutorials/src/bin/replication.rs @@ -229,7 +229,7 @@ fn query_db(db: &GroveDb, path: &[&[u8]], key: Vec) { println!(">> {:?}", e); } - let proof = db.prove_query(&path_query).unwrap().unwrap(); + let proof = db.prove_query(&path_query, None).unwrap().unwrap(); // Get hash from query proof and print to terminal along with GroveDB root hash. let (verify_hash, _) = GroveDb::verify_query(&proof, &path_query).unwrap(); println!("verify_hash: {:?}", hex::encode(verify_hash));