From d82adadc20add980cb293f5950304819b473064f Mon Sep 17 00:00:00 2001 From: Stent Date: Thu, 18 Jan 2024 17:38:43 +0000 Subject: [PATCH 01/12] Unit tests for DapolTree --- src/dapol_config.rs | 2 +- src/dapol_tree.rs | 148 ++++++++++++++++++++++++++++++++++++++++---- src/entity.rs | 2 +- src/main.rs | 2 +- 4 files changed, 140 insertions(+), 14 deletions(-) diff --git a/src/dapol_config.rs b/src/dapol_config.rs index fb2ae3b9..315b5236 100644 --- a/src/dapol_config.rs +++ b/src/dapol_config.rs @@ -727,7 +727,7 @@ mod tests { assert_eq!(dapol_tree.accumulator_type(), AccumulatorType::NdmSmt); assert_eq!(*dapol_tree.height(), height); assert_eq!(*dapol_tree.master_secret(), master_secret); - assert_eq!(dapol_tree.max_liability(), MaxLiability::default()); + assert_eq!(dapol_tree.max_liability(), &MaxLiability::default()); assert_eq!(*dapol_tree.salt_b(), salt_b); assert_eq!(*dapol_tree.salt_s(), salt_s); } diff --git a/src/dapol_tree.rs b/src/dapol_tree.rs index 69ae95b3..49b17206 100644 --- a/src/dapol_tree.rs +++ b/src/dapol_tree.rs @@ -5,7 +5,7 @@ use std::path::PathBuf; use crate::{ accumulators::{Accumulator, AccumulatorType, NdmSmt, NdmSmtError}, - read_write_utils::{self, ReadWriteError}, + read_write_utils::{self}, utils::LogOnErr, AggregationFactor, Entity, EntityId, Height, InclusionProof, MaxLiability, MaxThreadCount, Salt, Secret, @@ -197,8 +197,8 @@ impl DapolTree { } #[doc = include_str!("./shared_docs/max_liability.md")] - pub fn max_liability(&self) -> MaxLiability { - self.max_liability + pub fn max_liability(&self) -> &MaxLiability { + &self.max_liability } #[doc = include_str!("./shared_docs/height.md")] @@ -243,13 +243,15 @@ impl DapolTree { match path.extension() { Some(ext) => { if ext != SERIALIZED_TREE_EXTENSION { - Err(ReadWriteError::UnsupportedFileExtension { + Err(read_write_utils::ReadWriteError::UnsupportedFileExtension { expected: SERIALIZED_TREE_EXTENSION.to_owned(), actual: ext.to_os_string(), })?; } } - None => 
Err(ReadWriteError::NotAFile(path.clone().into_os_string()))?, + None => Err(read_write_utils::ReadWriteError::NotAFile( + path.clone().into_os_string(), + ))?, } let dapol_tree: DapolTree = @@ -289,7 +291,9 @@ impl DapolTree { /// let dir = PathBuf::from("./"); /// let path = DapolTree::parse_serialization_path(dir).unwrap(); /// ``` - pub fn parse_serialization_path(path: PathBuf) -> Result { + pub fn parse_serialization_path( + path: PathBuf, + ) -> Result { read_write_utils::parse_serialization_path( path, SERIALIZED_TREE_EXTENSION, @@ -304,7 +308,7 @@ impl DapolTree { /// An error is returned if /// 1. [bincode] fails to serialize the file. /// 2. There is an issue opening or writing the file. - pub fn serialize(&self, path: PathBuf) -> Result<(), DapolTreeError> { + pub fn serialize(&self, path: PathBuf) -> Result { let path = DapolTree::parse_serialization_path(path)?; info!( @@ -312,8 +316,9 @@ impl DapolTree { path.clone().into_os_string() ); - read_write_utils::serialize_to_bin_file(self, path).log_on_err()?; - Ok(()) + read_write_utils::serialize_to_bin_file(self, path.clone()).log_on_err()?; + + Ok(path) } } @@ -324,10 +329,131 @@ impl DapolTree { #[derive(thiserror::Error, Debug)] pub enum DapolTreeError { #[error("Error serializing/deserializing file")] - SerdeError(#[from] ReadWriteError), + SerdeError(#[from] read_write_utils::ReadWriteError), #[error("Error constructing a new NDM-SMT")] NdmSmtConstructionError(#[from] NdmSmtError), } // ------------------------------------------------------------------------------------------------- -// STENT TODO test + +#[cfg(test)] +mod tests { + use super::*; + use crate::utils::test_utils::assert_err; + use crate::{ + AccumulatorType, DapolTree, Entity, EntityId, Height, MaxLiability, MaxThreadCount, Salt, + Secret, + }; + use std::path::PathBuf; + use std::str::FromStr; + + #[test] + fn constructor_and_getters_work() { + let accumulator_type = AccumulatorType::NdmSmt; + let height = Height::expect_from(8); + 
let salt_b = Salt::from_str("salt_b").unwrap(); + let salt_s = Salt::from_str("salt_s").unwrap(); + let master_secret = Secret::from_str("master_secret").unwrap(); + let max_liability = MaxLiability::from(10_000_000); + let max_thread_count = MaxThreadCount::from(8); + + let entity = Entity { + liability: 1u64, + id: EntityId::from_str("id").unwrap(), + }; + let entities = vec![entity.clone()]; + + let tree = DapolTree::new( + accumulator_type.clone(), + master_secret.clone(), + salt_b.clone(), + salt_s.clone(), + max_liability.clone(), + max_thread_count.clone(), + height.clone(), + entities, + ) + .unwrap(); + + assert_eq!(tree.master_secret(), &master_secret); + assert_eq!(tree.height(), &height); + assert_eq!(tree.max_liability(), &max_liability); + assert_eq!(tree.salt_b(), &salt_b); + assert_eq!(tree.salt_s(), &salt_s); + assert_eq!(tree.accumulator_type(), accumulator_type); + + assert!(tree.entity_mapping().is_some()); + assert!(tree.entity_mapping().unwrap().get(&entity.id).is_some()); + } + + fn new_tree() -> DapolTree { + let accumulator_type = AccumulatorType::NdmSmt; + let height = Height::expect_from(8); + let salt_b = Salt::from_str("salt_b").unwrap(); + let salt_s = Salt::from_str("salt_s").unwrap(); + let master_secret = Secret::from_str("master_secret").unwrap(); + let max_liability = MaxLiability::from(10_000_000); + let max_thread_count = MaxThreadCount::from(8); + + let entity = Entity { + liability: 1u64, + id: EntityId::from_str("id").unwrap(), + }; + let entities = vec![entity.clone()]; + + DapolTree::new( + accumulator_type.clone(), + master_secret.clone(), + salt_b.clone(), + salt_s.clone(), + max_liability.clone(), + max_thread_count.clone(), + height.clone(), + entities, + ) + .unwrap() + } + + #[test] + fn serde_does_not_change_tree() { + let tree = new_tree(); + let path = PathBuf::from_str("./mytree.myext").unwrap(); + let path = tree.serialize(path).unwrap(); + let tree_2 = DapolTree::deserialize(path).unwrap(); + + 
assert_eq!(tree.master_secret(), tree_2.master_secret()); + assert_eq!(tree.height(), tree_2.height()); + assert_eq!(tree.max_liability(), tree_2.max_liability()); + assert_eq!(tree.salt_b(), tree_2.salt_b()); + assert_eq!(tree.salt_s(), tree_2.salt_s()); + assert_eq!(tree.accumulator_type(), tree_2.accumulator_type()); + assert_eq!(tree.entity_mapping(), tree_2.entity_mapping()); + } + + #[test] + fn serialization_path_parser_fails_for_unsupported_extensions() { + let path = PathBuf::from_str("./mytree.myext").unwrap(); + + let res = DapolTree::parse_serialization_path(path); + assert_err!( + res, + Err(read_write_utils::ReadWriteError::UnsupportedFileExtension { + expected: _, + actual: _ + }) + ); + } + + #[test] + fn serialization_path_parser_gives_correct_file_prefix() { + let path = PathBuf::from_str("./").unwrap(); + let path = DapolTree::parse_serialization_path(path).unwrap(); + assert!(path.to_str().unwrap().contains("proof_of_liabilities_merkle_sum_tree_")); + } + + #[test] + fn generate_inclusion_proof_works() { + let tree = new_tree(); + assert!(tree.generate_inclusion_proof(&EntityId::from_str("id").unwrap()).is_ok()); + } +} diff --git a/src/entity.rs b/src/entity.rs index a29a938a..d16dfe0d 100644 --- a/src/entity.rs +++ b/src/entity.rs @@ -24,7 +24,7 @@ pub use entity_ids_parser::{EntityIdsParser, EntityIdsParserError}; /// chosen above 'user' because it has a more general connotation. /// /// The entity struct has only 2 fields: ID and liability. 
-#[derive(Debug, Deserialize, PartialEq)] +#[derive(Debug, Clone, Deserialize, PartialEq)] pub struct Entity { pub liability: u64, pub id: EntityId, diff --git a/src/main.rs b/src/main.rs index 87f31425..35e05920 100644 --- a/src/main.rs +++ b/src/main.rs @@ -87,7 +87,7 @@ fn main() { .if_none_then(|| { debug!("No serialization path set, skipping serialization of the tree"); }) - .consume(|path| dapol_tree.serialize(path).unwrap()); + .consume(|path| { dapol_tree.serialize(path).unwrap(); }); if let Some(patharg) = gen_proofs { let entity_ids = EntityIdsParser::from( From 4d03f4c9be7eb62d1c4d9283e5a2239b743f39f7 Mon Sep 17 00:00:00 2001 From: Stent Date: Fri, 19 Jan 2024 09:33:31 +0000 Subject: [PATCH 02/12] Some smol unit tests & docs --- src/accumulators/ndm_smt.rs | 72 +++++++---------------- src/dapol_tree.rs | 33 ++++++++--- src/inclusion_proof.rs | 22 +++---- src/max_liability.rs | 14 +++++ src/shared_docs/aggregation_factor.md | 1 + src/shared_docs/upper_bound_bit_length.md | 1 + 6 files changed, 73 insertions(+), 70 deletions(-) create mode 100644 src/shared_docs/aggregation_factor.md create mode 100644 src/shared_docs/upper_bound_bit_length.md diff --git a/src/accumulators/ndm_smt.rs b/src/accumulators/ndm_smt.rs index e15f8685..17d10e2d 100644 --- a/src/accumulators/ndm_smt.rs +++ b/src/accumulators/ndm_smt.rs @@ -15,8 +15,7 @@ use crate::{ }, entity::{Entity, EntityId}, inclusion_proof::{AggregationFactor, InclusionProof}, - kdf::generate_key, - MaxThreadCount, Salt, Secret, DEFAULT_RANGE_PROOF_UPPER_BOUND_BIT_LENGTH, + kdf, MaxThreadCount, Salt, Secret, }; mod x_coord_generator; @@ -135,10 +134,11 @@ impl NdmSmt { .map(|(entity, x_coord)| { // `w` is the letter used in the DAPOL+ paper. 
let entity_secret: [u8; 32] = - generate_key(None, master_secret_bytes, Some(&x_coord.to_le_bytes())) + kdf::generate_key(None, master_secret_bytes, Some(&x_coord.to_le_bytes())) .into(); - let blinding_factor = generate_key(Some(salt_b_bytes), &entity_secret, None); - let entity_salt = generate_key(Some(salt_s_bytes), &entity_secret, None); + let blinding_factor = + kdf::generate_key(Some(salt_b_bytes), &entity_secret, None); + let entity_salt = kdf::generate_key(Some(salt_s_bytes), &entity_secret, None); InputLeafNode { content: Content::new_leaf( @@ -207,14 +207,9 @@ impl NdmSmt { /// are aggregated. Those that do not form part of the aggregated proof /// are just proved individually. The aggregation is a feature of the /// Bulletproofs protocol that improves efficiency. - /// - `upper_bound_bit_length` is used to determine the upper bound for the - /// range proof, which is set to `2^upper_bound_bit_length` i.e. the - /// range proof shows `0 <= liability <= 2^upper_bound_bit_length` for - /// some liability. The type is set to `u8` because we are not expected - /// to require bounds higher than $2^256$. Note that if the value is set - /// to anything other than 8, 16, 32 or 64 the Bulletproofs code will return - /// an Err. - pub fn generate_inclusion_proof_with( + /// - `upper_bound_bit_length`: + #[doc = include_str!("../shared_docs/upper_bound_bit_length.md")] + pub fn generate_inclusion_proof( &self, master_secret: &Secret, salt_b: &Salt, @@ -249,48 +244,17 @@ impl NdmSmt { )?) } - /// Generate an inclusion proof for the given entity_id. 
- /// - /// Use the default values for Bulletproof parameters: - /// - `aggregation_factor`: half of all the range proofs are aggregated - /// - `upper_bound_bit_length`: 64 (which should be plenty enough for most - /// real-world cases) - /// - /// Parameters: - /// - `master_secret`: - #[doc = include_str!("../shared_docs/master_secret.md")] - /// - `salt_b`: - #[doc = include_str!("../shared_docs/salt_b.md")] - /// - `salt_s`: - #[doc = include_str!("../shared_docs/salt_s.md")] - /// - `entity_id`: unique ID for the entity that the proof will be generated for. - pub fn generate_inclusion_proof( - &self, - master_secret: &Secret, - salt_b: &Salt, - salt_s: &Salt, - entity_id: &EntityId, - ) -> Result { - self.generate_inclusion_proof_with( - master_secret, - salt_b, - salt_s, - entity_id, - AggregationFactor::default(), - DEFAULT_RANGE_PROOF_UPPER_BOUND_BIT_LENGTH, - ) - } - /// Return the hash digest/bytes of the root node for the binary tree. pub fn root_hash(&self) -> H256 { self.binary_tree.root().content.hash } - /// Return the entity mapping, the x-coord that each entity is mapped to. + /// Hash map giving the x-coord that each entity is mapped to. 
pub fn entity_mapping(&self) -> &HashMap { &self.entity_mapping } + #[doc = include_str!("../shared_docs/height.md")] pub fn height(&self) -> &Height { self.binary_tree.height() } @@ -312,10 +276,10 @@ fn new_padding_node_content_closure( // copying let coord_bytes = coord.to_bytes(); // pad_secret is given as 'w' in the DAPOL+ paper - let pad_secret = generate_key(None, &master_secret_bytes, Some(&coord_bytes)); + let pad_secret = kdf::generate_key(None, &master_secret_bytes, Some(&coord_bytes)); let pad_secret_bytes: [u8; 32] = pad_secret.into(); - let blinding_factor = generate_key(Some(&salt_b_bytes), &pad_secret_bytes, None); - let salt = generate_key(Some(&salt_s_bytes), &pad_secret_bytes, None); + let blinding_factor = kdf::generate_key(Some(&salt_b_bytes), &pad_secret_bytes, None); + let salt = kdf::generate_key(Some(&salt_s_bytes), &pad_secret_bytes, None); Content::new_pad(blinding_factor.into(), coord, salt.into()) } } @@ -367,6 +331,14 @@ mod tests { id: EntityId::from_str("some entity").unwrap(), }]; - NdmSmt::new(master_secret, salt_b, salt_s, height, max_thread_count, entities).unwrap(); + NdmSmt::new( + master_secret, + salt_b, + salt_s, + height, + max_thread_count, + entities, + ) + .unwrap(); } } diff --git a/src/dapol_tree.rs b/src/dapol_tree.rs index 49b17206..3a32c22e 100644 --- a/src/dapol_tree.rs +++ b/src/dapol_tree.rs @@ -135,17 +135,17 @@ impl DapolTree { /// Generate an inclusion proof for the given `entity_id`. /// - /// `aggregation_factor` is used to determine how many of the range proofs - /// are aggregated. Those that do not form part of the aggregated proof - /// are just proved individually. The aggregation is a feature of the - /// Bulletproofs protocol that improves efficiency. + /// Parameters: + /// - `entity_id`: unique ID for the entity that the proof will be generated for. 
+ /// - `aggregation_factor`: + #[doc = include_str!("./shared_docs/aggregation_factor.md")] pub fn generate_inclusion_proof_with( &self, entity_id: &EntityId, aggregation_factor: AggregationFactor, ) -> Result { match &self.accumulator { - Accumulator::NdmSmt(ndm_smt) => ndm_smt.generate_inclusion_proof_with( + Accumulator::NdmSmt(ndm_smt) => ndm_smt.generate_inclusion_proof( &self.master_secret, &self.salt_b, &self.salt_s, @@ -157,6 +157,9 @@ impl DapolTree { } /// Generate an inclusion proof for the given `entity_id`. + /// + /// Parameters: + /// - `entity_id`: unique ID for the entity that the proof will be generated for. pub fn generate_inclusion_proof( &self, entity_id: &EntityId, @@ -167,6 +170,8 @@ impl DapolTree { &self.salt_b, &self.salt_s, entity_id, + AggregationFactor::default(), + self.max_liability.as_range_proof_upper_bound_bit_length(), ), } } @@ -448,12 +453,26 @@ mod tests { fn serialization_path_parser_gives_correct_file_prefix() { let path = PathBuf::from_str("./").unwrap(); let path = DapolTree::parse_serialization_path(path).unwrap(); - assert!(path.to_str().unwrap().contains("proof_of_liabilities_merkle_sum_tree_")); + assert!(path + .to_str() + .unwrap() + .contains("proof_of_liabilities_merkle_sum_tree_")); } #[test] fn generate_inclusion_proof_works() { let tree = new_tree(); - assert!(tree.generate_inclusion_proof(&EntityId::from_str("id").unwrap()).is_ok()); + assert!(tree + .generate_inclusion_proof(&EntityId::from_str("id").unwrap()) + .is_ok()); + } + + #[test] + fn generate_inclusion_proof_with_aggregation_factor_works() { + let tree = new_tree(); + let agg = AggregationFactor::Divisor(2u8); + assert!(tree + .generate_inclusion_proof_with(&EntityId::from_str("id").unwrap(), agg) + .is_ok()); } } diff --git a/src/inclusion_proof.rs b/src/inclusion_proof.rs index 0cfefce2..cc5c3588 100644 --- a/src/inclusion_proof.rs +++ b/src/inclusion_proof.rs @@ -73,20 +73,16 @@ pub struct InclusionProof { } impl InclusionProof { - /// Generate an 
inclusion proof from a tree path. + /// Generate an inclusion proof from the tree path siblings. /// - /// `aggregation_factor` is used to determine how many of the range proofs - /// are aggregated. Those that do not form part of the aggregated proof - /// are just proved individually. The aggregation is a feature of the - /// Bulletproofs protocol that improves efficiency. - /// - /// `upper_bound_bit_length` is used to determine the upper bound for the - /// range proof, which is set to `2^upper_bound_bit_length` i.e. the - /// range proof shows `0 <= liability <= 2^upper_bound_bit_length` for - /// some liability. The type is set to `u8` because we are not expected - /// to require bounds higher than $2^256$. Note that if the value is set - /// to anything other than 8, 16, 32 or 64 the Bulletproofs code will return - /// an Err. + /// Parameters: + /// - `leaf_node`: node for which the inclusion proof must be generated for. + /// - `path_siblings`: the sibling nodes of the nodes that form the path + /// from leaf to root. 
+ /// - `aggregation_factor`: + #[doc = include_str!("./shared_docs/aggregation_factor.md")] + /// - `upper_bound_bit_length`: + #[doc = include_str!("./shared_docs/upper_bound_bit_length.md")] pub fn generate( leaf_node: Node, path_siblings: PathSiblings, diff --git a/src/max_liability.rs b/src/max_liability.rs index 1c71593e..d4cfb17c 100644 --- a/src/max_liability.rs +++ b/src/max_liability.rs @@ -128,4 +128,18 @@ mod tests { .find(|i| **i == pow_2) .is_some()); } + + #[test] + fn upper_bound_bit_length_works_for_100() { + let input_max_liability = 100u64; + let max_liability = MaxLiability(input_max_liability); + + let logarithm_of_input_truncated = (input_max_liability as f64).log2() as u8; + assert_eq!(logarithm_of_input_truncated, 6u8); + let nearest_pow_2_greater_than = 8u8; + + assert_eq!(max_liability.as_range_proof_upper_bound_bit_length(), nearest_pow_2_greater_than); + } + + // TODO test more cases for the upper_bound_bit_length function } diff --git a/src/shared_docs/aggregation_factor.md b/src/shared_docs/aggregation_factor.md new file mode 100644 index 00000000..ff80f423 --- /dev/null +++ b/src/shared_docs/aggregation_factor.md @@ -0,0 +1 @@ +used to determine how many of the range proofs are aggregated. Those that do not form part of the aggregated proof are just proved individually. The aggregation is a feature of the Bulletproofs protocol that improves efficiency. diff --git a/src/shared_docs/upper_bound_bit_length.md b/src/shared_docs/upper_bound_bit_length.md new file mode 100644 index 00000000..d4d101d2 --- /dev/null +++ b/src/shared_docs/upper_bound_bit_length.md @@ -0,0 +1 @@ +used to determine the upper bound for the range proof, which is set to `2^upper_bound_bit_length` i.e. the range proof shows `0 <= liability <= 2^upper_bound_bit_length` for some liability. The type is set to `u8` because we are not expected to require bounds higher than $2^256$. 
Note that if the value is set to anything other than 8, 16, 32 or 64 the Bulletproofs code will return an Err. From f5d66e711bc34e809a689df66c396109656f0a0d Mon Sep 17 00:00:00 2001 From: Stent Date: Fri, 19 Jan 2024 11:18:26 +0000 Subject: [PATCH 03/12] Fix bug with deserialization of Secret & Salt --- src/dapol_config.rs | 3 +++ src/dapol_tree.rs | 6 +++--- src/read_write_utils.rs | 5 +++++ src/salt.rs | 16 +++++++++++++--- src/secret.rs | 17 +++++++++++++---- 5 files changed, 37 insertions(+), 10 deletions(-) diff --git a/src/dapol_config.rs b/src/dapol_config.rs index 315b5236..0a5358b4 100644 --- a/src/dapol_config.rs +++ b/src/dapol_config.rs @@ -96,9 +96,12 @@ pub struct DapolConfig { secrets: SecretsConfig, } +use serde_with::{serde_as, DisplayFromStr}; +#[serde_as] #[derive(Deserialize, Debug, Clone, Default, PartialEq)] pub struct SecretsConfig { file_path: Option, + #[serde_as(as = "Option")] master_secret: Option, } diff --git a/src/dapol_tree.rs b/src/dapol_tree.rs index 3a32c22e..8bdf7956 100644 --- a/src/dapol_tree.rs +++ b/src/dapol_tree.rs @@ -321,7 +321,7 @@ impl DapolTree { path.clone().into_os_string() ); - read_write_utils::serialize_to_bin_file(self, path.clone()).log_on_err()?; + read_write_utils::serialize_to_bin_file(&self, path.clone()).log_on_err()?; Ok(path) } @@ -347,7 +347,7 @@ mod tests { use crate::utils::test_utils::assert_err; use crate::{ AccumulatorType, DapolTree, Entity, EntityId, Height, MaxLiability, MaxThreadCount, Salt, - Secret, + Secret, accumulators, }; use std::path::PathBuf; use std::str::FromStr; @@ -422,7 +422,7 @@ mod tests { #[test] fn serde_does_not_change_tree() { let tree = new_tree(); - let path = PathBuf::from_str("./mytree.myext").unwrap(); + let path = PathBuf::from_str("./examples/my_serialized_tree_for_unit_tests.dapoltree").unwrap(); let path = tree.serialize(path).unwrap(); let tree_2 = DapolTree::deserialize(path).unwrap(); diff --git a/src/read_write_utils.rs b/src/read_write_utils.rs index 
a9f45cb9..d4adb57e 100644 --- a/src/read_write_utils.rs +++ b/src/read_write_utils.rs @@ -47,6 +47,11 @@ pub fn deserialize_from_bin_file(path: PathBuf) -> Result fmt::Result { + let s = String::from_utf8_lossy(&self.0); + write!(f, "{}", s) + } +} + // ------------------------------------------------------------------------------------------------- // From for KDF key. From 5db966e84a2fff17a5e10692f2c90ebd1820e724 Mon Sep 17 00:00:00 2001 From: Stent Date: Fri, 19 Jan 2024 13:23:59 +0000 Subject: [PATCH 04/12] Add functions to API that match paper Also some docs & logging. --- examples/main.rs | 4 +- src/accumulators.rs | 35 ++++++++- src/accumulators/ndm_smt.rs | 22 +++++- src/dapol_config.rs | 5 -- src/dapol_tree.rs | 98 +++++++++++++++++++++---- src/lib.rs | 4 +- src/shared_docs/root_blinding_factor.md | 3 + src/shared_docs/root_commitment.md | 3 + src/shared_docs/root_hash.md | 3 + src/shared_docs/root_liability.md | 3 + 10 files changed, 151 insertions(+), 29 deletions(-) create mode 100644 src/shared_docs/root_blinding_factor.md create mode 100644 src/shared_docs/root_commitment.md create mode 100644 src/shared_docs/root_hash.md create mode 100644 src/shared_docs/root_liability.md diff --git a/examples/main.rs b/examples/main.rs index 96cd18fa..f719a3c8 100644 --- a/examples/main.rs +++ b/examples/main.rs @@ -123,7 +123,7 @@ pub fn simple_inclusion_proof_generation_and_verification( entity_id: dapol::EntityId, ) { let inclusion_proof = dapol_tree.generate_inclusion_proof(&entity_id).unwrap(); - inclusion_proof.verify(dapol_tree.root_hash()).unwrap(); + inclusion_proof.verify(dapol_tree.root_hash().clone()).unwrap(); } /// Example on how to generate and verify inclusion proofs. 
@@ -146,5 +146,5 @@ pub fn advanced_inclusion_proof_generation_and_verification( .generate_inclusion_proof_with(&entity_id, aggregation_factor) .unwrap(); - inclusion_proof.verify(dapol_tree.root_hash()).unwrap(); + inclusion_proof.verify(dapol_tree.root_hash().clone()).unwrap(); } diff --git a/src/accumulators.rs b/src/accumulators.rs index 90aaa6cb..03cc4abd 100644 --- a/src/accumulators.rs +++ b/src/accumulators.rs @@ -4,8 +4,10 @@ //! types of accumulators, which can all be found under this module. use clap::ValueEnum; +use curve25519_dalek_ng::{ristretto::RistrettoPoint, scalar::Scalar}; use primitive_types::H256; use serde::{Deserialize, Serialize}; +use std::fmt; mod ndm_smt; pub use ndm_smt::{NdmSmt, NdmSmtError, RandomXCoordGenerator}; @@ -34,12 +36,33 @@ impl Accumulator { } } - /// Return the hash digest/bytes of the root node for the binary tree. - pub fn root_hash(&self) -> H256 { + #[doc = include_str!("./shared_docs/root_hash.md")] + pub fn root_hash(&self) -> &H256 { match self { Self::NdmSmt(ndm_smt) => ndm_smt.root_hash(), } } + + #[doc = include_str!("./shared_docs/root_commitment.md")] + pub fn root_commitment(&self) -> &RistrettoPoint { + match self { + Self::NdmSmt(ndm_smt) => ndm_smt.root_commitment(), + } + } + + #[doc = include_str!("./shared_docs/root_liability.md")] + pub fn root_liability(&self) -> u64 { + match self { + Self::NdmSmt(ndm_smt) => ndm_smt.root_liability(), + } + } + + #[doc = include_str!("./shared_docs/root_blinding_factor.md")] + pub fn root_blinding_factor(&self) -> &Scalar { + match self { + Self::NdmSmt(ndm_smt) => ndm_smt.root_blinding_factor(), + } + } } /// Various supported accumulator types. @@ -49,3 +72,11 @@ pub enum AccumulatorType { NdmSmt, // TODO add other accumulators.. 
} + +impl fmt::Display for AccumulatorType { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + AccumulatorType::NdmSmt => write!(f, "NDM-SMT") + } + } +} diff --git a/src/accumulators/ndm_smt.rs b/src/accumulators/ndm_smt.rs index 17d10e2d..b12b2158 100644 --- a/src/accumulators/ndm_smt.rs +++ b/src/accumulators/ndm_smt.rs @@ -1,5 +1,6 @@ use std::collections::HashMap; +use curve25519_dalek_ng::{ristretto::RistrettoPoint, scalar::Scalar}; use primitive_types::H256; use serde::{Deserialize, Serialize}; @@ -244,9 +245,24 @@ impl NdmSmt { )?) } - /// Return the hash digest/bytes of the root node for the binary tree. - pub fn root_hash(&self) -> H256 { - self.binary_tree.root().content.hash + #[doc = include_str!("../shared_docs/root_hash.md")] + pub fn root_hash(&self) -> &H256 { + &self.binary_tree.root().content.hash + } + + #[doc = include_str!("../shared_docs/root_hash.md")] + pub fn root_commitment(&self) -> &RistrettoPoint { + &self.binary_tree.root().content.commitment + } + + #[doc = include_str!("../shared_docs/root_liability.md")] + pub fn root_liability(&self) -> u64 { + self.binary_tree.root().content.liability + } + + #[doc = include_str!("../shared_docs/root_blinding_factor.md")] + pub fn root_blinding_factor(&self) -> &Scalar { + &self.binary_tree.root().content.blinding_factor } /// Hash map giving the x-coord that each entity is mapped to. 
diff --git a/src/dapol_config.rs b/src/dapol_config.rs index 0a5358b4..8304806e 100644 --- a/src/dapol_config.rs +++ b/src/dapol_config.rs @@ -353,11 +353,6 @@ impl DapolConfig { ) .log_on_err()?; - info!( - "Successfully built DAPOL tree with root hash {:?}", - dapol_tree.root_hash() - ); - Ok(dapol_tree) } diff --git a/src/dapol_tree.rs b/src/dapol_tree.rs index 8bdf7956..d36eac93 100644 --- a/src/dapol_tree.rs +++ b/src/dapol_tree.rs @@ -1,3 +1,4 @@ +use curve25519_dalek_ng::{ristretto::RistrettoPoint, scalar::Scalar}; use log::{debug, info}; use primitive_types::H256; use serde::{Deserialize, Serialize}; @@ -50,15 +51,15 @@ impl DapolTree { /// value must be set. /// - `salt_b`: If not set then it will be randomly generated. #[doc = include_str!("./shared_docs/salt_b.md")] - /// - `salt_s`: If not set then it will be - /// randomly generated. + /// - `salt_s`: If not set then it will be randomly generated. #[doc = include_str!("./shared_docs/salt_s.md")] /// - `max_liability`: If not set then a default value is used. #[doc = include_str!("./shared_docs/max_liability.md")] - /// - `height`: If not set the [default height] will be used [crate][Height]. + /// - `height`: If not set the [default height] will be used + /// [crate][Height]. #[doc = include_str!("./shared_docs/height.md")] - /// - `max_thread_count`: If not set the max parallelism of the - /// underlying machine will be used. + /// - `max_thread_count`: If not set the max parallelism of the underlying + /// machine will be used. #[doc = include_str!("./shared_docs/max_thread_count.md")] /// - `secrets_file_path`: Path to the secrets file. If not present the /// secrets will be generated randomly. @@ -124,19 +125,50 @@ impl DapolTree { } }; - Ok(DapolTree { + let tree = DapolTree { accumulator, master_secret, - salt_s, - salt_b, + salt_b: salt_b.clone(), + salt_s: salt_s.clone(), max_liability, - }) + }; + + info!( + "\nDAPOL tree has been constructed. 
Public data:\n \ + - accumulator type: {}\n \ + - height: {}\n \ + - salt_b: 0x{}\n \ + - salt_s: 0x{}\n \ + - root hash: 0x{}\n \ + - root commitment: {:?}", + accumulator_type, + height.as_u32(), + salt_b + .as_bytes() + .iter() + .map(|b| format!("{:02x}", b)) + .collect::(), + salt_s + .as_bytes() + .iter() + .map(|b| format!("{:02x}", b)) + .collect::(), + tree.root_hash() + .as_bytes() + .iter() + .map(|b| format!("{:02x}", b)) + .collect::(), + tree.root_commitment() + ); + + Ok(tree) } /// Generate an inclusion proof for the given `entity_id`. /// /// Parameters: - /// - `entity_id`: unique ID for the entity that the proof will be generated for. + /// - `entity_id`: unique ID for the entity that the proof will be generated + /// for. /// - `aggregation_factor`: #[doc = include_str!("./shared_docs/aggregation_factor.md")] pub fn generate_inclusion_proof_with( @@ -159,7 +191,8 @@ impl DapolTree { /// Generate an inclusion proof for the given `entity_id`. /// /// Parameters: - /// - `entity_id`: unique ID for the entity that the proof will be generated for. + /// - `entity_id`: unique ID for the entity that the proof will be generated + /// for. pub fn generate_inclusion_proof( &self, entity_id: &EntityId, @@ -222,10 +255,42 @@ impl DapolTree { } } - /// Return the hash digest/bytes of the root node for the binary tree. - pub fn root_hash(&self) -> H256 { + /// Hash & Pedersen commitment for the root node of the Merkle Sum Tree. + /// + /// These values can be made public and do not disclose secret information + /// about the tree such as the number of leaf nodes or their liabilities. + pub fn root_public_data(&self) -> (&H256, &RistrettoPoint) { + (self.root_hash(), self.root_commitment()) + } + + /// Liability & blinding factor that make up the Pederesen commitment of + /// the Merkle Sum Tree. + /// + /// Neither of these values should be made public if the owner of the tree + /// does not want to disclose the total liability sum of their users. 
+ pub fn root_secret_data(&self) -> (u64, &Scalar) { + (self.root_liability(), self.root_blinding_factor()) + } + + #[doc = include_str!("./shared_docs/root_hash.md")] + pub fn root_hash(&self) -> &H256 { self.accumulator.root_hash() } + + #[doc = include_str!("./shared_docs/root_commitment.md")] + pub fn root_commitment(&self) -> &RistrettoPoint { + self.accumulator.root_commitment() + } + + #[doc = include_str!("./shared_docs/root_liability.md")] + pub fn root_liability(&self) -> u64 { + self.root_liability() + } + + #[doc = include_str!("./shared_docs/root_blinding_factor.md")] + pub fn root_blinding_factor(&self) -> &Scalar { + self.root_blinding_factor() + } } // ------------------------------------------------------------------------------------------------- @@ -346,8 +411,8 @@ mod tests { use super::*; use crate::utils::test_utils::assert_err; use crate::{ - AccumulatorType, DapolTree, Entity, EntityId, Height, MaxLiability, MaxThreadCount, Salt, - Secret, accumulators, + accumulators, AccumulatorType, DapolTree, Entity, EntityId, Height, MaxLiability, + MaxThreadCount, Salt, Secret, }; use std::path::PathBuf; use std::str::FromStr; @@ -422,7 +487,8 @@ mod tests { #[test] fn serde_does_not_change_tree() { let tree = new_tree(); - let path = PathBuf::from_str("./examples/my_serialized_tree_for_unit_tests.dapoltree").unwrap(); + let path = + PathBuf::from_str("./examples/my_serialized_tree_for_unit_tests.dapoltree").unwrap(); let path = tree.serialize(path).unwrap(); let tree_2 = DapolTree::deserialize(path).unwrap(); diff --git a/src/lib.rs b/src/lib.rs index a6c2d086..0de80dae 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -60,7 +60,9 @@ pub mod read_write_utils; pub mod utils; mod dapol_tree; -pub use dapol_tree::{DapolTree, DapolTreeError}; +pub use dapol_tree::{DapolTree, DapolTreeError, RootPublicData, RootSecretData}; + +pub use curve25519_dalek_ng::{ristretto::RistrettoPoint, scalar::Scalar}; mod dapol_config; pub use dapol_config::{ diff --git 
a/src/shared_docs/root_blinding_factor.md b/src/shared_docs/root_blinding_factor.md new file mode 100644 index 00000000..4214b835 --- /dev/null +++ b/src/shared_docs/root_blinding_factor.md @@ -0,0 +1,3 @@ +Blinding factor of the Pedersen commitment in the root node of the Merkle Sum Tree. + +This value should not be made public if the tree owner does not want to disclose their total liabilities. diff --git a/src/shared_docs/root_commitment.md b/src/shared_docs/root_commitment.md new file mode 100644 index 00000000..76cd3ff8 --- /dev/null +++ b/src/shared_docs/root_commitment.md @@ -0,0 +1,3 @@ +Pedersen commitment in the root node for the Merkle Sum Tree. + +This value can be made public and does not disclose secret information about the tree such as the number of leaf nodes or their liabilities. diff --git a/src/shared_docs/root_hash.md b/src/shared_docs/root_hash.md new file mode 100644 index 00000000..637954c0 --- /dev/null +++ b/src/shared_docs/root_hash.md @@ -0,0 +1,3 @@ +Hash digest/bytes of the root node for the Merkle Sum Tree. + +This value can be made public and does not disclose secret information about the tree such as the number of leaf nodes or their liabilities. diff --git a/src/shared_docs/root_liability.md b/src/shared_docs/root_liability.md new file mode 100644 index 00000000..5688ecdc --- /dev/null +++ b/src/shared_docs/root_liability.md @@ -0,0 +1,3 @@ +Value that is hidden by the Pedersen commitment in the root node of the Merkle Sum Tree. + +This value is the total liability sum of the whole tree. It should not be made public if the tree owner does not want to disclose their total liabilities. 
From f257aaca8829d3d20d2d5eee742844407d2dcdef Mon Sep 17 00:00:00 2001 From: Stent Date: Fri, 19 Jan 2024 17:31:32 +0000 Subject: [PATCH 05/12] Add root serde, verification, cli integration & tests --- Cargo.toml | 3 +- benches/criterion_benches.rs | 2 +- benches/manual_benches.rs | 2 +- examples/root_public_data.json | 37 +++ examples/root_secret_data.json | 37 +++ src/cli.rs | 26 +- src/dapol_config.rs | 1 + src/dapol_tree.rs | 459 +++++++++++++++++++++++++++------ src/main.rs | 42 ++- src/read_write_utils.rs | 70 ++++- 10 files changed, 577 insertions(+), 102 deletions(-) create mode 100644 examples/root_public_data.json create mode 100644 examples/root_secret_data.json diff --git a/Cargo.toml b/Cargo.toml index 7b474deb..2953d839 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -59,9 +59,10 @@ patharg = "0.3.0" serde = { version = "1.0.188", features = ["derive"] } serde_with = "3.4.0" serde_bytes = "0.11.12" +serde_json = "1.0.111" +bincode = "1.3.3" toml = "0.8.2" csv = "1.3.0" -bincode = "1.3.3" chrono = "0.4.31" derive_builder = "0.12.0" diff --git a/benches/criterion_benches.rs b/benches/criterion_benches.rs index 9b95c9c9..cb16da1c 100644 --- a/benches/criterion_benches.rs +++ b/benches/criterion_benches.rs @@ -165,7 +165,7 @@ pub fn bench_build_tree(c: &mut Criterion) { let src_dir = env!("CARGO_MANIFEST_DIR"); let target_dir = Path::new(&src_dir).join("target"); let dir = target_dir.join("serialized_trees"); - let path = DapolTree::parse_serialization_path(dir).unwrap(); + let path = DapolTree::parse_tree_serialization_path(dir).unwrap(); let tree = dapol_tree.expect("Tree should have been built"); group.bench_function( diff --git a/benches/manual_benches.rs b/benches/manual_benches.rs index 34e15858..8e05af78 100644 --- a/benches/manual_benches.rs +++ b/benches/manual_benches.rs @@ -160,7 +160,7 @@ fn main() { let src_dir = env!("CARGO_MANIFEST_DIR"); let target_dir = Path::new(&src_dir).join("target"); let dir = target_dir.join("serialized_trees"); - let 
path = DapolTree::parse_serialization_path(dir).unwrap(); + let path = DapolTree::parse_tree_serialization_path(dir).unwrap(); let time_start = Instant::now(); dapol_tree diff --git a/examples/root_public_data.json b/examples/root_public_data.json new file mode 100644 index 00000000..a3d3aed8 --- /dev/null +++ b/examples/root_public_data.json @@ -0,0 +1,37 @@ +{ + "hash": "0x1a53a98ea36766fd0b5f0337270b1956d4e037682995b09ac89819c83633b04d", + "commitment": [ + 48, + 191, + 128, + 114, + 45, + 81, + 213, + 207, + 193, + 149, + 16, + 154, + 245, + 148, + 208, + 38, + 217, + 80, + 160, + 158, + 139, + 139, + 204, + 216, + 59, + 179, + 248, + 64, + 47, + 159, + 240, + 80 + ] +} \ No newline at end of file diff --git a/examples/root_secret_data.json b/examples/root_secret_data.json new file mode 100644 index 00000000..8375b703 --- /dev/null +++ b/examples/root_secret_data.json @@ -0,0 +1,37 @@ +{ + "liability": 53896881, + "blinding_factor": [ + 159, + 205, + 144, + 12, + 44, + 61, + 76, + 252, + 206, + 44, + 84, + 213, + 35, + 6, + 234, + 214, + 136, + 130, + 50, + 99, + 89, + 206, + 161, + 174, + 38, + 231, + 157, + 240, + 1, + 109, + 19, + 11 + ] +} \ No newline at end of file diff --git a/src/cli.rs b/src/cli.rs index 0602881a..55198377 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -13,7 +13,7 @@ use crate::{ accumulators::AccumulatorType, binary_tree::Height, percentage::{Percentage, ONE_HUNDRED_PERCENT}, - MaxLiability, MaxThreadCount, Salt + MaxLiability, MaxThreadCount, Salt, }; // ------------------------------------------------------------------------------------------------- @@ -54,6 +54,11 @@ pub enum Command { #[arg(short = 'S', long, value_name = "FILE_PATH", global = true, long_help = SERIALIZE_HELP)] serialize: Option, + + /// Serialize the root node to 2 files: one for the public data, and + /// one for the secret data. + #[arg(short, long, value_name = "DIR", global = true)] + root_serialize: Option, }, /// Generate inclusion proofs for entities. 
@@ -86,9 +91,9 @@ pub enum Command { /// Verify an inclusion proof. /// - /// The root hash of the tree is logged out on tree creation (an info-level log). - VerifyProof { - /// File path for the serialized inclusion proof json file. + /// Note: the root hash of the tree is logged out on tree creation (an info-level log). + VerifyInclusionProof { + /// File path for the serialized inclusion proof file. #[arg(short, long)] file_path: InputArg, @@ -96,6 +101,19 @@ pub enum Command { #[arg(short, long, value_parser = H256::from_str, value_name = "BYTES")] root_hash: H256, }, + + /// Verify the root node of a DAPOL tree. + /// + /// Note: the public data (commitment & hash) are checked against the secret data (liability & blinding factor). + VerifyRoot { + /// File path for the serialized public data of the root. + #[arg(short, long)] + root_pub: InputArg, + + /// File path for the serialized secret data of the root. + #[arg(short, long)] + root_pvt: InputArg, + }, } #[derive(Debug, Subcommand)] diff --git a/src/dapol_config.rs b/src/dapol_config.rs index 8304806e..23a0b16c 100644 --- a/src/dapol_config.rs +++ b/src/dapol_config.rs @@ -322,6 +322,7 @@ impl DapolConfig { } /// Try to construct a [crate][DapolTree] from the config.
+ // STENT TODO rather call this create_tree pub fn parse(self) -> Result { debug!("Parsing config to create a new DAPOL tree: {:?}", self); diff --git a/src/dapol_tree.rs b/src/dapol_tree.rs index d36eac93..92fb8094 100644 --- a/src/dapol_tree.rs +++ b/src/dapol_tree.rs @@ -1,3 +1,4 @@ +use bulletproofs::PedersenGens; use curve25519_dalek_ng::{ristretto::RistrettoPoint, scalar::Scalar}; use log::{debug, info}; use primitive_types::H256; @@ -7,6 +8,7 @@ use std::path::PathBuf; use crate::{ accumulators::{Accumulator, AccumulatorType, NdmSmt, NdmSmtError}, read_write_utils::{self}, + secret, utils::LogOnErr, AggregationFactor, Entity, EntityId, Height, InclusionProof, MaxLiability, MaxThreadCount, Salt, Secret, @@ -15,6 +17,12 @@ use crate::{ const SERIALIZED_TREE_EXTENSION: &str = "dapoltree"; const SERIALIZED_TREE_FILE_PREFIX: &str = "proof_of_liabilities_merkle_sum_tree_"; +const SERIALIZED_ROOT_PUB_FILE_PREFIX: &str = "root_public_data_"; +const SERIALIZED_ROOT_PVT_FILE_PREFIX: &str = "root_secret_data_"; + +// ------------------------------------------------------------------------------------------------- +// Main struct. + /// Proof of Liabilities Sparse Merkle Sum Tree. /// /// This is the top-most module in the hierarchy of the [dapol] crate. @@ -31,6 +39,32 @@ pub struct DapolTree { max_liability: MaxLiability, } +// ------------------------------------------------------------------------------------------------- +// Periphery structs. + +/// The public values of the root node. +/// +/// These values should be put on a Public Bulletin Board (such as a blockchain) +/// to legitimize the proof of liabilities. Without doing this there is no +/// guarantee to the user that their inclusion proof is checked against the same +/// data as other users' inclusion proofs. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RootPublicData { + pub hash: H256, + pub commitment: RistrettoPoint, +} + +/// The secret values of the root node. 
+/// +/// These are the values that are used to construct the Pedersen commitment. +/// These values should not be shared if the tree owner does not want to +/// disclose their total liability. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RootSecretData { + pub liability: u64, + pub blinding_factor: Scalar, +} + // ------------------------------------------------------------------------------------------------- // Construction & proof generation. @@ -133,33 +167,7 @@ impl DapolTree { max_liability, }; - info!( - "\nDAPOL tree has been constructed. Public data:\n \ - - accumulator type: {}\n \ - - height: {}\n \ - - salt_b: 0x{}\n \ - - salt_s: 0x{}\n \ - - root hash: 0x{}\n \ - - root commitment: {:?}", - accumulator_type, - height.as_u32(), - salt_b - .as_bytes() - .iter() - .map(|b| format!("{:02x}", b)) - .collect::(), - salt_s - .as_bytes() - .iter() - .map(|b| format!("{:02x}", b)) - .collect::(), - tree.root_hash() - .as_bytes() - .iter() - .map(|b| format!("{:02x}", b)) - .collect::(), - tree.root_commitment() - ); + tree.log_successful_tree_creation(); Ok(tree) } @@ -208,6 +216,27 @@ impl DapolTree { ), } } + + /// Check that the public Pedersen commitment corresponds to the secret + /// values of the root. + /// + /// If the secret data does not match the commitment then false is returned, + /// otherwise true. 
+ pub fn verify_root_commitment( + public_commitment: &RistrettoPoint, + root_secret_data: &RootSecretData, + ) -> Result<(), DapolTreeError> { + let commitment = PedersenGens::default().commit( + Scalar::from(root_secret_data.liability), + root_secret_data.blinding_factor, + ); + + if commitment == *public_commitment { + Ok(()) + } else { + Err(DapolTreeError::RootVerificationError) + } + } } // ------------------------------------------------------------------------------------------------- @@ -259,8 +288,11 @@ impl DapolTree { /// /// These values can be made public and do not disclose secret information /// about the tree such as the number of leaf nodes or their liabilities. - pub fn root_public_data(&self) -> (&H256, &RistrettoPoint) { - (self.root_hash(), self.root_commitment()) + pub fn root_public_data(&self) -> RootPublicData { + RootPublicData { + hash: self.root_hash().clone(), + commitment: self.root_commitment().clone(), + } } /// Liability & blinding factor that make up the Pederesen commitment of @@ -268,8 +300,11 @@ impl DapolTree { /// /// Neither of these values should be made public if the owner of the tree /// does not want to disclose the total liability sum of their users. 
- pub fn root_secret_data(&self) -> (u64, &Scalar) { - (self.root_liability(), self.root_blinding_factor()) + pub fn root_secret_data(&self) -> RootSecretData { + RootSecretData { + liability: self.root_liability(), + blinding_factor: self.root_blinding_factor().clone(), + } } #[doc = include_str!("./shared_docs/root_hash.md")] @@ -284,12 +319,12 @@ impl DapolTree { #[doc = include_str!("./shared_docs/root_liability.md")] pub fn root_liability(&self) -> u64 { - self.root_liability() + self.accumulator.root_liability() } #[doc = include_str!("./shared_docs/root_blinding_factor.md")] pub fn root_blinding_factor(&self) -> &Scalar { - self.root_blinding_factor() + self.accumulator.root_blinding_factor() } } @@ -297,47 +332,34 @@ impl DapolTree { // Serialization & deserialization. impl DapolTree { - /// Try deserialize from the given file path. - /// - /// The file is assumed to be in [bincode] format. - /// - /// An error is logged and returned if - /// 1. The file cannot be opened. - /// 2. The [bincode] deserializer fails. - pub fn deserialize(path: PathBuf) -> Result { - debug!( - "Deserializing accumulator from file {:?}", - path.clone().into_os_string() - ); - - match path.extension() { - Some(ext) => { - if ext != SERIALIZED_TREE_EXTENSION { - Err(read_write_utils::ReadWriteError::UnsupportedFileExtension { - expected: SERIALIZED_TREE_EXTENSION.to_owned(), - actual: ext.to_os_string(), - })?; - } - } - None => Err(read_write_utils::ReadWriteError::NotAFile( - path.clone().into_os_string(), - ))?, - } - - let dapol_tree: DapolTree = - read_write_utils::deserialize_from_bin_file(path.clone()).log_on_err()?; - - let root_hash = match &dapol_tree.accumulator { - Accumulator::NdmSmt(ndm_smt) => ndm_smt.root_hash(), - }; - + fn log_successful_tree_creation(&self) { info!( - "Successfully deserialized dapol tree from file {:?} with root hash {:?}", - path.clone().into_os_string(), - root_hash + "\nDAPOL tree has been constructed. 
Public data:\n \ + - accumulator type: {}\n \ + - height: {}\n \ + - salt_b: 0x{}\n \ + - salt_s: 0x{}\n \ + - root hash: 0x{}\n \ + - root commitment: {:?}", + self.accumulator_type(), + self.height().as_u32(), + self.salt_b + .as_bytes() + .iter() + .map(|b| format!("{:02x}", b)) + .collect::(), + self.salt_s + .as_bytes() + .iter() + .map(|b| format!("{:02x}", b)) + .collect::(), + self.root_hash() + .as_bytes() + .iter() + .map(|b| format!("{:02x}", b)) + .collect::(), + self.root_commitment().compress() ); - - Ok(dapol_tree) } /// Parse `path` as one that points to a serialized dapol tree file. @@ -347,7 +369,7 @@ impl DapolTree { /// `path`. 2. Non-existing directory: in this case all dirs in the path /// are created, and a default file name is appended. /// 3. File in existing dir: in this case the extension is checked to be - /// [SERIALIZED_TREE_EXTENSION], then `path` is returned. + /// ".[SERIALIZED_TREE_EXTENSION]", then `path` is returned. /// 4. File in non-existing dir: dirs in the path are created and the file /// extension is checked. /// @@ -359,9 +381,9 @@ impl DapolTree { /// use std::path::PathBuf; /// /// let dir = PathBuf::from("./"); - /// let path = DapolTree::parse_serialization_path(dir).unwrap(); + /// let path = DapolTree::parse_tree_serialization_path(dir).unwrap(); /// ``` - pub fn parse_serialization_path( + pub fn parse_tree_serialization_path( path: PathBuf, ) -> Result { read_write_utils::parse_serialization_path( @@ -371,15 +393,98 @@ impl DapolTree { ) } - /// Serialize to a file. + /// Parse `path` as one that points to a json file containing the public + /// data of the root node. + /// + /// `path` can be either of the following: + /// 1. Existing directory: in this case a default file name is appended to + /// `path`. 2. Non-existing directory: in this case all dirs in the path + /// are created, and a default file name is appended. + /// 3. 
File in existing dir: in this case the extension is checked to be + /// ".json", then `path` is returned. + /// 4. File in non-existing dir: dirs in the path are created and the file + /// extension is checked. + /// + /// The file prefix is [SERIALIZED_ROOT_PUB_FILE_PREFIX]. + /// + /// Example: + /// ``` + /// use dapol::DapolTree; + /// use std::path::PathBuf; + /// + /// let dir = PathBuf::from("./"); + /// let path = DapolTree::parse_root_public_data_serialization_path(dir).unwrap(); + /// ``` + pub fn parse_root_public_data_serialization_path( + path: PathBuf, + ) -> Result { + read_write_utils::parse_serialization_path(path, "json", SERIALIZED_ROOT_PUB_FILE_PREFIX) + } + + /// Parse `path` as one that points to a json file containing the secret + /// data of the root node. + /// + /// `path` can be either of the following: + /// 1. Existing directory: in this case a default file name is appended to + /// `path`. 2. Non-existing directory: in this case all dirs in the path + /// are created, and a default file name is appended. + /// 3. File in existing dir: in this case the extension is checked to be + /// ".json", then `path` is returned. + /// 4. File in non-existing dir: dirs in the path are created and the file + /// extension is checked. /// - /// Serialization is done using [bincode] + /// The file prefix is [SERIALIZED_ROOT_PVT_FILE_PREFIX]. + /// + /// Example: + /// ``` + /// use dapol::DapolTree; + /// use std::path::PathBuf; + /// + /// let dir = PathBuf::from("./"); + /// let path = DapolTree::parse_root_secret_data_serialization_path(dir).unwrap(); + /// ``` + pub fn parse_root_secret_data_serialization_path( + path: PathBuf, + ) -> Result { + read_write_utils::parse_serialization_path(path, "json", SERIALIZED_ROOT_PVT_FILE_PREFIX) + } + + /// Serialize the whole tree to a file. + /// + /// Serialization is done using [bincode]. /// /// An error is returned if /// 1. [bincode] fails to serialize the file. /// 2. 
There is an issue opening or writing the file. + /// + /// `path` can be either of the following: + /// 1. Existing directory: in this case a default file name is appended to + /// `path`. 2. Non-existing directory: in this case all dirs in the path + /// are created, and a default file name is appended. + /// 3. File in existing dir: in this case the extension is checked to be + /// ".[SERIALIZED_TREE_EXTENSION]", then `path` is returned. + /// 4. File in non-existing dir: dirs in the path are created and the file + /// extension is checked. + /// + /// The file prefix is [SERIALIZED_TREE_FILE_PREFIX]. + /// + /// Example: + /// ``` + /// use dapol::{DapolTree, DapolConfig}; + /// use std::path::Path; + /// + /// let src_dir = env!("CARGO_MANIFEST_DIR"); + /// let examples_dir = Path::new(&src_dir).join("examples"); + /// + /// let config_file_path = examples_dir.join("dapol_config_example.toml"); + /// let dapol_config = DapolConfig::deserialize(config_file_path).unwrap(); + /// let dapol_tree = dapol_config.parse().unwrap(); + /// + /// let tree_path = examples_dir.join("my_serialized_tree_for_testing.dapoltree"); + /// let _ = dapol_tree.serialize(tree_path).unwrap(); + /// ``` pub fn serialize(&self, path: PathBuf) -> Result { - let path = DapolTree::parse_serialization_path(path)?; + let path = DapolTree::parse_tree_serialization_path(path)?; info!( "Serializing accumulator to file {:?}", @@ -390,6 +495,186 @@ impl DapolTree { Ok(path) } + + /// Serialize the public root node data to a file. + /// + /// The data that will be serialized to a json file: + /// - Pedersen commitment + /// - hash + /// + /// An error is returned if + /// 1. [serde_json] fails to serialize the file. + /// 2. There is an issue opening or writing to the file. + /// + /// `path` can be either of the following: + /// 1. Existing directory: in this case a default file name is appended to + /// `path`. 2. 
Non-existing directory: in this case all dirs in the path + /// are created, and a default file name is appended. + /// 3. File in existing dir: in this case the extension is checked to be + /// ".json", then `path` is returned. + /// 4. File in non-existing dir: dirs in the path are created and the file + /// extension is checked. + /// + /// The file prefix is [SERIALIZED_ROOT_PUB_FILE_PREFIX]. + /// + /// Example: + /// ``` + /// use dapol::{DapolTree, DapolConfig}; + /// use std::path::Path; + /// + /// let src_dir = env!("CARGO_MANIFEST_DIR"); + /// let examples_dir = Path::new(&src_dir).join("examples"); + /// let config_file_path = examples_dir.join("dapol_config_example.toml"); + /// let dapol_config = DapolConfig::deserialize(config_file_path).unwrap(); + /// let dapol_tree = dapol_config.parse().unwrap(); + /// + /// let public_root_path = examples_dir.join("root_public_data.json"); + /// let _ = dapol_tree.serialize_public_root_data(public_root_path).unwrap(); + /// ``` + pub fn serialize_public_root_data(&self, path: PathBuf) -> Result { + let public_root_data: RootPublicData = self.root_public_data(); + let path = DapolTree::parse_root_public_data_serialization_path(path.clone())?; + read_write_utils::serialize_to_json_file(&public_root_data, path.clone())?; + + Ok(path) + } + + /// Serialize the secret root node data to a file. + /// + /// The data that will be serialized to a json file: + /// - liability + /// - blinding factor + /// (together these open the root's Pedersen commitment) + /// + /// An error is returned if + /// 1. [serde_json] fails to serialize any of the files. + /// 2. There is an issue opening or writing to any of the files. + /// + /// `path` can be either of the following: + /// 1. Existing directory: in this case a default file name is appended to + /// `path`. 2.
File in existing dir: in this case the extension is checked to be + /// ".json", then `path` is returned. + /// 4. File in non-existing dir: dirs in the path are created and the file + /// extension is checked. + /// + /// The file prefix is [SERIALIZED_ROOT_PVT_FILE_PREFIX]. + /// + /// Example: + /// ``` + /// use dapol::{DapolTree, DapolConfig}; + /// use std::path::Path; + /// + /// let src_dir = env!("CARGO_MANIFEST_DIR"); + /// let examples_dir = Path::new(&src_dir).join("examples"); + /// let config_file_path = examples_dir.join("dapol_config_example.toml"); + /// let dapol_config = DapolConfig::deserialize(config_file_path).unwrap(); + /// let dapol_tree = dapol_config.parse().unwrap(); + /// + /// let secret_root_path = examples_dir.join("root_secret_data.json"); + /// let _ = dapol_tree.serialize_secret_root_data(secret_root_path).unwrap(); + /// ``` + pub fn serialize_secret_root_data(&self, dir: PathBuf) -> Result { + let secret_root_data: RootSecretData = self.root_secret_data(); + let path = DapolTree::parse_root_secret_data_serialization_path(dir.clone())?; + read_write_utils::serialize_to_json_file(&secret_root_data, path.clone())?; + + Ok(path) + } + + /// Deserialize the tree from the given file path. + /// + /// The file is assumed to be in [bincode] format. + /// + /// An error is logged and returned if + /// 1. The file cannot be opened. + /// 2. The [bincode] deserializer fails. + /// 3. 
The file extension is not ".[SERIALIZED_TREE_EXTENSION]" + /// + /// Example: + /// ``` + /// use dapol::{DapolTree, DapolConfig}; + /// use std::path::Path; + /// + /// let src_dir = env!("CARGO_MANIFEST_DIR"); + /// let examples_dir = Path::new(&src_dir).join("examples"); + /// let tree_path = examples_dir.join("my_serialized_tree_for_testing.dapoltree"); + /// let _ = DapolTree::deserialize(tree_path).unwrap(); + /// ``` + pub fn deserialize(path: PathBuf) -> Result { + debug!( + "Deserializing DapolTree from file {:?}", + path.clone().into_os_string() + ); + + read_write_utils::check_deserialization_path(&path, SERIALIZED_TREE_EXTENSION)?; + + let dapol_tree: DapolTree = + read_write_utils::deserialize_from_bin_file(path.clone()).log_on_err()?; + + dapol_tree.log_successful_tree_creation(); + + Ok(dapol_tree) + } + + /// Deserialize the public root data from the given file path. + /// + /// The file is assumed to be in json format. + /// + /// An error is logged and returned if + /// 1. The file cannot be opened. + /// 2. The [serde_json] deserializer fails. + /// 3. The file extension is not ".[SERIALIZED_ROOT_PUB_FILE_PREFIX]" + /// + /// Example: + /// ``` + /// use dapol::DapolTree; + /// use std::path::Path; + /// + /// let src_dir = env!("CARGO_MANIFEST_DIR"); + /// let examples_dir = Path::new(&src_dir).join("examples"); + /// let public_root_path = examples_dir.join("root_public_data.json"); + /// + /// let public_root_data = DapolTree::deserialize_public_root_data(public_root_path).unwrap(); + /// ``` + pub fn deserialize_public_root_data(path: PathBuf) -> Result { + read_write_utils::check_deserialization_path(&path, "json")?; + + let root_public_data: RootPublicData = + read_write_utils::deserialize_from_json_file(path.clone()).log_on_err()?; + + Ok(root_public_data) + } + + /// Deserialize the secret root data from the given file path. + /// + /// The file is assumed to be in json format. + /// + /// An error is logged and returned if + /// 1. 
The file cannot be opened. + /// 2. The [serde_json] deserializer fails. + /// 3. The file extension is not ".[SERIALIZED_ROOT_PUB_FILE_PREFIX]" + /// + /// Example: + /// ``` + /// use dapol::DapolTree; + /// use std::path::Path; + /// + /// let src_dir = env!("CARGO_MANIFEST_DIR"); + /// let examples_dir = Path::new(&src_dir).join("examples"); + /// let secret_root_path = examples_dir.join("root_secret_data.json"); + /// + /// let secret_root_data = DapolTree::deserialize_secret_root_data(secret_root_path).unwrap(); + /// ``` + pub fn deserialize_secret_root_data(path: PathBuf) -> Result { + read_write_utils::check_deserialization_path(&path, "json")?; + + let root_secret_data: RootSecretData = + read_write_utils::deserialize_from_json_file(path.clone()).log_on_err()?; + + Ok(root_secret_data) + } } // ------------------------------------------------------------------------------------------------- @@ -402,6 +687,8 @@ pub enum DapolTreeError { SerdeError(#[from] read_write_utils::ReadWriteError), #[error("Error constructing a new NDM-SMT")] NdmSmtConstructionError(#[from] NdmSmtError), + #[error("Verification of root data failed")] + RootVerificationError, } // ------------------------------------------------------------------------------------------------- @@ -414,7 +701,7 @@ mod tests { accumulators, AccumulatorType, DapolTree, Entity, EntityId, Height, MaxLiability, MaxThreadCount, Salt, Secret, }; - use std::path::PathBuf; + use std::path::{Path, PathBuf}; use std::str::FromStr; #[test] @@ -487,9 +774,13 @@ mod tests { #[test] fn serde_does_not_change_tree() { let tree = new_tree(); - let path = - PathBuf::from_str("./examples/my_serialized_tree_for_unit_tests.dapoltree").unwrap(); - let path = tree.serialize(path).unwrap(); + + let src_dir = env!("CARGO_MANIFEST_DIR"); + let examples_dir = Path::new(&src_dir).join("examples"); + let path = examples_dir.join("my_serialized_tree_for_testing.dapoltree"); + let path_2 = tree.serialize(path.clone()).unwrap(); + 
assert_eq!(path, path_2); + let tree_2 = DapolTree::deserialize(path).unwrap(); assert_eq!(tree.master_secret(), tree_2.master_secret()); @@ -505,7 +796,7 @@ mod tests { fn serialization_path_parser_fails_for_unsupported_extensions() { let path = PathBuf::from_str("./mytree.myext").unwrap(); - let res = DapolTree::parse_serialization_path(path); + let res = DapolTree::parse_tree_serialization_path(path); assert_err!( res, Err(read_write_utils::ReadWriteError::UnsupportedFileExtension { @@ -518,7 +809,7 @@ mod tests { #[test] fn serialization_path_parser_gives_correct_file_prefix() { let path = PathBuf::from_str("./").unwrap(); - let path = DapolTree::parse_serialization_path(path).unwrap(); + let path = DapolTree::parse_tree_serialization_path(path).unwrap(); assert!(path .to_str() .unwrap() diff --git a/src/main.rs b/src/main.rs index 35e05920..e64a358c 100644 --- a/src/main.rs +++ b/src/main.rs @@ -21,6 +21,7 @@ fn main() { build_kind, gen_proofs, serialize, + root_serialize, } => { initialize_machine_parallelism(); @@ -35,7 +36,7 @@ fn main() { match serialize { Some(patharg) => { let path = patharg.into_path().expect("Expected a file path, not stdout"); - DapolTree::parse_serialization_path(path).log_on_err().ok() + DapolTree::parse_tree_serialization_path(path).log_on_err().ok() } None => None, } @@ -87,7 +88,9 @@ fn main() { .if_none_then(|| { debug!("No serialization path set, skipping serialization of the tree"); }) - .consume(|path| { dapol_tree.serialize(path).unwrap(); }); + .consume(|path| { + dapol_tree.serialize(path).unwrap(); + }); if let Some(patharg) = gen_proofs { let entity_ids = EntityIdsParser::from( @@ -107,6 +110,21 @@ fn main() { proof.serialize(&entity_id, dir.clone()).log_on_err_unwrap(); } } + + if let Some(patharg) = root_serialize { + let path = patharg + .into_path() + .expect("Expected a file path, not stdout"); + if path.is_dir() { + panic!("Root serialization path must be a directory so multiple files can be created"); + } + 
dapol_tree + .serialize_public_root_data(path.clone()) + .log_on_err_unwrap(); + dapol_tree + .serialize_secret_root_data(path) + .log_on_err_unwrap(); + } } Command::GenProofs { entity_ids, @@ -136,16 +154,13 @@ fn main() { for entity_id in entity_ids { let proof = dapol_tree - .generate_inclusion_proof_with( - &entity_id, - aggregation_factor.clone(), - ) + .generate_inclusion_proof_with(&entity_id, aggregation_factor.clone()) .log_on_err_unwrap(); proof.serialize(&entity_id, dir.clone()).log_on_err_unwrap(); } } - Command::VerifyProof { + Command::VerifyInclusionProof { file_path, root_hash, } => { @@ -158,6 +173,19 @@ fn main() { proof.verify(root_hash).log_on_err_unwrap(); } + Command::VerifyRoot { root_pub, root_pvt } => { + let public_root_data = DapolTree::deserialize_public_root_data( + root_pub.into_path().expect("Expected file path, not stdin"), + ) + .log_on_err_unwrap(); + let secret_root_data = DapolTree::deserialize_secret_root_data( + root_pvt.into_path().expect("Expected file path, not stdin"), + ) + .log_on_err_unwrap(); + + DapolTree::verify_root_commitment(&public_root_data.commitment, &secret_root_data) + .log_on_err_unwrap(); + } } } diff --git a/src/read_write_utils.rs b/src/read_write_utils.rs index d4adb57e..00f083fc 100644 --- a/src/read_write_utils.rs +++ b/src/read_write_utils.rs @@ -48,9 +48,37 @@ pub fn deserialize_from_bin_file(path: PathBuf) -> Result( + structure: &T, + path: PathBuf, +) -> Result<(), ReadWriteError> { + let file = File::create(path)?; + serde_json::to_writer_pretty(file, structure)?; + + Ok(()) +} + +/// Try to deserialize the given json file to the specified type. +/// +/// An error is returned if +/// 1. The file cannot be opened. +/// 2. The [serde_json] deserializer fails.
+#[stime("debug")] +pub fn deserialize_from_json_file(path: PathBuf) -> Result { + let file = File::open(path)?; + let buf_reader = BufReader::new(file); + let decoded: T = serde_json::from_reader(buf_reader)?; Ok(decoded) } @@ -124,6 +152,34 @@ pub fn parse_serialization_path( } } +/// Sanity check the path for use in deserialization. +/// +/// The path is checked to +/// 1. Not be a directory +/// 2. Have the correct file extension +pub fn check_deserialization_path( + path: &PathBuf, + expected_ext: &str, +) -> Result<(), ReadWriteError> { + if path.is_dir() { + return Err(ReadWriteError::NotAFile(path.clone().into_os_string())); + } + + match path.extension() { + Some(ext) => { + if ext == expected_ext { + Ok(()) + } else { + Err(ReadWriteError::UnsupportedFileExtension { + expected: expected_ext.to_owned(), + actual: ext.to_os_string(), + }) + } + } + None => Err(ReadWriteError::NoFileExtension(path.clone().into_os_string())), + } +} + // ------------------------------------------------------------------------------------------------- // Errors. 
@@ -131,12 +187,16 @@ pub fn parse_serialization_path( pub enum ReadWriteError { #[error("Problem serializing/deserializing with bincode")] BincodeSerdeError(#[from] bincode::Error), + #[error("Problem serializing/deserializing with serde_json")] + JsonSerdeError(#[from] serde_json::Error), #[error("Problem writing to file")] FileWriteError(#[from] std::io::Error), #[error("Unknown file extension {actual:?}, expected {expected}")] UnsupportedFileExtension { expected: String, actual: OsString }, - #[error("Expected a file but only a path was given: {0:?}")] + #[error("Expected a file but only a directory was given: {0:?}")] NotAFile(OsString), + #[error("No file extension found in path {0:?}")] + NoFileExtension(OsString), } // ------------------------------------------------------------------------------------------------- @@ -186,5 +246,7 @@ mod tests { // TODO test that intermediate dirs are created, but how to do this // without actually creating dirs? + + // TODO test binary & json se/de workse } } From 2b71a3c23879e94262bcbff2774f6cb9ee58a868 Mon Sep 17 00:00:00 2001 From: Stent Date: Sat, 20 Jan 2024 10:26:50 +0000 Subject: [PATCH 06/12] Docs & renaming --- README.md | 4 ++- examples/main.rs | 30 +++++++++++++++++++-- examples/public_root_data.json | 37 +++++++++++++++++++++++++ examples/root_public_data.json | 37 ------------------------- examples/root_secret_data.json | 37 ------------------------- examples/secret_root_data.json | 37 +++++++++++++++++++++++++ src/dapol_tree.rs | 46 ++++++++++++++++---------------- src/lib.rs | 14 +++++----- src/shared_docs/max_liability.md | 2 +- 9 files changed, 137 insertions(+), 107 deletions(-) create mode 100644 examples/public_root_data.json delete mode 100644 examples/root_public_data.json delete mode 100644 examples/root_secret_data.json create mode 100644 examples/secret_root_data.json diff --git a/README.md b/README.md index 9d9fac1b..b0b7552e 100644 --- a/README.md +++ b/README.md @@ -20,8 +20,10 @@ use as is. 
The code has _not_ been audited yet (as of Nov 2023) and so it is not Important tasks still to be done: - Write a spec: https://github.com/silversixpence-crypto/dapol/issues/17 - Support the Deterministic mapping SMT accumulator type: https://github.com/silversixpence-crypto/dapol/issues/9 -- Fuzz some of the unit tests: https://github.com/silversixpence-crypto/dapol/issues/46 - Sort out version issues with dependencies: https://github.com/silversixpence-crypto/dapol/issues/11 +- Allow the tree to be updatable: https://github.com/silversixpence-crypto/dapol/issues/109 +- Finish integration tests: https://github.com/silversixpence-crypto/dapol/issues/42 +- Use a database as the backend storage system (as opposed to memory): https://github.com/silversixpence-crypto/dapol/issues/44 ## How this code can be used diff --git a/examples/main.rs b/examples/main.rs index f719a3c8..27690320 100644 --- a/examples/main.rs +++ b/examples/main.rs @@ -1,8 +1,10 @@ //! Example of a full PoL workflow. //! //! 1. Build a tree -//! 2. Generate an inclusion proof -//! 3. Verify an inclusion proof +//! 2. Serialize tree & root node +//! 3. Verify a root node +//! 4. Generate an inclusion proof +//! 5. Verify an inclusion proof //! //! At the time of writing (Nov 2023) only the NDM-SMT accumulator is supported //! so this is the only type of tree that is used in this example. @@ -14,6 +16,7 @@ extern crate clap_verbosity_flag; extern crate csv; extern crate dapol; +use dapol::DapolTree; use dapol::utils::LogOnErrUnwrap; fn main() { @@ -44,6 +47,29 @@ fn main() { // Since the mappings are not the same the root hashes won't be either. assert_ne!(dapol_tree_1.root_hash(), dapol_tree_2.root_hash()); + // ========================================================================= + // (De)serialization. 
+ + let src_dir = env!("CARGO_MANIFEST_DIR"); + let examples_dir = Path::new(&src_dir).join("examples"); + let serialization_path = examples_dir.join("my_serialized_tree_for_testing.dapoltree"); + let _ = dapol_tree_1.serialize(serialization_path.clone()).unwrap(); + + let dapol_tree_1 = DapolTree::deserialize(serialization_path).unwrap(); + + let public_root_path = examples_dir.join("public_root_data.json"); + let _ = dapol_tree_1.serialize_public_root_data(public_root_path.clone()).unwrap(); + let public_root_data = DapolTree::deserialize_public_root_data(public_root_path).unwrap(); + + let secret_root_path = examples_dir.join("secret_root_data.json"); + let _ = dapol_tree_1.serialize_secret_root_data(secret_root_path.clone()).unwrap(); + let secret_root_data = DapolTree::deserialize_secret_root_data(secret_root_path).unwrap(); + + // ========================================================================= + // Root node verification. + + DapolTree::verify_root_commitment(&public_root_data.commitment, &secret_root_data).unwrap(); + // ========================================================================= // Inclusion proof generation & verification. 
diff --git a/examples/public_root_data.json b/examples/public_root_data.json new file mode 100644 index 00000000..46c81151 --- /dev/null +++ b/examples/public_root_data.json @@ -0,0 +1,37 @@ +{ + "hash": "0x8bf4c6912041f4eebb5a6b985e53c5e6f8898042498803525aa6c4e56454d483", + "commitment": [ + 40, + 63, + 79, + 24, + 194, + 36, + 148, + 152, + 49, + 214, + 80, + 38, + 80, + 108, + 148, + 9, + 131, + 128, + 73, + 120, + 146, + 14, + 194, + 150, + 106, + 103, + 245, + 173, + 43, + 248, + 166, + 96 + ] +} \ No newline at end of file diff --git a/examples/root_public_data.json b/examples/root_public_data.json deleted file mode 100644 index a3d3aed8..00000000 --- a/examples/root_public_data.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "hash": "0x1a53a98ea36766fd0b5f0337270b1956d4e037682995b09ac89819c83633b04d", - "commitment": [ - 48, - 191, - 128, - 114, - 45, - 81, - 213, - 207, - 193, - 149, - 16, - 154, - 245, - 148, - 208, - 38, - 217, - 80, - 160, - 158, - 139, - 139, - 204, - 216, - 59, - 179, - 248, - 64, - 47, - 159, - 240, - 80 - ] -} \ No newline at end of file diff --git a/examples/root_secret_data.json b/examples/root_secret_data.json deleted file mode 100644 index 8375b703..00000000 --- a/examples/root_secret_data.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "liability": 53896881, - "blinding_factor": [ - 159, - 205, - 144, - 12, - 44, - 61, - 76, - 252, - 206, - 44, - 84, - 213, - 35, - 6, - 234, - 214, - 136, - 130, - 50, - 99, - 89, - 206, - 161, - 174, - 38, - 231, - 157, - 240, - 1, - 109, - 19, - 11 - ] -} \ No newline at end of file diff --git a/examples/secret_root_data.json b/examples/secret_root_data.json new file mode 100644 index 00000000..cfa11a2c --- /dev/null +++ b/examples/secret_root_data.json @@ -0,0 +1,37 @@ +{ + "liability": 53896881, + "blinding_factor": [ + 15, + 154, + 58, + 19, + 252, + 218, + 134, + 210, + 201, + 24, + 149, + 34, + 174, + 231, + 209, + 86, + 151, + 51, + 209, + 239, + 191, + 188, + 235, + 107, + 118, + 192, + 67, + 97, + 
103, + 148, + 107, + 1 + ] +} \ No newline at end of file diff --git a/src/dapol_tree.rs b/src/dapol_tree.rs index 92fb8094..a4023ad0 100644 --- a/src/dapol_tree.rs +++ b/src/dapol_tree.rs @@ -17,8 +17,8 @@ use crate::{ const SERIALIZED_TREE_EXTENSION: &str = "dapoltree"; const SERIALIZED_TREE_FILE_PREFIX: &str = "proof_of_liabilities_merkle_sum_tree_"; -const SERIALIZED_ROOT_PUB_FILE_PREFIX: &str = "root_public_data_"; -const SERIALIZED_ROOT_PVT_FILE_PREFIX: &str = "root_secret_data_"; +const SERIALIZED_ROOT_PUB_FILE_PREFIX: &str = "public_root_data_"; +const SERIALIZED_ROOT_PVT_FILE_PREFIX: &str = "secret_root_data_"; // ------------------------------------------------------------------------------------------------- // Main struct. @@ -224,11 +224,11 @@ impl DapolTree { /// otherwise true. pub fn verify_root_commitment( public_commitment: &RistrettoPoint, - root_secret_data: &RootSecretData, + secret_root_data: &RootSecretData, ) -> Result<(), DapolTreeError> { let commitment = PedersenGens::default().commit( - Scalar::from(root_secret_data.liability), - root_secret_data.blinding_factor, + Scalar::from(secret_root_data.liability), + secret_root_data.blinding_factor, ); if commitment == *public_commitment { @@ -288,7 +288,7 @@ impl DapolTree { /// /// These values can be made public and do not disclose secret information /// about the tree such as the number of leaf nodes or their liabilities. - pub fn root_public_data(&self) -> RootPublicData { + pub fn public_root_data(&self) -> RootPublicData { RootPublicData { hash: self.root_hash().clone(), commitment: self.root_commitment().clone(), @@ -300,7 +300,7 @@ impl DapolTree { /// /// Neither of these values should be made public if the owner of the tree /// does not want to disclose the total liability sum of their users. 
- pub fn root_secret_data(&self) -> RootSecretData { + pub fn secret_root_data(&self) -> RootSecretData { RootSecretData { liability: self.root_liability(), blinding_factor: self.root_blinding_factor().clone(), @@ -413,9 +413,9 @@ impl DapolTree { /// use std::path::PathBuf; /// /// let dir = PathBuf::from("./"); - /// let path = DapolTree::parse_root_public_data_serialization_path(dir).unwrap(); + /// let path = DapolTree::parse_public_root_data_serialization_path(dir).unwrap(); /// ``` - pub fn parse_root_public_data_serialization_path( + pub fn parse_public_root_data_serialization_path( path: PathBuf, ) -> Result { read_write_utils::parse_serialization_path(path, "json", SERIALIZED_ROOT_PUB_FILE_PREFIX) @@ -441,9 +441,9 @@ impl DapolTree { /// use std::path::PathBuf; /// /// let dir = PathBuf::from("./"); - /// let path = DapolTree::parse_root_secret_data_serialization_path(dir).unwrap(); + /// let path = DapolTree::parse_secret_root_data_serialization_path(dir).unwrap(); /// ``` - pub fn parse_root_secret_data_serialization_path( + pub fn parse_secret_root_data_serialization_path( path: PathBuf, ) -> Result { read_write_utils::parse_serialization_path(path, "json", SERIALIZED_ROOT_PVT_FILE_PREFIX) @@ -528,12 +528,12 @@ impl DapolTree { /// let dapol_config = DapolConfig::deserialize(config_file_path).unwrap(); /// let dapol_tree = dapol_config.parse().unwrap(); /// - /// let public_root_path = examples_dir.join("root_public_data.json"); + /// let public_root_path = examples_dir.join("public_root_data.json"); /// let _ = dapol_tree.serialize_public_root_data(public_root_path).unwrap(); /// ``` pub fn serialize_public_root_data(&self, path: PathBuf) -> Result { - let public_root_data: RootPublicData = self.root_public_data(); - let path = DapolTree::parse_root_public_data_serialization_path(path.clone())?; + let public_root_data: RootPublicData = self.public_root_data(); + let path = DapolTree::parse_public_root_data_serialization_path(path.clone())?; 
read_write_utils::serialize_to_json_file(&public_root_data, path.clone())?; Ok(path) @@ -572,12 +572,12 @@ impl DapolTree { /// let dapol_config = DapolConfig::deserialize(config_file_path).unwrap(); /// let dapol_tree = dapol_config.parse().unwrap(); /// - /// let secret_root_path = examples_dir.join("root_secret_data.json"); + /// let secret_root_path = examples_dir.join("secret_root_data.json"); /// let _ = dapol_tree.serialize_secret_root_data(secret_root_path).unwrap(); /// ``` pub fn serialize_secret_root_data(&self, dir: PathBuf) -> Result { - let secret_root_data: RootSecretData = self.root_secret_data(); - let path = DapolTree::parse_root_secret_data_serialization_path(dir.clone())?; + let secret_root_data: RootSecretData = self.secret_root_data(); + let path = DapolTree::parse_secret_root_data_serialization_path(dir.clone())?; read_write_utils::serialize_to_json_file(&secret_root_data, path.clone())?; Ok(path) @@ -634,17 +634,17 @@ impl DapolTree { /// /// let src_dir = env!("CARGO_MANIFEST_DIR"); /// let examples_dir = Path::new(&src_dir).join("examples"); - /// let public_root_path = examples_dir.join("root_public_data.json"); + /// let public_root_path = examples_dir.join("public_root_data.json"); /// /// let public_root_data = DapolTree::deserialize_public_root_data(public_root_path).unwrap(); /// ``` pub fn deserialize_public_root_data(path: PathBuf) -> Result { read_write_utils::check_deserialization_path(&path, "json")?; - let root_public_data: RootPublicData = + let public_root_data: RootPublicData = read_write_utils::deserialize_from_json_file(path.clone()).log_on_err()?; - Ok(root_public_data) + Ok(public_root_data) } /// Deserialize the secret root data from the given file path. 
@@ -663,17 +663,17 @@ impl DapolTree { /// /// let src_dir = env!("CARGO_MANIFEST_DIR"); /// let examples_dir = Path::new(&src_dir).join("examples"); - /// let secret_root_path = examples_dir.join("root_secret_data.json"); + /// let secret_root_path = examples_dir.join("secret_root_data.json"); /// /// let secret_root_data = DapolTree::deserialize_secret_root_data(secret_root_path).unwrap(); /// ``` pub fn deserialize_secret_root_data(path: PathBuf) -> Result { read_write_utils::check_deserialization_path(&path, "json")?; - let root_secret_data: RootSecretData = + let secret_root_data: RootSecretData = read_write_utils::deserialize_from_json_file(path.clone()).log_on_err()?; - Ok(root_secret_data) + Ok(secret_root_data) } } diff --git a/src/lib.rs b/src/lib.rs index 0de80dae..282fb559 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -22,11 +22,13 @@ //! This project is currently still a work in progress, but is ready for //! use as is. The code has _not_ been audited yet (as of Nov 2023). Progress can be tracked [here](https://github.com/silversixpence-crypto/dapol/issues/91). //! -//! A Rust crate has not been released yet, progress can be tracked [here](https://github.com/silversixpence-crypto/dapol/issues/13). -//! -//! A spec for this code still needs to be [written](https://github.com/silversixpence-crypto/dapol/issues/17). -//! -//! A fuzzing technique should be used for the unit [tests](https://github.com/silversixpence-crypto/dapol/issues/46). +//! Important tasks still to be done: +//! - [Write a spec](https://github.com/silversixpence-crypto/dapol/issues/17) +//! - [Support the Deterministic mapping SMT accumulator type](https://github.com/silversixpence-crypto/dapol/issues/9) +//! - [Sort out version issues with dependencies](https://github.com/silversixpence-crypto/dapol/issues/11) +//! - [Allow the tree to be updatable](https://github.com/silversixpence-crypto/dapol/issues/109) +//! 
- [Finish integration tests](https://github.com/silversixpence-crypto/dapol/issues/42) +//! - [Use a database as the backend storage system](https://github.com/silversixpence-crypto/dapol/issues/44) (as opposed to memory) //! //! Performance can be [improved](https://github.com/silversixpence-crypto/dapol/issues/44). //! @@ -43,7 +45,7 @@ //! //! ### Rust API //! -//! The library has not been released as a crate yet (as of Nov 2023) but the API has the following capabilities: +//! The API has the following capabilities: //! - build a tree using the builder pattern or a configuration file //! - generate inclusion proofs from a list of entity IDs (tree required) //! - verify an inclusion proof using a root hash (no tree required) diff --git a/src/shared_docs/max_liability.md b/src/shared_docs/max_liability.md index 0f9644df..165b8fed 100644 --- a/src/shared_docs/max_liability.md +++ b/src/shared_docs/max_liability.md @@ -1 +1 @@ -This is a public value representing the maximum amount that any single entity's liability can be, and is used in the range proofs: $[0, 2^{\text{height}} \times \text{max_liability}]$ +This is a public value representing the maximum amount that any single entity's liability can be, and is used in the range proofs: $[0, 2^{\text{height}} \times \text{max liability}]$ From 66d4701ee1c068fa2114da57aa1757327b33fc6c Mon Sep 17 00:00:00 2001 From: Stent Date: Sat, 20 Jan 2024 12:26:05 +0000 Subject: [PATCH 07/12] Move dapoltree doctests to unit tests, and add random seed feature --- Cargo.toml | 5 +- examples/main.rs | 4 +- examples/public_root_data.json | 62 +-- examples/secret_root_data.json | 64 +-- src/accumulators/ndm_smt.rs | 84 +++- src/accumulators/ndm_smt/x_coord_generator.rs | 12 +- src/dapol_config.rs | 96 +++- src/dapol_tree.rs | 468 ++++++++++-------- src/lib.rs | 31 +- src/utils.rs | 2 +- 10 files changed, 540 insertions(+), 288 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 2953d839..ec18117b 100644 --- a/Cargo.toml +++ 
b/Cargo.toml @@ -29,6 +29,8 @@ bench = false primitive-types = { version = "0.12.1", features = ["serde"] } # H256 & U256 (I think parity uses this so maybe we just use that crate instead) thiserror = "1.0" +derive_builder = "0.12.0" +chrono = "0.4.31" # crypto rand = "0.8.5" @@ -63,14 +65,13 @@ serde_json = "1.0.111" bincode = "1.3.3" toml = "0.8.2" csv = "1.3.0" -chrono = "0.4.31" -derive_builder = "0.12.0" # fuzzing arbitrary = { version = "1", optional = true, features = ["derive"] } [features] fuzzing = ["rand/small_rng", "arbitrary"] +testing = [] [dev-dependencies] criterion = "0.5.0" diff --git a/examples/main.rs b/examples/main.rs index 27690320..bb904512 100644 --- a/examples/main.rs +++ b/examples/main.rs @@ -58,11 +58,11 @@ fn main() { let dapol_tree_1 = DapolTree::deserialize(serialization_path).unwrap(); let public_root_path = examples_dir.join("public_root_data.json"); - let _ = dapol_tree_1.serialize_public_root_data(public_root_path.clone()).unwrap(); + // let _ = dapol_tree_1.serialize_public_root_data(public_root_path.clone()).unwrap(); let public_root_data = DapolTree::deserialize_public_root_data(public_root_path).unwrap(); let secret_root_path = examples_dir.join("secret_root_data.json"); - let _ = dapol_tree_1.serialize_secret_root_data(secret_root_path.clone()).unwrap(); + // let _ = dapol_tree_1.serialize_secret_root_data(secret_root_path.clone()).unwrap(); let secret_root_data = DapolTree::deserialize_secret_root_data(secret_root_path).unwrap(); // ========================================================================= diff --git a/examples/public_root_data.json b/examples/public_root_data.json index 46c81151..b7ee2402 100644 --- a/examples/public_root_data.json +++ b/examples/public_root_data.json @@ -1,37 +1,37 @@ { - "hash": "0x8bf4c6912041f4eebb5a6b985e53c5e6f8898042498803525aa6c4e56454d483", + "hash": "0x68534c84b86c0698abf4126f3555dc38480a202a6ca4dfde025d751bdaccce8c", "commitment": [ - 40, - 63, - 79, - 24, - 194, - 36, - 148, - 
152, - 49, - 214, - 80, - 38, - 80, - 108, - 148, - 9, - 131, - 128, - 73, - 120, - 146, - 14, - 194, + 238, + 140, + 55, + 234, + 181, + 121, + 215, + 189, + 227, + 164, + 196, 150, - 106, + 54, + 239, + 110, + 15, + 146, + 251, + 232, + 6, + 154, 103, - 245, - 173, - 43, - 248, - 166, - 96 + 113, + 210, + 252, + 38, + 200, + 64, + 138, + 4, + 134, + 53 ] } \ No newline at end of file diff --git a/examples/secret_root_data.json b/examples/secret_root_data.json index cfa11a2c..db2c0ad8 100644 --- a/examples/secret_root_data.json +++ b/examples/secret_root_data.json @@ -1,37 +1,37 @@ { - "liability": 53896881, + "liability": 1, "blinding_factor": [ - 15, - 154, + 233, + 183, + 205, + 125, + 5, + 61, + 130, + 1, + 208, + 177, + 183, + 150, + 60, + 234, + 88, + 127, 58, - 19, - 252, - 218, - 134, - 210, - 201, - 24, - 149, - 34, - 174, - 231, - 209, - 86, - 151, - 51, - 209, - 239, - 191, - 188, - 235, - 107, - 118, - 192, - 67, - 97, - 103, - 148, - 107, - 1 + 15, + 242, + 33, + 169, + 179, + 242, + 27, + 156, + 29, + 68, + 220, + 124, + 229, + 8, + 5 ] } \ No newline at end of file diff --git a/src/accumulators/ndm_smt.rs b/src/accumulators/ndm_smt.rs index b12b2158..695f302c 100644 --- a/src/accumulators/ndm_smt.rs +++ b/src/accumulators/ndm_smt.rs @@ -89,6 +89,86 @@ impl NdmSmt { height: Height, max_thread_count: MaxThreadCount, entities: Vec, + ) -> Result { + let x_coord_generator = RandomXCoordGenerator::new(&height); + + NdmSmt::new_with_random_x_coord_generator( + master_secret, + salt_b, + salt_s, + height, + max_thread_count, + entities, + x_coord_generator, + ) + } + + /// Constructor for testing purposes. + /// + /// Note: This is **not** cryptographically secure and should only be used + /// for testing. 
+ /// + /// Parameters: + /// - `master_secret`: + #[doc = include_str!("../shared_docs/master_secret.md")] + /// - `salt_b`: + #[doc = include_str!("../shared_docs/salt_b.md")] + /// - `salt_s`: + #[doc = include_str!("../shared_docs/salt_s.md")] + /// - `height`: + #[doc = include_str!("../shared_docs/height.md")] + /// - `max_thread_count`: + #[doc = include_str!("../shared_docs/max_thread_count.md")] + /// - `entities`: + #[doc = include_str!("../shared_docs/entities_vector.md")] + /// Each element in `entities` is converted to an + /// [input leaf node] and randomly assigned a position on the + /// bottom layer of the tree. + /// - `seed`: random seed for the x-coord PRNG mapping algorithm. + /// + /// An [NdmSmtError] is returned if: + /// 1. There are more entities than the height allows i.e. more entities + /// than would fit on the bottom layer. + /// 2. The tree build fails for some reason. + /// 3. There are duplicate entity IDs. + /// + /// The function will panic if there is a problem joining onto a spawned + /// thread, or if concurrent variables are not able to be locked. It's not + /// clear how to recover from these scenarios because variables may be in + /// an unknown state, so rather panic. 
+ /// + /// [input leaf node]: crate::binary_tree::InputLeafNode + #[cfg(any(test, feature = "testing"))] + pub fn new_with_random_seed( + master_secret: Secret, + salt_b: Salt, + salt_s: Salt, + height: Height, + max_thread_count: MaxThreadCount, + entities: Vec, + seed: u64, + ) -> Result { + let x_coord_generator = RandomXCoordGenerator::new_with_seed(&height, seed); + + NdmSmt::new_with_random_x_coord_generator( + master_secret, + salt_b, + salt_s, + height, + max_thread_count, + entities, + x_coord_generator, + ) + } + + fn new_with_random_x_coord_generator( + master_secret: Secret, + salt_b: Salt, + salt_s: Salt, + height: Height, + max_thread_count: MaxThreadCount, + entities: Vec, + mut x_coord_generator: RandomXCoordGenerator, ) -> Result { let master_secret_bytes = master_secret.as_bytes(); let salt_b_bytes = salt_b.as_bytes(); @@ -118,7 +198,6 @@ impl NdmSmt { let tmr = timer!(Level::Debug; "Entity to leaf node conversion"); - let mut x_coord_generator = RandomXCoordGenerator::new(&height); let mut x_coords = Vec::::with_capacity(entities.len()); for _i in 0..entities.len() { @@ -203,7 +282,8 @@ impl NdmSmt { #[doc = include_str!("../shared_docs/salt_b.md")] /// - `salt_s`: #[doc = include_str!("../shared_docs/salt_s.md")] - /// - `entity_id`: unique ID for the entity that the proof will be generated for. + /// - `entity_id`: unique ID for the entity that the proof will be generated + /// for. /// - `aggregation_factor` is used to determine how many of the range proofs /// are aggregated. Those that do not form part of the aggregated proof /// are just proved individually. 
The aggregation is a feature of the diff --git a/src/accumulators/ndm_smt/x_coord_generator.rs b/src/accumulators/ndm_smt/x_coord_generator.rs index d987eb86..63e47f0d 100644 --- a/src/accumulators/ndm_smt/x_coord_generator.rs +++ b/src/accumulators/ndm_smt/x_coord_generator.rs @@ -1,5 +1,5 @@ use crate::binary_tree::Height; -use rand::distributions::{Distribution, Uniform}; +use rand::distributions::{Uniform}; use std::collections::HashMap; /// Used for generating unique x-coordinate values on the bottom layer of the @@ -88,7 +88,11 @@ impl RandomXCoordGenerator { } } - #[cfg(any(test, fuzzing))] + /// Constructor using random seed. + /// + /// Note: This is **not** cryptographically secure and should only be + /// used for testing. + #[cfg(any(test, feature = "fuzzing", feature = "testing"))] pub fn new_with_seed(height: &Height, seed: u64) -> Self { RandomXCoordGenerator { used_x_coords: HashMap::::new(), @@ -144,7 +148,7 @@ trait Sampleable { fn sample_range(&mut self, lower: u64, upper: u64) -> u64; } -#[cfg(not(any(test, fuzzing)))] +#[cfg(not(any(test, feature = "fuzzing", feature = "testing")))] mod rng_selector { use rand::distributions::Uniform; use rand::{rngs::ThreadRng, thread_rng, Rng}; @@ -167,7 +171,7 @@ mod rng_selector { } } -#[cfg(any(test, fuzzing))] +#[cfg(any(test, feature = "fuzzing", feature = "testing"))] mod rng_selector { use rand::Rng; use rand::{rngs::SmallRng, SeedableRng}; diff --git a/src/dapol_config.rs b/src/dapol_config.rs index 23a0b16c..6b0ec1e0 100644 --- a/src/dapol_config.rs +++ b/src/dapol_config.rs @@ -1,5 +1,5 @@ use derive_builder::Builder; -use log::{debug, info}; +use log::debug; use serde::Deserialize; use std::{ffi::OsString, fs::File, io::Read, path::PathBuf, str::FromStr}; @@ -89,6 +89,9 @@ pub struct DapolConfig { #[doc = include_str!("./shared_docs/max_thread_count.md")] max_thread_count: MaxThreadCount, + #[builder(setter(custom))] + random_seed: Option, + #[builder(private)] entities: EntityConfig, @@ -220,6 
+223,26 @@ impl DapolConfigBuilder { self } + /// For seeding any PRNG to have deterministic output. + /// + /// Note: This is **not** cryptographically secure and should only be used + /// for testing. + #[cfg(any(test, feature = "testing"))] + pub fn random_seed(&mut self, random_seed: u64) -> &mut Self { + self.random_seed = Some(Some(random_seed)); + self + } + + #[cfg(any(test, feature = "testing"))] + fn get_random_seed(&self) -> Option { + self.random_seed.unwrap_or(None) + } + + #[cfg(not(any(test, feature = "testing")))] + fn get_random_seed(&self) -> Option { + None + } + /// Build the config struct. pub fn build(&self) -> Result { let accumulator_type = @@ -256,6 +279,7 @@ impl DapolConfigBuilder { let height = self.height.unwrap_or_default(); let max_thread_count = self.max_thread_count.unwrap_or_default(); let max_liability = self.max_liability.unwrap_or_default(); + let random_seed = self.get_random_seed(); Ok(DapolConfig { accumulator_type, @@ -266,6 +290,7 @@ impl DapolConfigBuilder { max_thread_count, entities, secrets, + random_seed, }) } } @@ -323,6 +348,59 @@ impl DapolConfig { /// Try to construct a [crate][DapolTree] from the config. // STENT TODO rather call this create_tree + #[cfg(any(test, feature = "testing"))] + pub fn parse(self) -> Result { + debug!("Parsing config to create a new DAPOL tree: {:?}", self); + + let salt_b = self.salt_b; + let salt_s = self.salt_s; + + let entities = EntitiesParser::new() + .with_path_opt(self.entities.file_path) + .with_num_entities_opt(self.entities.num_random_entities) + .parse_file_or_generate_random()?; + + let master_secret = if let Some(path) = self.secrets.file_path { + Ok(DapolConfig::parse_secrets_file(path)?) 
+ } else if let Some(master_secret) = self.secrets.master_secret { + Ok(master_secret) + } else { + Err(DapolConfigError::CannotFindMasterSecret) + }?; + + let dapol_tree = if let Some(random_seed) = self.random_seed { + DapolTree::new_with_random_seed( + self.accumulator_type, + master_secret, + salt_b, + salt_s, + self.max_liability, + self.max_thread_count, + self.height, + entities, + random_seed, + ) + .log_on_err()? + } else { + DapolTree::new( + self.accumulator_type, + master_secret, + salt_b, + salt_s, + self.max_liability, + self.max_thread_count, + self.height, + entities, + ) + .log_on_err()? + }; + + Ok(dapol_tree) + } + + /// Try to construct a [crate][DapolTree] from the config. + // STENT TODO rather call this create_tree + #[cfg(not(any(test, feature = "testing")))] pub fn parse(self) -> Result { debug!("Parsing config to create a new DAPOL tree: {:?}", self); @@ -342,7 +420,7 @@ impl DapolConfig { Err(DapolConfigError::CannotFindMasterSecret) }?; - let dapol_tree = DapolTree::new( + Ok(DapolTree::new( self.accumulator_type, master_secret, salt_b, @@ -352,9 +430,7 @@ impl DapolConfig { self.height, entities, ) - .log_on_err()?; - - Ok(dapol_tree) + .log_on_err()?) } /// Open and parse the secrets file, returning a [crate][Secret]. 
@@ -722,7 +798,10 @@ mod tests { .parse() .unwrap(); - assert_eq!(dapol_tree.entity_mapping().unwrap().len(), num_entities as usize); + assert_eq!( + dapol_tree.entity_mapping().unwrap().len(), + num_entities as usize + ); assert_eq!(dapol_tree.accumulator_type(), AccumulatorType::NdmSmt); assert_eq!(*dapol_tree.height(), height); assert_eq!(*dapol_tree.master_secret(), master_secret); @@ -747,7 +826,10 @@ mod tests { .parse() .unwrap(); - assert_eq!(dapol_tree.entity_mapping().unwrap().len(), num_random_entities as usize); + assert_eq!( + dapol_tree.entity_mapping().unwrap().len(), + num_random_entities as usize + ); } #[test] diff --git a/src/dapol_tree.rs b/src/dapol_tree.rs index a4023ad0..6ee2bf64 100644 --- a/src/dapol_tree.rs +++ b/src/dapol_tree.rs @@ -48,7 +48,7 @@ pub struct DapolTree { /// to legitimize the proof of liabilities. Without doing this there is no /// guarantee to the user that their inclusion proof is checked against the same /// data as other users' inclusion proofs. -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct RootPublicData { pub hash: H256, pub commitment: RistrettoPoint, @@ -59,7 +59,7 @@ pub struct RootPublicData { /// These are the values that are used to construct the Pedersen commitment. /// These values should not be shared if the tree owner does not want to /// disclose their total liability. -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct RootSecretData { pub liability: u64, pub blinding_factor: Scalar, @@ -172,6 +172,78 @@ impl DapolTree { Ok(tree) } + /// Constructor for testing purposes. + /// + /// Note: This is **not** cryptographically secure and should only be used + /// for testing. + /// + /// An error is returned if the underlying accumulator type construction + /// fails. + /// + /// - `accumulator_type`: This value must be set. 
+ #[doc = include_str!("./shared_docs/accumulator_type.md")] + /// - `master_secret`: This value is known only to the tree generator, and + /// is used to determine all other secret values needed in the tree. This + /// value must be set. + /// - `salt_b`: If not set then it will be randomly generated. + #[doc = include_str!("./shared_docs/salt_b.md")] + /// - `salt_s`: If not set then it will be randomly generated. + #[doc = include_str!("./shared_docs/salt_s.md")] + /// - `max_liability`: If not set then a default value is used. + #[doc = include_str!("./shared_docs/max_liability.md")] + /// - `height`: If not set the [default height] will be used + /// [crate][Height]. + #[doc = include_str!("./shared_docs/height.md")] + /// - `max_thread_count`: If not set the max parallelism of the underlying + /// machine will be used. + #[doc = include_str!("./shared_docs/max_thread_count.md")] + /// - `secrets_file_path`: Path to the secrets file. If not present the + /// secrets will be generated randomly. + /// - `entities`: + #[doc = include_str!("./shared_docs/entities_vector.md")] + /// - `seed`: random seed for any PRNG used. 
+ /// + /// [default height]: crate::Height::default + #[cfg(any(test, feature = "testing"))] + pub fn new_with_random_seed( + accumulator_type: AccumulatorType, + master_secret: Secret, + salt_b: Salt, + salt_s: Salt, + max_liability: MaxLiability, + max_thread_count: MaxThreadCount, + height: Height, + entities: Vec, + seed: u64, + ) -> Result { + let accumulator = match accumulator_type { + AccumulatorType::NdmSmt => { + let ndm_smt = NdmSmt::new_with_random_seed( + master_secret.clone(), + salt_b.clone(), + salt_s.clone(), + height, + max_thread_count, + entities, + seed, + )?; + Accumulator::NdmSmt(ndm_smt) + } + }; + + let tree = DapolTree { + accumulator, + master_secret, + salt_b: salt_b.clone(), + salt_s: salt_s.clone(), + max_liability, + }; + + tree.log_successful_tree_creation(); + + Ok(tree) + } + /// Generate an inclusion proof for the given `entity_id`. /// /// Parameters: @@ -374,15 +446,6 @@ impl DapolTree { /// extension is checked. /// /// The file prefix is [SERIALIZED_TREE_FILE_PREFIX]. - /// - /// Example: - /// ``` - /// use dapol::DapolTree; - /// use std::path::PathBuf; - /// - /// let dir = PathBuf::from("./"); - /// let path = DapolTree::parse_tree_serialization_path(dir).unwrap(); - /// ``` pub fn parse_tree_serialization_path( path: PathBuf, ) -> Result { @@ -406,15 +469,6 @@ impl DapolTree { /// extension is checked. /// /// The file prefix is [SERIALIZED_ROOT_PUB_FILE_PREFIX]. - /// - /// Example: - /// ``` - /// use dapol::DapolTree; - /// use std::path::PathBuf; - /// - /// let dir = PathBuf::from("./"); - /// let path = DapolTree::parse_public_root_data_serialization_path(dir).unwrap(); - /// ``` pub fn parse_public_root_data_serialization_path( path: PathBuf, ) -> Result { @@ -434,15 +488,6 @@ impl DapolTree { /// extension is checked. /// /// The file prefix is [SERIALIZED_ROOT_PVT_FILE_PREFIX]. 
- /// - /// Example: - /// ``` - /// use dapol::DapolTree; - /// use std::path::PathBuf; - /// - /// let dir = PathBuf::from("./"); - /// let path = DapolTree::parse_secret_root_data_serialization_path(dir).unwrap(); - /// ``` pub fn parse_secret_root_data_serialization_path( path: PathBuf, ) -> Result { @@ -467,22 +512,6 @@ impl DapolTree { /// extension is checked. /// /// The file prefix is [SERIALIZED_TREE_FILE_PREFIX]. - /// - /// Example: - /// ``` - /// use dapol::{DapolTree, DapolConfig}; - /// use std::path::Path; - /// - /// let src_dir = env!("CARGO_MANIFEST_DIR"); - /// let examples_dir = Path::new(&src_dir).join("examples"); - /// - /// let config_file_path = examples_dir.join("dapol_config_example.toml"); - /// let dapol_config = DapolConfig::deserialize(config_file_path).unwrap(); - /// let dapol_tree = dapol_config.parse().unwrap(); - /// - /// let tree_path = examples_dir.join("my_serialized_tree_for_testing.dapoltree"); - /// let _ = dapol_tree.serialize(tree_path).unwrap(); - /// ``` pub fn serialize(&self, path: PathBuf) -> Result { let path = DapolTree::parse_tree_serialization_path(path)?; @@ -516,21 +545,6 @@ impl DapolTree { /// extension is checked. /// /// The file prefix is [SERIALIZED_ROOT_PUB_FILE_PREFIX]. 
- /// - /// Example: - /// ``` - /// use dapol::{DapolTree, DapolConfig}; - /// use std::path::Path; - /// - /// let src_dir = env!("CARGO_MANIFEST_DIR"); - /// let examples_dir = Path::new(&src_dir).join("examples"); - /// let config_file_path = examples_dir.join("dapol_config_example.toml"); - /// let dapol_config = DapolConfig::deserialize(config_file_path).unwrap(); - /// let dapol_tree = dapol_config.parse().unwrap(); - /// - /// let public_root_path = examples_dir.join("public_root_data.json"); - /// let _ = dapol_tree.serialize_public_root_data(public_root_path).unwrap(); - /// ``` pub fn serialize_public_root_data(&self, path: PathBuf) -> Result { let public_root_data: RootPublicData = self.public_root_data(); let path = DapolTree::parse_public_root_data_serialization_path(path.clone())?; @@ -560,21 +574,6 @@ impl DapolTree { /// extension is checked. /// /// The file prefix is [SERIALIZED_ROOT_PVT_FILE_PREFIX]. - /// - /// Example: - /// ``` - /// use dapol::{DapolTree, DapolConfig}; - /// use std::path::Path; - /// - /// let src_dir = env!("CARGO_MANIFEST_DIR"); - /// let examples_dir = Path::new(&src_dir).join("examples"); - /// let config_file_path = examples_dir.join("dapol_config_example.toml"); - /// let dapol_config = DapolConfig::deserialize(config_file_path).unwrap(); - /// let dapol_tree = dapol_config.parse().unwrap(); - /// - /// let secret_root_path = examples_dir.join("secret_root_data.json"); - /// let _ = dapol_tree.serialize_secret_root_data(secret_root_path).unwrap(); - /// ``` pub fn serialize_secret_root_data(&self, dir: PathBuf) -> Result { let secret_root_data: RootSecretData = self.secret_root_data(); let path = DapolTree::parse_secret_root_data_serialization_path(dir.clone())?; @@ -591,17 +590,6 @@ impl DapolTree { /// 1. The file cannot be opened. /// 2. The [bincode] deserializer fails. /// 3. 
The file extension is not ".[SERIALIZED_TREE_EXTENSION]" - /// - /// Example: - /// ``` - /// use dapol::{DapolTree, DapolConfig}; - /// use std::path::Path; - /// - /// let src_dir = env!("CARGO_MANIFEST_DIR"); - /// let examples_dir = Path::new(&src_dir).join("examples"); - /// let tree_path = examples_dir.join("my_serialized_tree_for_testing.dapoltree"); - /// let _ = DapolTree::deserialize(tree_path).unwrap(); - /// ``` pub fn deserialize(path: PathBuf) -> Result { debug!( "Deserializing DapolTree from file {:?}", @@ -626,18 +614,6 @@ impl DapolTree { /// 1. The file cannot be opened. /// 2. The [serde_json] deserializer fails. /// 3. The file extension is not ".[SERIALIZED_ROOT_PUB_FILE_PREFIX]" - /// - /// Example: - /// ``` - /// use dapol::DapolTree; - /// use std::path::Path; - /// - /// let src_dir = env!("CARGO_MANIFEST_DIR"); - /// let examples_dir = Path::new(&src_dir).join("examples"); - /// let public_root_path = examples_dir.join("public_root_data.json"); - /// - /// let public_root_data = DapolTree::deserialize_public_root_data(public_root_path).unwrap(); - /// ``` pub fn deserialize_public_root_data(path: PathBuf) -> Result { read_write_utils::check_deserialization_path(&path, "json")?; @@ -655,18 +631,6 @@ impl DapolTree { /// 1. The file cannot be opened. /// 2. The [serde_json] deserializer fails. /// 3. 
The file extension is not ".[SERIALIZED_ROOT_PUB_FILE_PREFIX]" - /// - /// Example: - /// ``` - /// use dapol::DapolTree; - /// use std::path::Path; - /// - /// let src_dir = env!("CARGO_MANIFEST_DIR"); - /// let examples_dir = Path::new(&src_dir).join("examples"); - /// let secret_root_path = examples_dir.join("secret_root_data.json"); - /// - /// let secret_root_data = DapolTree::deserialize_secret_root_data(secret_root_path).unwrap(); - /// ``` pub fn deserialize_secret_root_data(path: PathBuf) -> Result { read_write_utils::check_deserialization_path(&path, "json")?; @@ -698,14 +662,13 @@ mod tests { use super::*; use crate::utils::test_utils::assert_err; use crate::{ - accumulators, AccumulatorType, DapolTree, Entity, EntityId, Height, MaxLiability, - MaxThreadCount, Salt, Secret, + AccumulatorType, DapolTree, Entity, EntityId, Height, MaxLiability, MaxThreadCount, Salt, + Secret, }; use std::path::{Path, PathBuf}; use std::str::FromStr; - #[test] - fn constructor_and_getters_work() { + fn new_tree() -> DapolTree { let accumulator_type = AccumulatorType::NdmSmt; let height = Height::expect_from(8); let salt_b = Salt::from_str("salt_b").unwrap(); @@ -713,6 +676,7 @@ mod tests { let master_secret = Secret::from_str("master_secret").unwrap(); let max_liability = MaxLiability::from(10_000_000); let max_thread_count = MaxThreadCount::from(8); + let random_seed = 1; let entity = Entity { liability: 1u64, @@ -720,7 +684,7 @@ mod tests { }; let entities = vec![entity.clone()]; - let tree = DapolTree::new( + DapolTree::new_with_random_seed( accumulator_type.clone(), master_secret.clone(), salt_b.clone(), @@ -729,107 +693,209 @@ mod tests { max_thread_count.clone(), height.clone(), entities, + random_seed, ) - .unwrap(); - - assert_eq!(tree.master_secret(), &master_secret); - assert_eq!(tree.height(), &height); - assert_eq!(tree.max_liability(), &max_liability); - assert_eq!(tree.salt_b(), &salt_b); - assert_eq!(tree.salt_s(), &salt_s); - 
assert_eq!(tree.accumulator_type(), accumulator_type); + .unwrap() + } - assert!(tree.entity_mapping().is_some()); - assert!(tree.entity_mapping().unwrap().get(&entity.id).is_some()); + mod construction { + use super::*; + + #[test] + fn constructor_and_getters_work() { + let accumulator_type = AccumulatorType::NdmSmt; + let height = Height::expect_from(8); + let salt_b = Salt::from_str("salt_b").unwrap(); + let salt_s = Salt::from_str("salt_s").unwrap(); + let master_secret = Secret::from_str("master_secret").unwrap(); + let max_liability = MaxLiability::from(10_000_000); + let max_thread_count = MaxThreadCount::from(8); + let random_seed = 1u64; + + let entity = Entity { + liability: 1u64, + id: EntityId::from_str("id").unwrap(), + }; + let entities = vec![entity.clone()]; + + let tree = DapolTree::new_with_random_seed( + accumulator_type.clone(), + master_secret.clone(), + salt_b.clone(), + salt_s.clone(), + max_liability.clone(), + max_thread_count.clone(), + height.clone(), + entities, + random_seed, + ) + .unwrap(); + + assert_eq!(tree.master_secret(), &master_secret); + assert_eq!(tree.height(), &height); + assert_eq!(tree.max_liability(), &max_liability); + assert_eq!(tree.salt_b(), &salt_b); + assert_eq!(tree.salt_s(), &salt_s); + assert_eq!(tree.accumulator_type(), accumulator_type); + + assert!(tree.entity_mapping().is_some()); + assert!(tree.entity_mapping().unwrap().get(&entity.id).is_some()); + } } - fn new_tree() -> DapolTree { - let accumulator_type = AccumulatorType::NdmSmt; - let height = Height::expect_from(8); - let salt_b = Salt::from_str("salt_b").unwrap(); - let salt_s = Salt::from_str("salt_s").unwrap(); - let master_secret = Secret::from_str("master_secret").unwrap(); - let max_liability = MaxLiability::from(10_000_000); - let max_thread_count = MaxThreadCount::from(8); + mod serde { + use super::*; - let entity = Entity { - liability: 1u64, - id: EntityId::from_str("id").unwrap(), - }; - let entities = vec![entity.clone()]; + mod tree { + 
use super::*; - DapolTree::new( - accumulator_type.clone(), - master_secret.clone(), - salt_b.clone(), - salt_s.clone(), - max_liability.clone(), - max_thread_count.clone(), - height.clone(), - entities, - ) - .unwrap() - } + #[test] + fn serde_does_not_change_tree() { + let tree = new_tree(); - #[test] - fn serde_does_not_change_tree() { - let tree = new_tree(); + let src_dir = env!("CARGO_MANIFEST_DIR"); + let examples_dir = Path::new(&src_dir).join("examples"); + let path = examples_dir.join("my_serialized_tree_for_testing.dapoltree"); + let path_2 = tree.serialize(path.clone()).unwrap(); + assert_eq!(path, path_2); - let src_dir = env!("CARGO_MANIFEST_DIR"); - let examples_dir = Path::new(&src_dir).join("examples"); - let path = examples_dir.join("my_serialized_tree_for_testing.dapoltree"); - let path_2 = tree.serialize(path.clone()).unwrap(); - assert_eq!(path, path_2); + let tree_2 = DapolTree::deserialize(path).unwrap(); - let tree_2 = DapolTree::deserialize(path).unwrap(); + assert_eq!(tree.master_secret(), tree_2.master_secret()); + assert_eq!(tree.height(), tree_2.height()); + assert_eq!(tree.max_liability(), tree_2.max_liability()); + assert_eq!(tree.salt_b(), tree_2.salt_b()); + assert_eq!(tree.salt_s(), tree_2.salt_s()); + assert_eq!(tree.accumulator_type(), tree_2.accumulator_type()); + assert_eq!(tree.entity_mapping(), tree_2.entity_mapping()); + } - assert_eq!(tree.master_secret(), tree_2.master_secret()); - assert_eq!(tree.height(), tree_2.height()); - assert_eq!(tree.max_liability(), tree_2.max_liability()); - assert_eq!(tree.salt_b(), tree_2.salt_b()); - assert_eq!(tree.salt_s(), tree_2.salt_s()); - assert_eq!(tree.accumulator_type(), tree_2.accumulator_type()); - assert_eq!(tree.entity_mapping(), tree_2.entity_mapping()); - } + #[test] + fn serialization_path_parser_fails_for_unsupported_extensions() { + let path = PathBuf::from_str("./mytree.myext").unwrap(); + + let res = DapolTree::parse_tree_serialization_path(path); + assert_err!( + res, + 
Err(read_write_utils::ReadWriteError::UnsupportedFileExtension { + expected: _, + actual: _ + }) + ); + } - #[test] - fn serialization_path_parser_fails_for_unsupported_extensions() { - let path = PathBuf::from_str("./mytree.myext").unwrap(); + #[test] + fn serialization_path_parser_gives_correct_file_prefix() { + let path = PathBuf::from_str("./").unwrap(); + let path = DapolTree::parse_tree_serialization_path(path).unwrap(); + assert!(path + .to_str() + .unwrap() + .contains("proof_of_liabilities_merkle_sum_tree_")); + } + } - let res = DapolTree::parse_tree_serialization_path(path); - assert_err!( - res, - Err(read_write_utils::ReadWriteError::UnsupportedFileExtension { - expected: _, - actual: _ - }) - ); - } + mod public_root_data { + use super::*; - #[test] - fn serialization_path_parser_gives_correct_file_prefix() { - let path = PathBuf::from_str("./").unwrap(); - let path = DapolTree::parse_tree_serialization_path(path).unwrap(); - assert!(path - .to_str() - .unwrap() - .contains("proof_of_liabilities_merkle_sum_tree_")); - } + #[test] + fn serde_does_not_change_public_root_data() { + let tree = new_tree(); + let public_root_data = tree.public_root_data(); + + let src_dir = env!("CARGO_MANIFEST_DIR"); + let examples_dir = Path::new(&src_dir).join("examples"); + let path = examples_dir.join("public_root_data.json"); + let path_2 = tree.serialize_public_root_data(path.clone()).unwrap(); + assert_eq!(path, path_2); - #[test] - fn generate_inclusion_proof_works() { - let tree = new_tree(); - assert!(tree - .generate_inclusion_proof(&EntityId::from_str("id").unwrap()) - .is_ok()); + let public_root_data_2 = DapolTree::deserialize_public_root_data(path).unwrap(); + + assert_eq!(public_root_data, public_root_data_2); + } + + #[test] + fn public_root_data_serialization_path_parser_fails_for_unsupported_extensions() { + let path = PathBuf::from_str("./public_root_data.myext").unwrap(); + + let res = DapolTree::parse_public_root_data_serialization_path(path); + 
assert_err!( + res, + Err(read_write_utils::ReadWriteError::UnsupportedFileExtension { + expected: _, + actual: _ + }) + ); + } + + #[test] + fn public_root_data_serialization_path_parser_gives_correct_file_prefix() { + let path = PathBuf::from_str("./").unwrap(); + let path = DapolTree::parse_public_root_data_serialization_path(path).unwrap(); + assert!(path.to_str().unwrap().contains("public_root_data_")); + } + } + + mod secret_root_data { + use super::*; + + #[test] + fn serde_does_not_change_secret_root_data() { + let tree = new_tree(); + let secret_root_data = tree.secret_root_data(); + + let src_dir = env!("CARGO_MANIFEST_DIR"); + let examples_dir = Path::new(&src_dir).join("examples"); + let path = examples_dir.join("secret_root_data.json"); + let path_2 = tree.serialize_secret_root_data(path.clone()).unwrap(); + assert_eq!(path, path_2); + + let secret_root_data_2 = DapolTree::deserialize_secret_root_data(path).unwrap(); + + assert_eq!(secret_root_data, secret_root_data_2); + } + + #[test] + fn secret_root_data_serialization_path_parser_fails_for_unsupported_extensions() { + let path = PathBuf::from_str("./secret_root_data.myext").unwrap(); + + let res = DapolTree::parse_secret_root_data_serialization_path(path); + assert_err!( + res, + Err(read_write_utils::ReadWriteError::UnsupportedFileExtension { + expected: _, + actual: _ + }) + ); + } + + #[test] + fn secret_root_data_serialization_path_parser_gives_correct_file_prefix() { + let path = PathBuf::from_str("./").unwrap(); + let path = DapolTree::parse_secret_root_data_serialization_path(path).unwrap(); + assert!(path.to_str().unwrap().contains("secret_root_data_")); + } + } } - #[test] - fn generate_inclusion_proof_with_aggregation_factor_works() { - let tree = new_tree(); - let agg = AggregationFactor::Divisor(2u8); - assert!(tree - .generate_inclusion_proof_with(&EntityId::from_str("id").unwrap(), agg) - .is_ok()); + mod inclusion_proofs { + use super::*; + + #[test] + fn 
generate_inclusion_proof_works() { + let tree = new_tree(); + assert!(tree + .generate_inclusion_proof(&EntityId::from_str("id").unwrap()) + .is_ok()); + } + + #[test] + fn generate_inclusion_proof_with_aggregation_factor_works() { + let tree = new_tree(); + let agg = AggregationFactor::Divisor(2u8); + assert!(tree + .generate_inclusion_proof_with(&EntityId::from_str("id").unwrap(), agg) + .is_ok()); + } } } diff --git a/src/lib.rs b/src/lib.rs index 282fb559..a90b3ddb 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,11 +1,12 @@ // Copyright ⓒ 2023 SilverSixpence // Licensed under the MIT license // (see LICENSE or ) All files in the project carrying such -// notice may not be copied, modified, or distributed except according to those terms. +// notice may not be copied, modified, or distributed except according to those +// terms. //! # Proof of Liabilities protocol implemented in Rust //! -//! Implementation of the DAPOL+ protocol introduced in the "Generalized Proof of Liabilities" by Yan Ji and Konstantinos Chalkias ACM CCS 2021 paper, available [here](https://eprint.iacr.org/2021/1350) +//! Implementation of the DAPOL+ protocol introduced in the "Generalized Proof of Liabilities" by Yan Ji and Konstantinos Chalkias ACM CCS 2021 paper, available [here](https://eprint.iacr.org/2021/1350). //! //! See the [top-level doc for the project](https://hackmd.io/p0dy3R0RS5qpm3sX-_zreA) if you would like to know more about Proof of Liabilities. //! @@ -13,9 +14,15 @@ //! //! This library offers an efficient build algorithm for constructing a binary Merkle Sum Tree representing the liabilities of an organization. Efficiency is achieved through parallelization. Details on the algorithm used can be found in [the multi-threaded builder file](https://github.com/silversixpence-crypto/dapol/blob/main/src/binary_tree/tree_builder/multi_threaded.rs). //! -//! The paper describes a few different accumulator variants. 
The Sparse Merkle Sum Tree is the DAPOL+ accumulator, but there are a few different axes of variation, such as how the list of entities is embedded within the tree. The 4 accumulator variants are simply slightly different versions of the Sparse Merkle Sum Tree. Only the Non-Deterministic Mapping Sparse Merkle Tree variant has been implemented so far. +//! The paper describes a few different accumulator variants. The Sparse Merkle +//! Sum Tree is the DAPOL+ accumulator, but there are a few different axes of +//! variation, such as how the list of entities is embedded within the tree. The +//! 4 accumulator variants are simply slightly different versions of the Sparse +//! Merkle Sum Tree. Only the Non-Deterministic Mapping Sparse Merkle Tree +//! variant has been implemented so far. //! -//! The code offers inclusion proof generation & verification using the Bulletproofs protocol for the range proofs. +//! The code offers inclusion proof generation & verification using the +//! Bulletproofs protocol for the range proofs. //! //! ## Still to be done //! @@ -28,7 +35,8 @@ //! - [Sort out version issues with dependencies](https://github.com/silversixpence-crypto/dapol/issues/11) //! - [Allow the tree to be updatable](https://github.com/silversixpence-crypto/dapol/issues/109) //! - [Finish integration tests](https://github.com/silversixpence-crypto/dapol/issues/42) -//! - [Use a database as the backend storage system](https://github.com/silversixpence-crypto/dapol/issues/44) (as opposed to memory) +//! - [Use a database as the backend storage system](https://github.com/silversixpence-crypto/dapol/issues/44) +//! (as opposed to memory) //! //! Performance can be [improved](https://github.com/silversixpence-crypto/dapol/issues/44). //! @@ -37,7 +45,8 @@ //! - [ORAM-based SMT](https://github.com/silversixpence-crypto/dapol/issues/8) //! - [Hierarchical SMTs](https://github.com/silversixpence-crypto/dapol/issues/7) //! -//! 
Other than the above there are a few minor tasks to do, each of which has an issue for tracking.
+//! Other than the above there are a few minor tasks to do, each of which has an
+//! issue for tracking.
 //!
 //! ## How this code can be used
 //!
@@ -53,6 +62,16 @@
 //! ```
 #![doc = include_str!("../examples/main.rs")]
 //! ```
+//!
+//! ### Features
+//!
+//! #### Fuzzing
+//!
+//! This feature includes the libraries & features required to run the fuzzing tests.
+//!
+//! #### Testing
+//!
+//! This feature opens up additional functions for use within the library, for usage in tests. One such functionality is the seeding of the NDM-SMT random mapping mechanism. During tests it's useful to be able to get deterministic tree builds, which cannot be done with plain NDM-SMT because the entities are randomly mapped to bottom-layer nodes. So adding the `testing` feature exposes functions that allow calling code to provide seeds for the PRNG from [rand].
 
 mod kdf;
diff --git a/src/utils.rs b/src/utils.rs
index 10f004ae..e6cf3c57 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -132,7 +132,7 @@ impl ErrUnlessTrue for Option {
 // -------------------------------------------------------------------------------------------------
 // Testing utils.
 
-#[cfg(any(test, fuzzing))]
+#[cfg(any(test, feature = "fuzzing"))]
 pub mod test_utils {
 
     /// Check 2 errors are the same.
/// https://stackoverflow.com/a/65618681 From 6f5862b45f532d8b0886130039fbc6950861375a Mon Sep 17 00:00:00 2001 From: Stent Date: Sat, 20 Jan 2024 12:26:39 +0000 Subject: [PATCH 08/12] Formatting --- src/accumulators.rs | 2 +- src/binary_tree.rs | 2 +- src/binary_tree/height.rs | 3 +- src/binary_tree/node_content/full_node.rs | 75 ++++++++++--------- src/binary_tree/node_content/hidden_node.rs | 30 +++++--- src/binary_tree/path_siblings.rs | 2 +- src/binary_tree/tree_builder.rs | 4 +- .../tree_builder/multi_threaded.rs | 5 +- src/binary_tree/utils.rs | 10 ++- src/cli.rs | 3 +- src/entity/entities_parser.rs | 11 ++- src/inclusion_proof.rs | 6 +- src/kdf.rs | 4 +- src/max_liability.rs | 5 +- src/max_thread_count.rs | 1 - src/percentage.rs | 3 +- src/read_write_utils.rs | 4 +- 17 files changed, 101 insertions(+), 69 deletions(-) diff --git a/src/accumulators.rs b/src/accumulators.rs index 03cc4abd..2a2fbba8 100644 --- a/src/accumulators.rs +++ b/src/accumulators.rs @@ -76,7 +76,7 @@ pub enum AccumulatorType { impl fmt::Display for AccumulatorType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - AccumulatorType::NdmSmt => write!(f, "NDM-SMT") + AccumulatorType::NdmSmt => write!(f, "NDM-SMT"), } } } diff --git a/src/binary_tree.rs b/src/binary_tree.rs index 37d9df5d..d87e6428 100644 --- a/src/binary_tree.rs +++ b/src/binary_tree.rs @@ -43,7 +43,7 @@ pub use node_content::{FullNodeContent, HiddenNodeContent, Mergeable}; mod tree_builder; pub use tree_builder::multi_threaded; pub use tree_builder::{ - single_threaded, InputLeafNode, TreeBuildError, BinaryTreeBuilder, MIN_STORE_DEPTH, + single_threaded, BinaryTreeBuilder, InputLeafNode, TreeBuildError, MIN_STORE_DEPTH, }; mod path_siblings; diff --git a/src/binary_tree/height.rs b/src/binary_tree/height.rs index 5b2bf392..4691b069 100644 --- a/src/binary_tree/height.rs +++ b/src/binary_tree/height.rs @@ -46,7 +46,8 @@ impl Height { /// panics if `int` is greater than [MAX_HEIGHT] or less than /// 
[MIN_HEIGHT]. /// - /// Note that if we try to implement the From trait then we have a collision. + /// Note that if we try to implement the From trait then we have a + /// collision. pub fn expect_from(int: u8) -> Self { match Height::try_from(int) { Err(e) => panic!("{}", e), diff --git a/src/binary_tree/node_content/full_node.rs b/src/binary_tree/node_content/full_node.rs index 708acda5..37a8fa2c 100644 --- a/src/binary_tree/node_content/full_node.rs +++ b/src/binary_tree/node_content/full_node.rs @@ -1,23 +1,26 @@ -//! An implementation of the generic content type required for [crate][binary_tree][`Node`]. +//! An implementation of the generic content type required for +//! [crate][binary_tree][`Node`]. //! -//! This implementation contains the values in the [super][hidden_node] implementation -//! (Pedersen commitment & hash) plus the additional private values (blinding factor and plain text -//! liability). The private values are included so that the total blinding factor & liability sum -//! can be accessed after tree construction. This node type should ideally not be used in -//! the serialization process since it will increase the final byte size and expose the secret -//! values. +//! This implementation contains the values in the [super][hidden_node] +//! implementation (Pedersen commitment & hash) plus the additional private +//! values (blinding factor and plain text liability). The private values are +//! included so that the total blinding factor & liability sum can be accessed +//! after tree construction. This node type should ideally not be used in +//! the serialization process since it will increase the final byte size and +//! expose the secret values. //! -//! All the logic related to how to construct the content of a node is held in this file. +//! All the logic related to how to construct the content of a node is held in +//! this file. 
use crate::binary_tree::{Coordinate, Mergeable}; -use crate::secret::Secret; use crate::entity::EntityId; use crate::hasher::Hasher; +use crate::secret::Secret; use bulletproofs::PedersenGens; use curve25519_dalek_ng::{ristretto::RistrettoPoint, scalar::Scalar}; use primitive_types::H256; -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; use super::HiddenNodeContent; @@ -64,12 +67,13 @@ impl FullNodeContent { /// Constructor. /// - /// The secret `liability` realistically does not need more space than 64 bits because it is - /// generally used for monetary value or head count, also the Bulletproofs library requires - /// the value to be u64. - /// The `blinding_factor` needs to have a larger sized storage space (256 bits) ensure promised - /// n-bit security of the commitments; it can be enlarged to 512 bits if need be as this size - /// is supported by the underlying `Scalar` constructors. + /// The secret `liability` realistically does not need more space than 64 + /// bits because it is generally used for monetary value or head count, + /// also the Bulletproofs library requires the value to be u64. + /// The `blinding_factor` needs to have a larger sized storage space (256 + /// bits) ensure promised n-bit security of the commitments; it can be + /// enlarged to 512 bits if need be as this size is supported by the + /// underlying `Scalar` constructors. 
#[allow(dead_code)] pub fn new_leaf( liability: u64, @@ -80,7 +84,8 @@ impl FullNodeContent { // Scalar expects bytes to be in little-endian let blinding_factor_scalar = Scalar::from_bytes_mod_order(blinding_factor.into()); - // Compute the Pedersen commitment to the liability `P = g_1^liability * g_2^blinding_factor` + // Compute the Pedersen commitment to the liability `P = g_1^liability * + // g_2^blinding_factor` let commitment = PedersenGens::default().commit(Scalar::from(liability), blinding_factor_scalar); @@ -104,15 +109,19 @@ impl FullNodeContent { /// Create the content for a new padding node. /// - /// The hash requires the node's coordinate as well as a salt. Since the liability of a - /// padding node is 0 only the blinding factor is required for the Pedersen commitment. + /// The hash requires the node's coordinate as well as a salt. Since the + /// liability of a padding node is 0 only the blinding factor is + /// required for the Pedersen commitment. #[allow(dead_code)] pub fn new_pad(blinding_factor: Secret, coord: &Coordinate, salt: Secret) -> FullNodeContent { let liability = 0u64; - // TODO need to think about whether this is okay or if modulo is going to break things. Maybe we should just have the kdf such that it outputs within the correct bounds + // TODO need to think about whether this is okay or if modulo is going to break + // things. 
Maybe we should just have the kdf such that it outputs within the + // correct bounds let blinding_factor_scalar = Scalar::from_bytes_mod_order(blinding_factor.into()); - // Compute the Pedersen commitment to the liability `P = g_1^liability * g_2^blinding_factor` + // Compute the Pedersen commitment to the liability `P = g_1^liability * + // g_2^blinding_factor` let commitment = PedersenGens::default().commit(Scalar::from(liability), blinding_factor_scalar); @@ -150,9 +159,11 @@ impl FullNodeContent { impl Mergeable for FullNodeContent { /// Returns the parent node content by merging two child node contents. /// - /// The value and blinding factor of the parent are the sums of the two children respectively. - /// The commitment of the parent is the homomorphic sum of the two children. - /// The hash of the parent is computed by hashing the concatenated commitments and hashes of two children. + /// The value and blinding factor of the parent are the sums of the two + /// children respectively. The commitment of the parent is the + /// homomorphic sum of the two children. The hash of the parent is + /// computed by hashing the concatenated commitments and hashes of two + /// children. 
fn merge(left_sibling: &Self, right_sibling: &Self) -> Self { let parent_liability = left_sibling.liability + right_sibling.liability; let parent_blinding_factor = left_sibling.blinding_factor + right_sibling.blinding_factor; @@ -212,23 +223,15 @@ mod tests { let blinding_factor_1 = 7u64.into(); let entity_id_1 = EntityId::from_str("some entity 1").unwrap(); let entity_salt_1 = 13u64.into(); - let node_1 = FullNodeContent::new_leaf( - liability_1, - blinding_factor_1, - entity_id_1, - entity_salt_1, - ); + let node_1 = + FullNodeContent::new_leaf(liability_1, blinding_factor_1, entity_id_1, entity_salt_1); let liability_2 = 21u64; let blinding_factor_2 = 27u64.into(); let entity_id_2 = EntityId::from_str("some entity 2").unwrap(); let entity_salt_2 = 23u64.into(); - let node_2 = FullNodeContent::new_leaf( - liability_2, - blinding_factor_2, - entity_id_2, - entity_salt_2, - ); + let node_2 = + FullNodeContent::new_leaf(liability_2, blinding_factor_2, entity_id_2, entity_salt_2); FullNodeContent::merge(&node_1, &node_2); } diff --git a/src/binary_tree/node_content/hidden_node.rs b/src/binary_tree/node_content/hidden_node.rs index f24e402f..d7e78442 100644 --- a/src/binary_tree/node_content/hidden_node.rs +++ b/src/binary_tree/node_content/hidden_node.rs @@ -1,6 +1,8 @@ -//! An implementation of the content generic type required for [crate][binary_tree][`Node`]. +//! An implementation of the content generic type required for +//! [crate][binary_tree][`Node`]. //! -//! This implementation contains only the Pedersen commitment and the hash as fields in the struct. +//! This implementation contains only the Pedersen commitment and the hash as +//! fields in the struct. use bulletproofs::PedersenGens; use curve25519_dalek_ng::{ristretto::RistrettoPoint, scalar::Scalar}; @@ -38,12 +40,13 @@ impl HiddenNodeContent { /// Create the content for a leaf node. 
/// - /// The secret `value` realistically does not need more space than 64 bits because it is - /// generally used for monetary value or head count, also the Bulletproofs library requires - /// the value to be u64. - /// The `blinding_factor` needs to have a larger sized storage space (256 bits) ensure promised - /// n-bit security of the commitments; it can be enlarged to 512 bits if need be as this size - /// is supported by the underlying `Scalar` constructors. + /// The secret `value` realistically does not need more space than 64 bits + /// because it is generally used for monetary value or head count, also + /// the Bulletproofs library requires the value to be u64. + /// The `blinding_factor` needs to have a larger sized storage space (256 + /// bits) ensure promised n-bit security of the commitments; it can be + /// enlarged to 512 bits if need be as this size is supported by the + /// underlying `Scalar` constructors. #[allow(dead_code)] pub fn new_leaf( liability: u64, @@ -51,7 +54,8 @@ impl HiddenNodeContent { entity_id: EntityId, entity_salt: Secret, ) -> HiddenNodeContent { - // Compute the Pedersen commitment to the value `P = g_1^value * g_2^blinding_factor` + // Compute the Pedersen commitment to the value `P = g_1^value * + // g_2^blinding_factor` let commitment = PedersenGens::default().commit( Scalar::from(liability), Scalar::from_bytes_mod_order(blinding_factor.into()), @@ -72,8 +76,9 @@ impl HiddenNodeContent { /// Create the content for a new padding node. /// - /// The hash requires the node's coordinate as well as a salt. Since the liability of a - /// padding node is 0 only the blinding factor is required for the Pedersen commitment. + /// The hash requires the node's coordinate as well as a salt. Since the + /// liability of a padding node is 0 only the blinding factor is + /// required for the Pedersen commitment. 
#[allow(dead_code)] pub fn new_pad(blinding_factor: Secret, coord: &Coordinate, salt: Secret) -> HiddenNodeContent { // Compute the Pedersen commitment to 0 `P = g_1^0 * g_2^blinding_factor` @@ -111,7 +116,8 @@ impl Mergeable for HiddenNodeContent { /// Returns the parent node content by merging two child node contents. /// /// The commitment of the parent is the homomorphic sum of the two children. - /// The hash of the parent is computed by hashing the concatenated commitments and hashes of two children. + /// The hash of the parent is computed by hashing the concatenated + /// commitments and hashes of two children. fn merge(left_sibling: &Self, right_sibling: &Self) -> Self { let parent_commitment = left_sibling.commitment + right_sibling.commitment; diff --git a/src/binary_tree/path_siblings.rs b/src/binary_tree/path_siblings.rs index df326ee4..cdfc9943 100644 --- a/src/binary_tree/path_siblings.rs +++ b/src/binary_tree/path_siblings.rs @@ -22,7 +22,7 @@ //! [super][tree_builder][single_threaded]. 
use super::{BinaryTree, Coordinate, Mergeable, Node, MIN_STORE_DEPTH}; -use crate::{utils::Consume, binary_tree::multi_threaded::RecursionParamsBuilder}; +use crate::{binary_tree::multi_threaded::RecursionParamsBuilder, utils::Consume}; use std::fmt::Debug; diff --git a/src/binary_tree/tree_builder.rs b/src/binary_tree/tree_builder.rs index 89d1797d..4123089a 100644 --- a/src/binary_tree/tree_builder.rs +++ b/src/binary_tree/tree_builder.rs @@ -405,7 +405,9 @@ mod tests { fn err_when_parent_builder_height_not_set() { let height = Height::expect_from(4); let leaf_nodes = full_bottom_layer(&height); - let res = BinaryTreeBuilder::new().with_leaf_nodes(leaf_nodes).height(); + let res = BinaryTreeBuilder::new() + .with_leaf_nodes(leaf_nodes) + .height(); // cannot use assert_err because it requires Func to have the Debug trait assert_err_simple!(res, Err(TreeBuildError::NoHeightProvided)); diff --git a/src/binary_tree/tree_builder/multi_threaded.rs b/src/binary_tree/tree_builder/multi_threaded.rs index 37b1375d..82a19c05 100644 --- a/src/binary_tree/tree_builder/multi_threaded.rs +++ b/src/binary_tree/tree_builder/multi_threaded.rs @@ -600,7 +600,7 @@ fn max_nodes_to_store(num_leaf_nodes: u64, height: &Height) -> u64 { // TODO recursive function err - NOT x-coord max multiple of 2 // TODO recursive function err - max - min must be power of 2 -#[cfg(any(test, fuzzing))] +#[cfg(any(test, feature = "fuzzing"))] pub(crate) mod tests { use std::str::FromStr; @@ -671,7 +671,8 @@ pub(crate) mod tests { assert_err!( res, Err(TreeBuildError::TooManyLeaves { - // TODO does assert_err work for these values? If we change the values does the test pass? + // TODO does assert_err work for these values? If we change the values does the test + // pass? 
given: leaf_nodes, max: max_nodes, }) diff --git a/src/binary_tree/utils.rs b/src/binary_tree/utils.rs index 7f3d52d9..5b49ac67 100644 --- a/src/binary_tree/utils.rs +++ b/src/binary_tree/utils.rs @@ -3,11 +3,11 @@ // ------------------------------------------------------------------------------------------------- // Test utils for sub-modules. -#[cfg(any(test, fuzzing))] +#[cfg(any(test, feature = "fuzzing"))] pub mod test_utils { use super::super::*; - use primitive_types::H256; use crate::hasher::Hasher; + use primitive_types::H256; #[derive(Clone, Debug, PartialEq, Serialize)] pub struct TestContent { @@ -46,7 +46,11 @@ pub mod test_utils { } } - pub fn random_leaf_nodes(num_leaf_nodes: u64, height: &Height, seed: u64) -> Vec> { + pub fn random_leaf_nodes( + num_leaf_nodes: u64, + height: &Height, + seed: u64, + ) -> Vec> { use crate::accumulators::RandomXCoordGenerator; let mut leaf_nodes = Vec::>::new(); diff --git a/src/cli.rs b/src/cli.rs index 55198377..b0cd670a 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -91,7 +91,8 @@ pub enum Command { /// Verify an inclusion proof. /// - /// Note: the root hash of the tree is logged out on tree creation (an info-level log). + /// Note: the root hash of the tree is logged out on tree creation (an + /// info-level log). VerifyInclusionProof { /// File path for the serialized inclusion proof file. 
#[arg(short, long)] diff --git a/src/entity/entities_parser.rs b/src/entity/entities_parser.rs index dc5395fe..27a91a74 100644 --- a/src/entity/entities_parser.rs +++ b/src/entity/entities_parser.rs @@ -182,8 +182,8 @@ pub enum EntitiesParserError { #[cfg(test)] mod tests { use super::*; - use std::path::Path; use crate::utils::test_utils::assert_err; + use std::path::Path; #[test] fn parser_csv_file_happy_case() { @@ -224,8 +224,13 @@ mod tests { fn fail_when_unsupproted_file_type() { let this_file = std::file!(); let unsupported_path = PathBuf::from(this_file); - let res = EntitiesParser::new().with_path(unsupported_path).parse_file(); - assert_err!(res, Err(EntitiesParserError::UnsupportedFileType { ext: _ })); + let res = EntitiesParser::new() + .with_path(unsupported_path) + .parse_file(); + assert_err!( + res, + Err(EntitiesParserError::UnsupportedFileType { ext: _ }) + ); } #[test] diff --git a/src/inclusion_proof.rs b/src/inclusion_proof.rs index cc5c3588..d5ecdff2 100644 --- a/src/inclusion_proof.rs +++ b/src/inclusion_proof.rs @@ -221,7 +221,11 @@ impl InclusionProof { /// An error is returned if /// 1. [bincode] fails to serialize the file. /// 2. There is an issue opening or writing the file. - pub fn serialize(&self, entity_id: &EntityId, dir: PathBuf) -> Result { + pub fn serialize( + &self, + entity_id: &EntityId, + dir: PathBuf, + ) -> Result { let mut file_name = entity_id.to_string(); file_name.push('.'); file_name.push_str(SERIALIZED_PROOF_EXTENSION); diff --git a/src/kdf.rs b/src/kdf.rs index 6473db6a..0a2180a6 100644 --- a/src/kdf.rs +++ b/src/kdf.rs @@ -5,8 +5,8 @@ //! The HKDF is split into 2 separate functions: extract & expand (both of which //! utilize HMAC). //! -//! `HKDF(salt, IKM, info, length) = HKDF-Expand(HKDF-Extract(salt, IKM), info, length)` -//! where `HKDF-Extract(salt, IKM) = HMAC(key=salt, message=IKM)` +//! `HKDF(salt, IKM, info, length) = HKDF-Expand(HKDF-Extract(salt, IKM), info, +//! 
length)` where `HKDF-Extract(salt, IKM) = HMAC(key=salt, message=IKM)` //! //! For more information check out these resources: //! - [Cryptographic Extraction and Key Derivation: The HKDF Scheme](https://eprint.iacr.org/2010/264.pdf) diff --git a/src/max_liability.rs b/src/max_liability.rs index d4cfb17c..a8cfe36a 100644 --- a/src/max_liability.rs +++ b/src/max_liability.rs @@ -138,7 +138,10 @@ mod tests { assert_eq!(logarithm_of_input_truncated, 6u8); let nearest_pow_2_greater_than = 8u8; - assert_eq!(max_liability.as_range_proof_upper_bound_bit_length(), nearest_pow_2_greater_than); + assert_eq!( + max_liability.as_range_proof_upper_bound_bit_length(), + nearest_pow_2_greater_than + ); } // TODO test more cases for the upper_bound_bit_length function diff --git a/src/max_thread_count.rs b/src/max_thread_count.rs index a5494ad4..07698e67 100644 --- a/src/max_thread_count.rs +++ b/src/max_thread_count.rs @@ -8,7 +8,6 @@ use serde::{Deserialize, Serialize}; pub const DEFAULT_MAX_THREAD_COUNT: u8 = 4; /// Abstraction for the max number of threads. -/// #[doc = include_str!("./shared_docs/max_thread_count.md")] /// /// Example: diff --git a/src/percentage.rs b/src/percentage.rs index 5e94b729..d2179624 100644 --- a/src/percentage.rs +++ b/src/percentage.rs @@ -17,7 +17,8 @@ impl Percentage { /// Returns a new `Percentage` with the given value. /// Panics if the value is greater than 100. /// - /// Note that if we try to implement the From trait then we have a collision. + /// Note that if we try to implement the From trait then we have a + /// collision. 
pub fn expect_from(value: u8) -> Percentage { match Percentage::try_from(value) { Err(e) => panic!("{}", e), diff --git a/src/read_write_utils.rs b/src/read_write_utils.rs index 00f083fc..8ca5bda7 100644 --- a/src/read_write_utils.rs +++ b/src/read_write_utils.rs @@ -176,7 +176,9 @@ pub fn check_deserialization_path( }) } } - None => Err(ReadWriteError::NoFileExtension(path.clone().into_os_string())), + None => Err(ReadWriteError::NoFileExtension( + path.clone().into_os_string(), + )), } } From 26b604565abc46bb17ff9e2f56041ce190ea1f4e Mon Sep 17 00:00:00 2001 From: Stent Date: Sat, 20 Jan 2024 12:51:34 +0000 Subject: [PATCH 09/12] Adjust some doc formatting --- src/dapol_config.rs | 18 +++++++++--------- src/dapol_tree.rs | 26 ++++++++++++-------------- src/lib.rs | 7 +++++-- src/utils.rs | 2 ++ 4 files changed, 28 insertions(+), 25 deletions(-) diff --git a/src/dapol_config.rs b/src/dapol_config.rs index 6b0ec1e0..dafb5061 100644 --- a/src/dapol_config.rs +++ b/src/dapol_config.rs @@ -11,7 +11,7 @@ use crate::{ }; use crate::{salt, secret}; -/// Configuration needed to construct a [crate][DapolTree]. +/// Configuration needed to construct a [DapolTree]. /// /// The config is defined by a struct. A builder pattern is used to construct /// the config, but it can also be constructed by deserializing a file. 
@@ -21,7 +21,7 @@ use crate::{salt, secret}; #[doc = include_str!("../examples/dapol_config_example.toml")] /// ``` /// -/// Example of how to use the builder to construct a [crate][DapolTree]: +/// Example of how to use the builder to construct a [DapolTree]: /// ``` /// use std::{path::PathBuf, str::FromStr}; /// use dapol::{ @@ -55,7 +55,7 @@ use crate::{salt, secret}; /// .unwrap(); /// ``` /// -/// Example of how to use a config file to construct a [crate][DapolTree]: +/// Example of how to use a config file to construct a [DapolTree]: /// ``` /// use std::{path::PathBuf, str::FromStr}; /// use dapol::DapolConfig; @@ -66,8 +66,8 @@ use crate::{salt, secret}; /// DapolConfig::deserialize(config_file_path).unwrap(); /// ``` /// -/// Note that you can also construct a [crate][DapolTree] by calling the -/// constructor directly (see [crate][DapolTree]). +/// Note that you can also construct a [DapolTree] by calling the +/// constructor directly (see [DapolTree]). #[derive(Deserialize, Debug, Builder, PartialEq)] #[builder(build_fn(skip))] pub struct DapolConfig { @@ -346,7 +346,7 @@ impl DapolConfig { Ok(config) } - /// Try to construct a [crate][DapolTree] from the config. + /// Try to construct a [DapolTree] from the config. // STENT TODO rather call this create_tree #[cfg(any(test, feature = "testing"))] pub fn parse(self) -> Result { @@ -398,7 +398,7 @@ impl DapolConfig { Ok(dapol_tree) } - /// Try to construct a [crate][DapolTree] from the config. + /// Try to construct a [DapolTree] from the config. // STENT TODO rather call this create_tree #[cfg(not(any(test, feature = "testing")))] pub fn parse(self) -> Result { @@ -433,7 +433,7 @@ impl DapolConfig { .log_on_err()?) } - /// Open and parse the secrets file, returning a [crate][Secret]. + /// Open and parse the secrets file, returning a [Secret]. /// /// An error is returned if: /// 1. The path is None (i.e. was not set). 
@@ -505,7 +505,7 @@ struct DapolSecrets { // ------------------------------------------------------------------------------------------------- // Errors. -/// Errors encountered when parsing [crate][DapolConfig]. +/// Errors encountered when parsing [DapolConfig]. #[derive(thiserror::Error, Debug)] pub enum DapolConfigError { #[error("Entities parsing failed while trying to parse DAPOL config")] diff --git a/src/dapol_tree.rs b/src/dapol_tree.rs index 6ee2bf64..df6ef02e 100644 --- a/src/dapol_tree.rs +++ b/src/dapol_tree.rs @@ -8,17 +8,16 @@ use std::path::PathBuf; use crate::{ accumulators::{Accumulator, AccumulatorType, NdmSmt, NdmSmtError}, read_write_utils::{self}, - secret, utils::LogOnErr, AggregationFactor, Entity, EntityId, Height, InclusionProof, MaxLiability, MaxThreadCount, Salt, Secret, }; -const SERIALIZED_TREE_EXTENSION: &str = "dapoltree"; -const SERIALIZED_TREE_FILE_PREFIX: &str = "proof_of_liabilities_merkle_sum_tree_"; +pub const SERIALIZED_TREE_EXTENSION: &str = "dapoltree"; +pub const SERIALIZED_TREE_FILE_PREFIX: &str = "proof_of_liabilities_merkle_sum_tree_"; -const SERIALIZED_ROOT_PUB_FILE_PREFIX: &str = "public_root_data_"; -const SERIALIZED_ROOT_PVT_FILE_PREFIX: &str = "secret_root_data_"; +pub const SERIALIZED_ROOT_PUB_FILE_PREFIX: &str = "public_root_data_"; +pub const SERIALIZED_ROOT_PVT_FILE_PREFIX: &str = "secret_root_data_"; // ------------------------------------------------------------------------------------------------- // Main struct. @@ -27,7 +26,7 @@ const SERIALIZED_ROOT_PVT_FILE_PREFIX: &str = "secret_root_data_"; /// /// This is the top-most module in the hierarchy of the [dapol] crate. /// -/// It is recommended that one use [crate][DapolConfig] to construct the +/// It is recommended that one use [DapolConfig](crate::DapolConfig) to construct the /// tree, which has extra sanity checks on the inputs and more ways to set /// the parameters. But there is also a `new` function for direct construction. 
#[derive(Debug, Serialize, Deserialize)] @@ -89,8 +88,7 @@ impl DapolTree { #[doc = include_str!("./shared_docs/salt_s.md")] /// - `max_liability`: If not set then a default value is used. #[doc = include_str!("./shared_docs/max_liability.md")] - /// - `height`: If not set the [default height] will be used - /// [crate][Height]. + /// - `height`: If not set the [default height] will be used. #[doc = include_str!("./shared_docs/height.md")] /// - `max_thread_count`: If not set the max parallelism of the underlying /// machine will be used. @@ -345,7 +343,7 @@ impl DapolTree { self.accumulator.height() } - /// Mapping of [crate][EntityId] to x-coord on the bottom layer of the tree. + /// Mapping of [EntityId](crate::EntityId) to x-coord on the bottom layer of the tree. /// /// If the underlying accumulator is an NDM-SMT then a hashmap is returned /// otherwise None is returned. @@ -441,7 +439,7 @@ impl DapolTree { /// `path`. 2. Non-existing directory: in this case all dirs in the path /// are created, and a default file name is appended. /// 3. File in existing dir: in this case the extension is checked to be - /// ".[SERIALIZED_TREE_EXTENSION]", then `path` is returned. + /// [SERIALIZED_TREE_EXTENSION], then `path` is returned. /// 4. File in non-existing dir: dirs in the path are created and the file /// extension is checked. /// @@ -507,7 +505,7 @@ impl DapolTree { /// `path`. 2. Non-existing directory: in this case all dirs in the path /// are created, and a default file name is appended. /// 3. File in existing dir: in this case the extension is checked to be - /// ".[SERIALIZED_TREE_EXTENSION]", then `path` is returned. + /// [SERIALIZED_TREE_EXTENSION], then `path` is returned. /// 4. File in non-existing dir: dirs in the path are created and the file /// extension is checked. /// @@ -589,7 +587,7 @@ impl DapolTree { /// An error is logged and returned if /// 1. The file cannot be opened. /// 2. The [bincode] deserializer fails. - /// 3. 
The file extension is not ".[SERIALIZED_TREE_EXTENSION]" + /// 3. The file extension is not [SERIALIZED_TREE_EXTENSION] pub fn deserialize(path: PathBuf) -> Result { debug!( "Deserializing DapolTree from file {:?}", @@ -613,7 +611,7 @@ impl DapolTree { /// An error is logged and returned if /// 1. The file cannot be opened. /// 2. The [serde_json] deserializer fails. - /// 3. The file extension is not ".[SERIALIZED_ROOT_PUB_FILE_PREFIX]" + /// 3. The file extension is not [SERIALIZED_ROOT_PUB_FILE_PREFIX] pub fn deserialize_public_root_data(path: PathBuf) -> Result { read_write_utils::check_deserialization_path(&path, "json")?; @@ -630,7 +628,7 @@ impl DapolTree { /// An error is logged and returned if /// 1. The file cannot be opened. /// 2. The [serde_json] deserializer fails. - /// 3. The file extension is not ".[SERIALIZED_ROOT_PUB_FILE_PREFIX]" + /// 3. The file extension is not [SERIALIZED_ROOT_PUB_FILE_PREFIX] pub fn deserialize_secret_root_data(path: PathBuf) -> Result { read_write_utils::check_deserialization_path(&path, "json")?; diff --git a/src/lib.rs b/src/lib.rs index a90b3ddb..537d3a03 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -62,7 +62,7 @@ //! ``` #![doc = include_str!("../examples/main.rs")] //! ``` -//! +//! //! ### Features //! //! #### Fuzzing @@ -81,7 +81,10 @@ pub mod read_write_utils; pub mod utils; mod dapol_tree; -pub use dapol_tree::{DapolTree, DapolTreeError, RootPublicData, RootSecretData}; +pub use dapol_tree::{ + DapolTree, DapolTreeError, RootPublicData, RootSecretData, SERIALIZED_ROOT_PUB_FILE_PREFIX, + SERIALIZED_ROOT_PVT_FILE_PREFIX, SERIALIZED_TREE_EXTENSION, SERIALIZED_TREE_FILE_PREFIX, +}; pub use curve25519_dalek_ng::{ristretto::RistrettoPoint, scalar::Scalar}; diff --git a/src/utils.rs b/src/utils.rs index e6cf3c57..62772886 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -1,5 +1,7 @@ //! Utilities used across the whole crate. 
+// TODO add documentation for traits + // ------------------------------------------------------------------------------------------------- // Logging. From 0df4f3e2c487f11deef38a5dcbc231d29cbdb171 Mon Sep 17 00:00:00 2001 From: Stent Date: Sat, 20 Jan 2024 13:59:18 +0000 Subject: [PATCH 10/12] Fix smol bug in benches where dir does not exist --- benches/criterion_benches.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/benches/criterion_benches.rs b/benches/criterion_benches.rs index cb16da1c..943c1add 100644 --- a/benches/criterion_benches.rs +++ b/benches/criterion_benches.rs @@ -289,6 +289,7 @@ pub fn bench_generate_proof(c: &mut Criterion) { let src_dir = env!("CARGO_MANIFEST_DIR"); let target_dir = Path::new(&src_dir).join("target"); let dir = target_dir.join("serialized_proofs"); + std::fs::create_dir_all(dir)?; let path = proof .expect("Proof should be set") .serialize(entity_id, dir) From 965ff11255b7687dab7dc76406cd9dfa9ba9fc36 Mon Sep 17 00:00:00 2001 From: Stent Date: Sat, 20 Jan 2024 14:17:39 +0000 Subject: [PATCH 11/12] Fix smol compilation errors with benches build --- benches/criterion_benches.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/benches/criterion_benches.rs b/benches/criterion_benches.rs index 943c1add..b2f57b47 100644 --- a/benches/criterion_benches.rs +++ b/benches/criterion_benches.rs @@ -289,7 +289,7 @@ pub fn bench_generate_proof(c: &mut Criterion) { let src_dir = env!("CARGO_MANIFEST_DIR"); let target_dir = Path::new(&src_dir).join("target"); let dir = target_dir.join("serialized_proofs"); - std::fs::create_dir_all(dir)?; + std::fs::create_dir_all(dir.clone()).unwrap(); let path = proof .expect("Proof should be set") .serialize(entity_id, dir) @@ -387,7 +387,7 @@ pub fn bench_verify_proof(c: &mut Criterion) { format!("height_{}/num_entities_{}", h.as_u32(), n), ), |bench| { - bench.iter(|| proof.verify(root_hash)); + bench.iter(|| proof.verify(*root_hash)); }, ); } From 
009371c47968d8f3c189c518f117180349dcf75f Mon Sep 17 00:00:00 2001 From: Stent Date: Sat, 20 Jan 2024 14:18:03 +0000 Subject: [PATCH 12/12] Bump version to 0.3 --- CHANGELOG.md | 9 +++++++++ Cargo.toml | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 84272230..ffbf4dbe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,15 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). +## v0.3.0 (2024-01-20) + +- Adjust API to read better using DapolTree instead of Accumulator [36dd58f](https://github.com/silversixpence-crypto/dapol/commit/36dd58fcd9cd2100ac7a1c4a7010faab3397770f). Also included in this change: + - New Salt abstraction type [5c8a580](https://github.com/silversixpence-crypto/dapol/commit/5c8a580c5250a337592951234879852a8f1df285) + - New MaxLiability abstraction type [800b0a9](https://github.com/silversixpence-crypto/dapol/commit/800b0a95b67ad7b4badf4c089b2cfc10d400283b) + - Deserialize Salt & Secret using FromStr [169cfa5](https://github.com/silversixpence-crypto/dapol/commit/169cfa532e86e3f27d675764d8456fc3e3270564) + - Fix bug with Bulletproofs bit length [f2a2498](https://github.com/silversixpence-crypto/dapol/commit/f2a2498120fa35ecf589f43bc660d218ae2861ad) +- Add benchmark graphs to readme [4a73d3c](https://github.com/silversixpence-crypto/dapol/commit/4a73d3cb8284f7f60659a376fa90c5714368e627) + ## v0.2.0 (2023-12-27) - Add max_thread_count API parameter [62be10c](https://github.com/silversixpence-crypto/dapol/commit/62be10c9393b2b7e2a4feeedde53fd8a793cbf30) diff --git a/Cargo.toml b/Cargo.toml index ec18117b..1cb65b2b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dapol" -version = "0.2.0" +version = "0.3.0" authors = ["Stenton Mayne "] edition = "2021" description = "DAPOL+ Proof of Liabilities protocol"