// jf_merkle_tree/hasher.rs
// Copyright (c) 2023 Espresso Systems (espressosys.com)
// This file is part of the Jellyfish library.

// You should have received a copy of the MIT License
// along with the Jellyfish library. If not, see <https://mit-license.org/>.

//! A convenience wrapper [`HasherMerkleTree`] to instantiate [`MerkleTree`] for any [RustCrypto-compatible](https://github.com/RustCrypto/hashes) hash function.
//!
//! ```
//! # use jf_merkle_tree::errors::MerkleTreeError;
//! use jf_merkle_tree::{hasher::HasherMerkleTree, AppendableMerkleTreeScheme, MerkleTreeScheme};
//! use sha2::Sha256;
//!
//! # fn main() -> Result<(), MerkleTreeError> {
//! let my_data = [1, 2, 3, 4, 5, 6, 7, 8, 9];
//!
//! // payload type is `usize`, hash function is `Sha256`.
//! let mt = HasherMerkleTree::<Sha256, usize>::from_elems(Some(2), &my_data)?;
//!
//! let commitment = mt.commitment();
//! let (val, proof) = mt.lookup(2).expect_ok()?;
//! assert_eq!(val, &3);
//! assert!(HasherMerkleTree::<Sha256, usize>::verify(commitment, 2, val, proof)?.is_ok());
//! # Ok(())
//! # }
//! ```
//!
//! [`HasherMerkleTree`] requires the `std` feature for your hasher, which is
//! enabled by default. Example:
//! ```toml
//! [dependencies]
//! sha2 = "0.10"
//! ```
//!
//! Use [`GenericHasherMerkleTree`] if you prefer to specify your own `ARITY`
//! and node [`Index`] types.
37
// clippy is freaking out about `HasherNode` and this is the only thing I
// could do to stop it
#![allow(clippy::non_canonical_partial_ord_impl)]

use super::{append_only::MerkleTree, DigestAlgorithm, Element, Index};
use crate::{
    errors::MerkleTreeError,
    prelude::{INTERNAL_HASH_DOM_SEP, LEAF_HASH_DOM_SEP},
};
use ark_serialize::{
    CanonicalDeserialize, CanonicalSerialize, Compress, Read, SerializationError, Valid, Validate,
    Write,
};
use ark_std::string::ToString;
use derivative::Derivative;
use digest::{
    crypto_common::{generic_array::ArrayLength, Output},
    Digest, OutputSizeUser,
};
use tagged_base64::tagged;

/// Merkle tree generic over [`Digest`] hasher `H`.
///
/// An arity-3 (ternary) tree whose nodes are indexed by [`u64`].
/// - `H` is a [RustCrypto-compatible](https://github.com/RustCrypto/hashes)
///   hash function.
/// - `E` is a [`Element`] payload data type for the Merkle tree.
pub type HasherMerkleTree<H, E> = GenericHasherMerkleTree<H, E, u64, 3>;
66
/// Like [`HasherMerkleTree`] except with additional parameters.
///
/// Additional parameters beyond [`HasherMerkleTree`]:
/// - `I` is a [`Index`] data type that impls [`From<u64>`]. (eg. [`u64`],
///   [`Field`](ark_ff::Field), etc.)
/// - `ARITY` is a const generic. (eg. 2 for a binary tree, 3 for a ternary
///   tree, etc.)
pub type GenericHasherMerkleTree<H, E, I, const ARITY: usize> =
    MerkleTree<E, HasherDigestAlgorithm, I, ARITY, HasherNode<H>>;
76
/// Convenience trait and blanket impl for downstream trait bounds.
///
/// Useful for downstream code that's generic over [`Digest`] hasher `H`.
///
/// # Example
///
/// Do this:
/// ```
/// # use jf_merkle_tree::{hasher::HasherMerkleTree, AppendableMerkleTreeScheme};
/// # use jf_merkle_tree::hasher::HasherDigest;
/// fn generic_over_hasher<H>()
/// where
///     H: HasherDigest,
/// {
///     let my_data = [1, 2, 3, 4, 5, 6, 7, 8, 9];
///     let mt = HasherMerkleTree::<H, usize>::from_elems(None, &my_data).unwrap();
/// }
/// ```
///
/// Instead of this:
/// ```
/// # use digest::{crypto_common::generic_array::ArrayLength, Digest, OutputSizeUser};
/// # use ark_serialize::Write;
/// # use jf_merkle_tree::{hasher::HasherMerkleTree, AppendableMerkleTreeScheme};
/// # use jf_merkle_tree::hasher::HasherDigest;
/// fn generic_over_hasher<H>()
/// where
///     H: Digest + Write + Send + Sync,
///     <<H as OutputSizeUser>::OutputSize as ArrayLength<u8>>::ArrayType: Copy,
/// {
///     let my_data = [1, 2, 3, 4, 5, 6, 7, 8, 9];
///     let mt = HasherMerkleTree::<H, usize>::from_elems(None, &my_data).unwrap();
/// }
/// ```
///
/// Note that the complex trait bound for [`Copy`] is necessary:
/// ```compile_fail
/// # use digest::{crypto_common::generic_array::ArrayLength, Digest, OutputSizeUser};
/// # use ark_serialize::Write;
/// # use jf_merkle_tree::{hasher::HasherMerkleTree, AppendableMerkleTreeScheme};
/// # use jf_merkle_tree::hasher::HasherDigest;
/// fn generic_over_hasher<H>()
/// where
///     H: Digest + Write + Send + Sync,
/// {
///     let my_data = [1, 2, 3, 4, 5, 6, 7, 8, 9];
///     let mt = HasherMerkleTree::<H, usize>::from_elems(None, &my_data).unwrap();
/// }
/// ```
pub trait HasherDigest: Digest<OutputSize = Self::OutSize> + Write + Send + Sync {
    /// The hasher's output size, forwarded from [`Digest`]'s `OutputSize`.
    type OutSize: ArrayLength<u8, ArrayType = Self::ArrayType>;
    /// The backing array type for the hash output; must be [`Copy`] so that
    /// `HasherNode<Self>` can be `Copy` (see the `Copy` derive bound there).
    type ArrayType: Copy;
}
// Blanket impl: any RustCrypto hasher that is also `Write + Send + Sync` and
// whose output's backing array is `Copy` automatically gets `HasherDigest`.
impl<T> HasherDigest for T
where
    T: Digest + Write + Send + Sync,
    // This `Copy` bound is what allows `HasherNode<T>` to derive `Copy`
    // (see the derivative `Copy(bound = ...)` on `HasherNode`).
    <T::OutputSize as ArrayLength<u8>>::ArrayType: Copy,
{
    type OutSize = T::OutputSize;
    type ArrayType = <<T as HasherDigest>::OutSize as ArrayLength<u8>>::ArrayType;
}
140
/// A struct that impls [`DigestAlgorithm`] for use with [`MerkleTree`].
///
/// Stateless marker type: all behavior lives in its [`DigestAlgorithm`] impl,
/// which domain-separates leaf hashes from internal-node hashes.
pub struct HasherDigestAlgorithm;
143
144impl<E, I, H> DigestAlgorithm<E, I, HasherNode<H>> for HasherDigestAlgorithm
145where
146 E: Element + CanonicalSerialize,
147 I: Index + CanonicalSerialize,
148 H: HasherDigest,
149{
150 fn digest(data: &[HasherNode<H>]) -> Result<HasherNode<H>, MerkleTreeError> {
151 let mut hasher = H::new();
152 hasher.update(INTERNAL_HASH_DOM_SEP);
153 for value in data {
154 hasher.update(value.as_ref());
155 }
156 Ok(HasherNode(hasher.finalize()))
157 }
158
159 fn digest_leaf(pos: &I, elem: &E) -> Result<HasherNode<H>, MerkleTreeError> {
160 let mut hasher = H::new();
161 hasher.update(LEAF_HASH_DOM_SEP);
162 pos.serialize_uncompressed(&mut hasher)
163 .map_err(|_| MerkleTreeError::DigestError("Failed serializing pos".to_string()))?;
164 elem.serialize_uncompressed(&mut hasher)
165 .map_err(|_| MerkleTreeError::DigestError("Failed serializing elem".to_string()))?;
166 Ok(HasherNode(hasher.finalize()))
167 }
168}
169
/// Newtype wrapper for hash output that impls [`NodeValue`](super::NodeValue).
///
/// Wraps the raw [`Output`] of hasher `H`. `Copy` is derived only when the
/// output's backing array is itself `Copy` — the same bound that
/// [`HasherDigest`] packages for convenience.
#[derive(Derivative)]
#[derivative(
    Clone(bound = ""),
    Copy(bound = "<<H as OutputSizeUser>::OutputSize as ArrayLength<u8>>::ArrayType: Copy"),
    Debug(bound = ""),
    Default(bound = ""),
    Eq(bound = ""),
    Hash(bound = ""),
    Ord(bound = ""),
    PartialEq(bound = ""),
    PartialOrd(bound = "")
)]
#[tagged("HASH")]
pub struct HasherNode<H>(Output<H>)
where
    H: Digest;
187
188/// Allow creation from [`Output`]
189impl<H> From<Output<H>> for HasherNode<H>
190where
191 H: Digest,
192{
193 fn from(value: Output<H>) -> Self {
194 Self(value)
195 }
196}
197
198/// Allow access to the underlying [`Output`]
199impl<H> AsRef<Output<H>> for HasherNode<H>
200where
201 H: Digest,
202{
203 fn as_ref(&self) -> &Output<H> {
204 &self.0
205 }
206}
207
208// Manual impls of some subtraits of [`NodeValue`](super::NodeValue).
209impl<H> CanonicalSerialize for HasherNode<H>
210where
211 H: Digest,
212{
213 fn serialize_with_mode<W: Write>(
214 &self,
215 mut writer: W,
216 _compress: Compress,
217 ) -> Result<(), SerializationError> {
218 writer.write_all(&self.0)?;
219 Ok(())
220 }
221
222 fn serialized_size(&self, _compress: Compress) -> usize {
223 <H as Digest>::output_size()
224 }
225}
226impl<H> CanonicalDeserialize for HasherNode<H>
227where
228 H: Digest,
229{
230 fn deserialize_with_mode<R: Read>(
231 mut reader: R,
232 _compress: Compress,
233 _validate: Validate,
234 ) -> Result<Self, SerializationError> {
235 let mut ret = Output::<H>::default();
236 reader.read_exact(&mut ret)?;
237 Ok(HasherNode(ret))
238 }
239}
impl<H> Valid for HasherNode<H>
where
    H: Digest,
{
    /// Any byte string of the right length is a valid hash output, so there
    /// is nothing to check.
    fn check(&self) -> Result<(), SerializationError> {
        Ok(())
    }
247}