Compare commits


2 Commits

Author SHA1 Message Date
Ryan Heywood 76c9214d73
keyfork-mnemonic-util: impl FromStr for Mnemonic
This changes the actual structure of Mnemonic since it requires
exclusively owned types when implementing FromStr. Now, Mnemonic
contains an Arc. Thread safety is required because of the Tokio
multithreaded runtime, hence an Arc instead of an Rc.

This does add some level of burden for people instantiating Mnemonics,
but `Wordlist::arc(self) -> Arc<Self>` has been provided as a
convenience method to make working with mnemonics easier.
2023-08-24 21:56:35 -05:00
Ryan Heywood ee15145662
keyfork-frame: initial commit
2023-08-24 20:25:42 -05:00
7 changed files with 743 additions and 16 deletions
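The first commit reworks `Mnemonic` to hold an `Arc<Wordlist>` instead of a borrowed `&Wordlist`. Below is a minimal sketch of the new calling convention, mirroring the call sites changed in the diffs that follow; the crate path `keyfork_mnemonic_util`, the `demo` function, and the zeroed entropy buffer are illustrative, not part of the commits.

```rust
use std::sync::Arc;

use keyfork_mnemonic_util::{Mnemonic, Wordlist};

fn demo() {
    // Build the default wordlist once and wrap it in an Arc; the Arc is what
    // lets a Mnemonic be shared across threads (e.g. under a multithreaded
    // Tokio runtime, per the commit message).
    let wordlist: Arc<Wordlist> = Wordlist::default().arc();

    // Illustrative fixed entropy; the real binary fills this from an RNG.
    let entropy = [0u8; 256 / 8];

    // from_entropy now takes the Arc by value, so clone the Arc (a cheap
    // reference-count bump) wherever the wordlist is reused.
    let mnemonic = Mnemonic::from_entropy(&entropy[..], wordlist.clone())
        .expect("256 bits is an accepted entropy length");
    println!("{mnemonic}");
}
```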

Cargo.lock (generated)

@@ -34,6 +34,18 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "console"
version = "0.15.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8"
dependencies = [
"encode_unicode",
"lazy_static",
"libc",
"windows-sys",
]
[[package]]
name = "cpufeatures"
version = "0.2.9"
@@ -63,6 +75,12 @@ dependencies = [
 "crypto-common",
]
[[package]]
name = "encode_unicode"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
[[package]]
name = "generic-array"
version = "0.14.7"
@@ -79,12 +97,35 @@ version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "insta"
version = "1.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0770b0a3d4c70567f0d58331f3088b0e4c4f56c9b8d764efe654b4a5d46de3a"
dependencies = [
"console",
"lazy_static",
"linked-hash-map",
"similar",
"yaml-rust",
]
[[package]]
name = "itoa"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
[[package]]
name = "keyfork-frame"
version = "0.1.0"
dependencies = [
"hex",
"insta",
"sha2",
"thiserror",
]
[[package]]
name = "keyfork-mnemonic-generate"
version = "0.1.0"
@@ -102,12 +143,46 @@ dependencies = [
 "sha2",
]
[[package]]
name = "keyforkd"
version = "0.1.0"
[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.147"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
[[package]]
name = "linked-hash-map"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
[[package]]
name = "proc-macro2"
version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [
"proc-macro2",
]
[[package]]
name = "ryu"
version = "1.0.15"
@@ -142,6 +217,43 @@ dependencies = [
 "digest",
]
[[package]]
name = "similar"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "420acb44afdae038210c99e69aae24109f32f15500aa708e81d46c9f29d55fcf"
[[package]]
name = "syn"
version = "2.0.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "thiserror"
version = "1.0.47"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97a802ec30afc17eee47b2855fc72e0c4cd62be9b4efe6591edde0ec5bd68d8f"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.47"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bb623b56e39ab7dcd4b1b98bb6c8f8d907ed255b18de254088016b27a8ee19b"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tinyvec"
version = "1.6.0"
@@ -163,6 +275,12 @@ version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]]
name = "unicode-ident"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c"
[[package]]
name = "unicode-normalization"
version = "0.1.22"
@@ -177,3 +295,78 @@ name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "windows-sys"
version = "0.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8"
[[package]]
name = "windows_aarch64_msvc"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43"
[[package]]
name = "windows_i686_gnu"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f"
[[package]]
name = "windows_i686_msvc"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060"
[[package]]
name = "windows_x86_64_gnu"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3"
[[package]]
name = "windows_x86_64_msvc"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0"
[[package]]
name = "yaml-rust"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
dependencies = [
"linked-hash-map",
]

Cargo.toml (workspace manifest)

@@ -2,5 +2,6 @@
members = [
    "keyfork-mnemonic-generate",
-    "keyfork-mnemonic-util"
+    "keyfork-mnemonic-util",
+    "keyfork-frame"
]

keyfork-frame/Cargo.toml (new file)

@@ -0,0 +1,14 @@
[package]
name = "keyfork-frame"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
hex = "0.4.3"
sha2 = "0.10.7"
thiserror = "1.0.47"
[dev-dependencies]
insta = "1.31.0"

keyfork-frame/src/lib.rs (new file)

@@ -0,0 +1,157 @@
//! Utility functions to quickly encode and decode `&[u8]` to and from framed messages.
//!
//! Framed messages consist of the following items:
//!
//! ```txt
//! | len: u32 of data.len() | data: binary data |
//! ```
//!
//! The data stored after the length consists of the following items:
//!
//! ```txt
//! | checksum: [u8; 32] sha256 hash of `raw_data` | raw_data: &[u8] |
//! ```
use sha2::{Digest, Sha256};
#[derive(Debug, Clone, thiserror::Error)]
pub enum DecodeError {
    /// There were not enough bytes to determine the length of the data slice.
    #[error("Invalid length: {0}")]
    InvalidLength(std::array::TryFromSliceError),
    /// There were not enough bytes to read a checksum of the data slice.
    #[error("Invalid checksum: {0} bytes")]
    InvalidChecksum(std::array::TryFromSliceError),
    /// There were not enough bytes to read the rest of the data.
    #[error("Incorrect length of internal data: {0}, expected at least: {1}")]
    IncorrectLength(usize, u32),
    /// The provided checksum of the data did not match the locally-generated checksum.
    #[error("Checksum did not match: Their {0} != Our {1}")]
    BadChecksum(String, String),
}

#[derive(Debug, Clone, thiserror::Error)]
pub enum EncodeError {
    /// The given input was larger than could be encoded by this protocol.
    #[error("Input too large to encode: {0}")]
    InputTooLarge(usize),
}

const LEN_SIZE: usize = std::mem::size_of::<u32>();

fn hash(data: &[u8]) -> Vec<u8> {
    let mut hashobj = Sha256::new();
    hashobj.update(data);
    hashobj.finalize().to_vec()
}
/// Encode a given `&[u8]` to a framed message.
///
/// # Errors
/// An error may be returned if the given `data` is more than [`u32::MAX`] bytes. This is a
/// constraint on a protocol level.
pub fn try_encode(data: &[u8]) -> Result<Vec<u8>, EncodeError> {
    let hash = hash(data);
    let content = hash.iter().chain(data.iter()).copied().collect::<Vec<_>>();
    let mut result = (u32::try_from(content.len())
        .map_err(|_| EncodeError::InputTooLarge(content.len()))?)
    .to_be_bytes()
    .to_vec();
    result.extend(content);
    Ok(result)
}
/// Decode a framed message into a `Vec<u8>`.
///
/// # Errors
/// An error may be returned if:
/// * The given `data` does not contain enough data to parse a length,
/// * The given `data` does not contain the given length's worth of data,
/// * The given `data` has a checksum that does not match what we build locally.
pub fn try_decode(data: &[u8]) -> Result<Vec<u8>, DecodeError> {
    // check length and advance data pointer beyond length check
    let len_bytes: [u8; LEN_SIZE] = data[..LEN_SIZE]
        .try_into()
        .map_err(DecodeError::InvalidLength)?;
    let len = u32::from_be_bytes(len_bytes);
    if len as usize + LEN_SIZE > data.len() {
        return Err(DecodeError::IncorrectLength(data.len() - LEN_SIZE, len));
    }
    let data = &data[LEN_SIZE..];
    let checksum: &[u8; 32] = &data[..32]
        .try_into()
        .map_err(DecodeError::InvalidChecksum)?;
    // Only the declared length belongs to this frame; trailing bytes are
    // ignored so callers may pass a buffer longer than one frame.
    let content = &data[32..len as usize];
    let our_checksum = hash(content);
    if our_checksum != checksum {
        return Err(DecodeError::BadChecksum(
            hex::encode(checksum),
            hex::encode(our_checksum),
        ));
    }
    Ok(content.to_vec())
}
#[cfg(test)]
mod tests {
    use super::{try_encode, try_decode, DecodeError};

    #[test]
    fn stable_interface() {
        let data = (0..255).collect::<Vec<u8>>();
        insta::assert_debug_snapshot!(try_encode(&data[..]));
    }

    #[test]
    fn equivalency() -> Result<(), DecodeError> {
        let data = (0..255).collect::<Vec<u8>>();
        assert_eq!(try_decode(&try_encode(&data[..]).unwrap())?, data);
        Ok(())
    }

    #[test]
    fn allows_extra_data() -> Result<(), DecodeError> {
        let data = (0..255).collect::<Vec<u8>>();
        let mut encoded = try_encode(&data[..]).unwrap();
        // Throw on some extra data
        encoded.extend(0..255);
        assert_eq!(try_decode(&encoded[..])?, data);
        Ok(())
    }

    #[test]
    fn error_on_smaller_data() {
        // Data sliced by 1 byte
        let data = (0..255).collect::<Vec<u8>>();
        let encoded = try_encode(&data[..]).unwrap();
        let error = try_decode(&encoded[..data.len() - 1]);
        assert!(error.is_err());
        // Data includes length and checksum
        let error = try_decode(&encoded[..super::LEN_SIZE + 256 / 8]);
        assert!(error.is_err());
        // Data only includes length
        let data = 12u32.to_be_bytes();
        let error = try_decode(&data[..]);
        assert!(error.is_err());
    }

    #[test]
    fn error_on_invalid_checksum() {
        let data = (0..255).collect::<Vec<u8>>();
        let mut encoded = try_encode(&data[..]).unwrap();
        assert_ne!(encoded[super::LEN_SIZE + 1], 0);
        encoded[super::LEN_SIZE + 1] = 0;
        let error = try_decode(&encoded[..]);
        assert!(error.is_err());
    }
}
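For reference, a short usage sketch of the framing API listed above: it frames a payload as a big-endian `u32` length followed by a 32-byte SHA-256 checksum and the raw bytes, then decodes it back. The crate path `keyfork_frame` follows from the package name; the `roundtrip` function, payload, and assertions are illustrative rather than part of the commit.

```rust
use keyfork_frame::{try_decode, try_encode};

fn roundtrip() -> Result<(), Box<dyn std::error::Error>> {
    let payload = b"hello keyfork";

    // try_encode prepends a 4-byte big-endian length and a 32-byte SHA-256
    // checksum of the payload.
    let framed = try_encode(payload)?;
    assert_eq!(framed.len(), 4 + 32 + payload.len());

    // The length field counts the checksum plus the payload, not itself.
    let len = u32::from_be_bytes(framed[..4].try_into()?);
    assert_eq!(len as usize, 32 + payload.len());

    // try_decode verifies the checksum and returns the original payload.
    assert_eq!(try_decode(&framed)?, payload);
    Ok(())
}
```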

keyfork-frame insta snapshot for the `stable_interface` test (new file)

@@ -0,0 +1,299 @@
---
source: keyfork-frame/src/lib.rs
expression: "try_encode(&data[..])"
---
Ok(
[
0,
0,
1,
31,
63,
133,
145,
17,
44,
107,
190,
92,
150,
57,
101,
149,
78,
41,
49,
8,
183,
32,
142,
210,
175,
137,
62,
80,
13,
133,
147,
104,
198,
84,
234,
190,
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
65,
66,
67,
68,
69,
70,
71,
72,
73,
74,
75,
76,
77,
78,
79,
80,
81,
82,
83,
84,
85,
86,
87,
88,
89,
90,
91,
92,
93,
94,
95,
96,
97,
98,
99,
100,
101,
102,
103,
104,
105,
106,
107,
108,
109,
110,
111,
112,
113,
114,
115,
116,
117,
118,
119,
120,
121,
122,
123,
124,
125,
126,
127,
128,
129,
130,
131,
132,
133,
134,
135,
136,
137,
138,
139,
140,
141,
142,
143,
144,
145,
146,
147,
148,
149,
150,
151,
152,
153,
154,
155,
156,
157,
158,
159,
160,
161,
162,
163,
164,
165,
166,
167,
168,
169,
170,
171,
172,
173,
174,
175,
176,
177,
178,
179,
180,
181,
182,
183,
184,
185,
186,
187,
188,
189,
190,
191,
192,
193,
194,
195,
196,
197,
198,
199,
200,
201,
202,
203,
204,
205,
206,
207,
208,
209,
210,
211,
212,
213,
214,
215,
216,
217,
218,
219,
220,
221,
222,
223,
224,
225,
226,
227,
228,
229,
230,
231,
232,
233,
234,
235,
236,
237,
238,
239,
240,
241,
242,
243,
244,
245,
246,
247,
248,
249,
250,
251,
252,
253,
254,
],
)

keyfork-mnemonic-generate/src/main.rs

@@ -100,8 +100,8 @@ fn main() -> Result<()> {
    let entropy = &mut [0u8; 256 / 8];
    rng.read_into(&mut entropy[..])?;
-    let wordlist = Wordlist::default();
-    let mnemonic = Mnemonic::from_entropy(&entropy[..bit_size / 8], &wordlist)?;
+    let wordlist = Wordlist::default().arc();
+    let mnemonic = Mnemonic::from_entropy(&entropy[..bit_size / 8], wordlist)?;
    println!("{mnemonic}");
@@ -119,13 +119,13 @@ mod tests {
        let tests = 100_000;
        let mut count = 0.;
        let entropy = &mut [0u8; 256 / 8];
-        let wordlist = Wordlist::default();
+        let wordlist = Wordlist::default().arc();
        let mut rng = Entropy::new().unwrap();
        let mut hs = HashSet::<usize>::with_capacity(24);
        for _ in 0..tests {
            rng.read_into(&mut entropy[..]).unwrap();
-            let mnemonic = Mnemonic::from_entropy(&entropy[..256 / 8], &wordlist).unwrap();
+            let mnemonic = Mnemonic::from_entropy(&entropy[..256 / 8], wordlist.clone()).unwrap();
            let (words, _) = mnemonic.into_inner();
            hs.clear();
            hs.extend(words);

keyfork-mnemonic-util/src/lib.rs

@@ -1,3 +1,5 @@
+use std::{collections::HashMap, str::FromStr, sync::Arc};
+
use sha2::{Digest, Sha256};
use std::{error::Error, fmt::Display};
@@ -49,11 +51,21 @@ impl Default for Wordlist {
}

impl Wordlist {
+    /// Return an Arced version of the Wordlist
+    #[allow(clippy::must_use_candidate)]
+    pub fn arc(self) -> Arc<Self> {
+        Arc::new(self)
+    }
+
    /// Given an index, get a word from the wordlist.
    fn get_word(&self, word: usize) -> Option<&String> {
        self.0.get(word)
    }

+    fn inner(&self) -> &Vec<String> {
+        &self.0
+    }
+
    #[cfg(test)]
    fn into_inner(self) -> Vec<String> {
        self.0
@@ -62,12 +74,12 @@ impl Wordlist {
/// A BIP-0039 mnemonic with reference to a [`Wordlist`].
#[derive(Debug, Clone)]
-pub struct Mnemonic<'a> {
+pub struct Mnemonic {
    words: Vec<usize>,
-    wordlist: &'a Wordlist,
+    wordlist: Arc<Wordlist>,
}

-impl<'a> Display for Mnemonic<'a> {
+impl Display for Mnemonic {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut iter = self.words.iter().peekable();
        while let Some(word_index) = iter.next() {
@@ -81,21 +93,72 @@ impl<'a> Display for Mnemonic<'a> {
    }
}

+/// The error type representing a failure to parse a [`Mnemonic`]. These errors only occur during
+/// [`Mnemonic`] creation.
+#[derive(Debug, Clone)]
+pub enum MnemonicFromStrError {
+    /// The amount of words used to parse a mnemonic was not correct.
+    InvalidWordCount(usize),
+    /// One of the words used to generate the mnemonic was not found in the default wordlist.
+    InvalidWord(usize),
+}
+
+impl Display for MnemonicFromStrError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            MnemonicFromStrError::InvalidWordCount(count) => {
+                write!(f, "Incorrect word count: {count}")
+            }
+            MnemonicFromStrError::InvalidWord(index) => {
+                write!(f, "Unknown word at index: {index}")
+            }
+        }
+    }
+}
+
+impl Error for MnemonicFromStrError {}
+
+impl FromStr for Mnemonic {
+    type Err = MnemonicFromStrError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let wordlist = Wordlist::default().arc();
+        let hm: HashMap<&str, usize> = wordlist
+            .inner()
+            .iter()
+            .enumerate()
+            .map(|(a, b)| (b.as_str(), a))
+            .collect();
+        let mut words: Vec<usize> = Vec::with_capacity(24);
+        for (index, word) in s.split_whitespace().enumerate() {
+            match hm.get(&word) {
+                Some(id) => words.push(*id),
+                None => return Err(MnemonicFromStrError::InvalidWord(index)),
+            }
+        }
+        if ![12, 24].contains(&words.len()) {
+            return Err(MnemonicFromStrError::InvalidWordCount(words.len()));
+        }
+        Ok(Mnemonic { words, wordlist })
+    }
+}
+
fn generate_slice_hash(data: &[u8]) -> Vec<u8> {
    let mut hasher = Sha256::new();
    hasher.update(data);
    hasher.finalize().to_vec()
}

-impl<'a> Mnemonic<'a> {
+impl Mnemonic {
    /// Generate a [`Mnemonic`] from the provided entropy and [`Wordlist`].
    ///
    /// # Errors
    /// An error may be returned if the entropy is not within the acceptable lengths.
    pub fn from_entropy(
        bytes: &[u8],
-        wordlist: &'a Wordlist,
-    ) -> Result<Mnemonic<'a>, MnemonicGenerationError> {
+        wordlist: Arc<Wordlist>,
+    ) -> Result<Mnemonic, MnemonicGenerationError> {
        let bit_count = bytes.len() * 8;
        let hash = generate_slice_hash(bytes);
@@ -135,7 +198,7 @@ impl<'a> Mnemonic<'a> {
    }

    #[must_use]
-    pub fn into_inner(self) -> (Vec<usize>, &'a Wordlist) {
+    pub fn into_inner(self) -> (Vec<usize>, Arc<Wordlist>) {
        (self.words, self.wordlist)
    }
}
@@ -160,6 +223,7 @@ mod tests {
    fn conforms_to_trezor_tests() {
        let content = include_str!("data/vectors.json");
        let jsonobj: serde_json::Value = serde_json::from_str(content).unwrap();
+        let wordlist = Wordlist::default().arc();

        for test in jsonobj["english"].as_array().unwrap() {
            let [ref hex_, ref seed, ..] = test.as_array().unwrap()[..] else {
@@ -167,8 +231,7 @@
            };
            let hex = hex::decode(hex_.as_str().unwrap()).unwrap();
-            let wordlist = Wordlist::default();
-            let mnemonic = Mnemonic::from_entropy(&hex, &wordlist).unwrap();
+            let mnemonic = Mnemonic::from_entropy(&hex, wordlist.clone()).unwrap();
            assert_eq!(mnemonic.to_string(), seed.as_str().unwrap());
        }
@@ -179,8 +242,8 @@
        let mut random_handle = File::open("/dev/random").unwrap();
        let entropy = &mut [0u8; 256 / 8];
        random_handle.read_exact(&mut entropy[..]).unwrap();
-        let wordlist = Wordlist::default();
-        let my_mnemonic = super::Mnemonic::from_entropy(&entropy[..256 / 8], &wordlist).unwrap();
+        let wordlist = Wordlist::default().arc();
+        let my_mnemonic = super::Mnemonic::from_entropy(&entropy[..256 / 8], wordlist).unwrap();
        let their_mnemonic = bip39::Mnemonic::from_entropy(&entropy[..256 / 8]).unwrap();
        assert_eq!(my_mnemonic.to_string(), their_mnemonic.to_string());
    }
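With `FromStr` in place, a mnemonic can also be parsed back from its string form via `str::parse`. A brief sketch under the same assumptions as the earlier example (the crate path and `parse_roundtrip` are illustrative; the all-`abandon` phrase is the standard BIP-39 English test vector):

```rust
use keyfork_mnemonic_util::Mnemonic;

fn parse_roundtrip() -> Result<(), Box<dyn std::error::Error>> {
    // Parsing uses the default English wordlist and accepts 12 or 24 words.
    let phrase = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about";
    let mnemonic: Mnemonic = phrase.parse()?;

    // Display joins the words with spaces, so the round trip preserves the phrase.
    assert_eq!(mnemonic.to_string(), phrase);
    Ok(())
}
```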