keyfork-mnemonic-util: .seed() => .entropy(), make new bip39-compliant .seed() method

Ryan Heywood 2023-10-07 23:04:47 -05:00
parent ea0a207a5f
commit 0f31cd2424
Signed by: ryan
GPG Key ID: 8E401478A3FBEF72
6 changed files with 60 additions and 67 deletions
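In short: the method previously named seed() only recovered the mnemonic's raw entropy bytes, so it is renamed to entropy(); the new seed() takes an optional passphrase and derives the 64-byte BIP-0039 seed via PBKDF2-HMAC-SHA512. A rough usage sketch based on the API in this diff (the zeroed entropy, the passphrase string, and the unwraps are illustrative only):

    use keyfork_mnemonic_util::{Mnemonic, Wordlist};

    fn main() {
        // 32 bytes of entropy encode a 24-word mnemonic.
        let entropy = [0u8; 32];
        let mnemonic = Mnemonic::from_entropy(&entropy[..], Wordlist::default().arc()).unwrap();

        // Old behavior under its new name: recover the entropy the words encode.
        assert_eq!(mnemonic.entropy(), entropy.to_vec());

        // New behavior: BIP-0039 seed via PBKDF2-HMAC-SHA512 (2048 rounds), with an
        // optional passphrase mixed into the salt; always 64 bytes of output.
        let seed = mnemonic.seed(None).unwrap();
        let salted = mnemonic.seed("passphrase").unwrap();
        assert_eq!(seed.len(), 64);
        assert_ne!(seed, salted);
    }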

Cargo.lock (generated, 12 lines changed)

@@ -927,6 +927,8 @@ version = "0.1.0"
 dependencies = [
  "bip39",
  "hex",
+ "hmac",
+ "pbkdf2",
  "serde_json",
  "sha2",
 ]
@@ -1226,6 +1228,16 @@ dependencies = [
  "windows-targets 0.48.5",
 ]

+[[package]]
+name = "pbkdf2"
+version = "0.12.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2"
+dependencies = [
+ "digest 0.10.7",
+ "hmac",
+]
+
 [[package]]
 name = "peeking_take_while"
 version = "0.1.2"


@@ -1,7 +1,7 @@
 use crate::{
     extended_key::private_key::Error as XPrvError, DerivationPath, ExtendedPrivateKey, PrivateKey,
 };
-use keyfork_mnemonic_util::Mnemonic;
+use keyfork_mnemonic_util::{Mnemonic, MnemonicGenerationError};
 use serde::{Deserialize, Serialize};

 #[derive(Debug, thiserror::Error)]
@@ -9,6 +9,9 @@ pub enum DerivationError {
     #[error("algorithm not supported")]
     Algorithm,

+    #[error("Unable to create seed from mnemonic: {0}")]
+    Mnemonic(#[from] MnemonicGenerationError),
+
     #[error("{0}")]
     ExtendedPrivateKey(#[from] XPrvError),
 }
@@ -79,7 +82,8 @@ impl DerivationRequest {
     }

     pub fn derive_with_mnemonic(&self, mnemonic: &Mnemonic) -> Result<DerivationResponse> {
-        self.derive_with_master_seed(mnemonic.seed())
+        // TODO: passphrase support and/or store passphrase within mnemonic
+        self.derive_with_master_seed(mnemonic.seed(None)?)
     }

     pub fn derive_with_master_seed(&self, seed: Vec<u8>) -> Result<DerivationResponse> {


@@ -7,6 +7,8 @@ repository = "https://git.distrust.co/public/keyfork"
 edition = "2021"

 [dependencies]
+hmac = "0.12.1"
+pbkdf2 = "0.12.2"
 # Included in rust
 sha2 = "0.10.7"


@@ -1,7 +1,8 @@
-use std::{collections::HashMap, str::FromStr, sync::Arc};
-use sha2::{Digest, Sha256};
-use std::{error::Error, fmt::Display};
+use std::{collections::HashMap, str::FromStr, sync::Arc, error::Error, fmt::Display};
+use sha2::{Digest, Sha256, Sha512};
+use pbkdf2::pbkdf2;
+use hmac::Hmac;

 /// The error type representing a failure to create a [`Mnemonic`]. These errors only occur during
 /// [`Mnemonic`] creation.
@@ -13,6 +14,9 @@ pub enum MnemonicGenerationError {
     /// The length of a mnemonic in bits must be within the BIP-0039 range, and supported by the
     /// library. Currently, only 128, 192 (for testing purposes), and 256 are supported.
     InvalidByteLength(usize),
+
+    /// Invalid length resulting from PBKDF2.
+    InvalidPbkdf2Length,
 }

 impl Display for MnemonicGenerationError {
@@ -24,6 +28,9 @@ impl Display for MnemonicGenerationError {
             MnemonicGenerationError::InvalidByteLength(count) => {
                 write!(f, "Invalid byte length: {count}, must be 128 or 256")
             }
+            MnemonicGenerationError::InvalidPbkdf2Length => {
+                f.write_str("Invalid length from PBKDF2")
+            },
         }
     }
 }
@@ -144,12 +151,6 @@ impl FromStr for Mnemonic {
     }
 }

-fn generate_slice_hash(data: &[u8]) -> Vec<u8> {
-    let mut hasher = Sha256::new();
-    hasher.update(data);
-    hasher.finalize().to_vec()
-}
-
 impl Mnemonic {
     /// Generate a [`Mnemonic`] from the provided entropy and [`Wordlist`].
     ///
@@ -160,7 +161,6 @@ impl Mnemonic {
         wordlist: Arc<Wordlist>,
     ) -> Result<Mnemonic, MnemonicGenerationError> {
         let bit_count = bytes.len() * 8;
-        let hash = generate_slice_hash(bytes);

         if bit_count % 32 != 0 {
             return Err(MnemonicGenerationError::InvalidByteCount(bit_count));
@@ -177,6 +177,10 @@ impl Mnemonic {
                 bits[byte_index * 8 + bit_index] = (bytes[byte_index] & (1 << (7 - bit_index))) > 0;
             }
         }
+        let mut hasher = Sha256::new();
+        hasher.update(bytes);
+        let hash = hasher.finalize().to_vec();
+
         for check_bit in 0..bit_count / 32 {
             bits[bit_count + check_bit] = (hash[check_bit / 8] & (1 << (7 - (check_bit % 8)))) > 0;
         }
@@ -185,7 +189,6 @@ impl Mnemonic {
             // NOTE: Tested with all approved variants. Always divisible by 11.
             .chunks_exact(11)
             .map(|chunk| {
-                // NOTE: usize to use for indexing wordlist later
                 let mut num = 0usize;
                 for i in 0..11 {
                     num += usize::from(chunk[10 - i]) << i;
@@ -197,15 +200,17 @@ impl Mnemonic {
         Ok(Mnemonic { words, wordlist })
     }

-    #[must_use]
-    pub fn seed(&self) -> Vec<u8> {
+    pub fn entropy(&self) -> Vec<u8> {
         let mut bits = vec![false; self.words.len() * 11];
         for (index, word) in self.words.iter().enumerate() {
             for bit in 0..11 {
                 bits[index * 11 + bit] = (word & (1 << (10 - bit))) > 0;
             }
         }
+        // remove checksum bits
         bits.truncate(bits.len() - bits.len() % 32);
         bits.chunks_exact(8)
             .map(|chunk| {
@@ -214,10 +219,20 @@ impl Mnemonic {
                 }
                 num
             })
-            .collect::<Vec<_>>()
+            .collect()
+    }
+
+    pub fn seed<'a>(&self, passphrase: impl Into<Option<&'a str>>) -> Result<Vec<u8>, MnemonicGenerationError> {
+        let passphrase = passphrase.into();
+        let mut seed = [0u8; 64];
+        let mnemonic = self.to_string();
+        let salt = ["mnemonic", passphrase.unwrap_or("")].join("");
+        pbkdf2::<Hmac<Sha512>>(mnemonic.as_bytes(), salt.as_bytes(), 2048, &mut seed)
+            .map_err(|_| MnemonicGenerationError::InvalidPbkdf2Length)?;
+        Ok(seed.to_vec())
     }

+    #[must_use]
     pub fn into_inner(self) -> (Vec<usize>, Arc<Wordlist>) {
         (self.words, self.wordlist)
     }
@@ -246,8 +261,8 @@ mod tests {
         random_handle.read_exact(&mut entropy[..]).unwrap();
         let wordlist = Wordlist::default().arc();
         let mnemonic = super::Mnemonic::from_entropy(&entropy[..256 / 8], wordlist).unwrap();
-        let seed = mnemonic.seed();
-        assert_eq!(&seed, entropy);
+        let new_entropy = mnemonic.entropy();
+        assert_eq!(&new_entropy, entropy);
     }

     #[test]
@@ -277,10 +292,13 @@ mod tests {
         let my_mnemonic = super::Mnemonic::from_entropy(&entropy[..256 / 8], wordlist).unwrap();
         let their_mnemonic = bip39::Mnemonic::from_entropy(&entropy[..256 / 8]).unwrap();
         assert_eq!(my_mnemonic.to_string(), their_mnemonic.to_string());
+        assert_eq!(my_mnemonic.seed(None).unwrap(), their_mnemonic.to_seed(""));
+        assert_eq!(my_mnemonic.seed("testing").unwrap(), their_mnemonic.to_seed("testing"));
+        assert_ne!(my_mnemonic.seed("test1").unwrap(), their_mnemonic.to_seed("test2"));
     }

     #[test]
-    fn count_to_get_duplicate_words() {
+    fn count_rate_of_duplicate_words() {
         let tests = 100_000;
         let mut count = 0.;
         let entropy = &mut [0u8; 256 / 8];
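For reference, the new seed() above is the standard BIP-0039 seed derivation. A standalone sketch of the same computation, using the pbkdf2 and hmac crates this commit adds as dependencies (the helper name bip39_seed is illustrative, not part of the crate):

    use hmac::Hmac;
    use pbkdf2::pbkdf2;
    use sha2::Sha512;

    // PBKDF2-HMAC-SHA512 over the mnemonic sentence, salted with "mnemonic" followed
    // by the passphrase, 2048 iterations, 64 bytes of output.
    fn bip39_seed(mnemonic_sentence: &str, passphrase: &str) -> [u8; 64] {
        let mut seed = [0u8; 64];
        let salt = format!("mnemonic{passphrase}");
        pbkdf2::<Hmac<Sha512>>(mnemonic_sentence.as_bytes(), salt.as_bytes(), 2048, &mut seed)
            .expect("a 64-byte output length is valid for PBKDF2");
        seed
    }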


@@ -23,7 +23,8 @@ pub async fn start_and_run_server_on(
 ) -> Result<(), Box<dyn std::error::Error>> {
     let service = ServiceBuilder::new()
         .layer(middleware::BincodeLayer::new())
-        .service(Keyforkd::new(mnemonic.seed()));
+        // TODO: passphrase support and/or store passphrase with mnemonic
+        .service(Keyforkd::new(mnemonic.seed(None)?));

     let mut server = match UnixServer::bind(socket_path) {
         Ok(s) => s,


@@ -61,7 +61,6 @@ mod tests {
     use super::*;
     use hex_literal::hex;
     use keyfork_derive_util::{request::*, DerivationPath};
-    use keyfork_mnemonic_util::{Mnemonic, Wordlist};
     use keyfork_slip10_test_data::test_data;
     use std::str::FromStr;
     use tower::ServiceExt;
@@ -73,28 +72,9 @@ mod tests {
             .remove(&"secp256k1".to_string())
             .unwrap();

-        let wordlist = Wordlist::default().arc();
         for per_seed in tests {
             let seed = &per_seed.seed;

-            // Test mnemonic path
-            if [128 / 8, 256 / 8].contains(&seed.len()) {
-                let mnemonic = Mnemonic::from_entropy(seed, wordlist.clone()).unwrap();
-                let mut keyforkd = Keyforkd::new(mnemonic.seed());
-                for test in &per_seed.tests {
-                    let chain = DerivationPath::from_str(test.chain).unwrap();
-                    if chain.len() < 2 {
-                        continue;
-                    }
-                    let req = DerivationRequest::new(DerivationAlgorithm::Secp256k1, &chain);
-                    let response = keyforkd.ready().await.unwrap().call(req).await.unwrap();
-                    assert_eq!(response.data, test.private_key);
-                    assert_eq!(response.chain_code.as_slice(), test.chain_code);
-                }
-            }
-
-            // Test seed path
             let mut keyforkd = Keyforkd::new(seed.to_vec());
             for test in &per_seed.tests {
                 let chain = DerivationPath::from_str(test.chain).unwrap();
@@ -116,27 +96,9 @@ mod tests {
             .remove(&"ed25519".to_string())
             .unwrap();

-        let wordlist = Wordlist::default().arc();
         for per_seed in tests {
             let seed = &per_seed.seed;

-            // Test mnemonic path
-            if [128 / 8, 256 / 8].contains(&seed.len()) {
-                let mnemonic = Mnemonic::from_entropy(seed, wordlist.clone()).unwrap();
-                let mut keyforkd = Keyforkd::new(mnemonic.seed());
-                for test in &per_seed.tests {
-                    let chain = DerivationPath::from_str(test.chain).unwrap();
-                    if chain.len() < 2 {
-                        continue;
-                    }
-                    let req = DerivationRequest::new(DerivationAlgorithm::Ed25519, &chain);
-                    let response = keyforkd.ready().await.unwrap().call(req).await.unwrap();
-                    assert_eq!(response.data, test.private_key);
-                    assert_eq!(response.chain_code.as_slice(), test.chain_code);
-                }
-            }
-
             // Test seed path
             let mut keyforkd = Keyforkd::new(seed.to_vec());
             for test in &per_seed.tests {
@@ -162,12 +124,9 @@ mod tests {
             hex!("2b4be7f19ee27bbf30c667b642d5f4aa69fd169872f8fc3059c08ebae2eb19e7"),
             hex!("00a4b2856bfec510abab89753fac1ac0e1112364e7d250545963f135f2a33188ed"),
         )];

-        let wordlist = Wordlist::default().arc();
         for (seed, path, _, private_key, _) in tests {
-            let mnemonic = Mnemonic::from_entropy(&seed[..], wordlist.clone()).unwrap();
-            assert_eq!(mnemonic.seed(), seed);
             let req = DerivationRequest::new(DerivationAlgorithm::Ed25519, &path);
-            let mut keyforkd = Keyforkd::new(mnemonic.seed());
+            let mut keyforkd = Keyforkd::new(seed.to_vec());
             let response = keyforkd.ready().await.unwrap().call(req).await.unwrap();
             assert_eq!(response.data, private_key)
         }
@@ -183,12 +142,9 @@ mod tests {
             hex!("68e0fe46dfb67e368c75379acec591dad19df3cde26e63b93a8e704f1dade7a3"),
             hex!("008c8a13df77a28f3445213a0f432fde644acaa215fc72dcdf300d5efaa85d350c"),
         )];

-        let wordlist = Wordlist::default().arc();
         for (seed, path, _, private_key, _) in tests {
-            let mnemonic = Mnemonic::from_entropy(&seed[..], wordlist.clone()).unwrap();
-            assert_eq!(mnemonic.seed(), seed);
             let req = DerivationRequest::new(DerivationAlgorithm::Ed25519, &path);
-            let mut keyforkd = Keyforkd::new(mnemonic.seed());
+            let mut keyforkd = Keyforkd::new(seed.to_vec());
             let response = keyforkd.ready().await.unwrap().call(req).await.unwrap();
             assert_eq!(response.data, private_key)
         }