Merge rust-bitcoin/rust-bitcoin#4295: Automated nightly rustfmt (2025-03-30)
b8910e201e
2025-03-30 automated rustfmt nightly (Fmt Bot)
Pull request description:
Automated nightly `rustfmt` changes by [create-pull-request](https://github.com/peter-evans/create-pull-request) GitHub action
ACKs for top commit:
apoelstra:
ACK b8910e201ea4374bb0508aa50c6779d080ef6484; successfully ran local tests
Tree-SHA512: 9bd89439304322d30e04aae8f9803f9e07830b616cb49339ac4b1e0ee1bfe440865d9ebc7f41eea1bcd35cb751060215eec20d258d88e9f20dd3f905b9ab690f
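For context, this kind of pull request is produced by a scheduled CI job rather than by hand: a workflow runs nightly `rustfmt` over the workspace and then uses the create-pull-request action to open a PR with whatever changed. The repository's actual workflow file is not shown on this page, so the sketch below is only an assumption of how such a job is typically wired up — the file name, schedule, and action versions are illustrative, not the project's real configuration.

# Hypothetical .github/workflows/nightly-rustfmt.yml (illustrative sketch, not the repo's actual file)
name: Automated nightly rustfmt
on:
  schedule:
    - cron: "0 4 * * *"   # once a day; the real schedule may differ
  workflow_dispatch: {}
jobs:
  format:
    runs-on: ubuntu-latest
    steps:
      # Check out the repository and install a nightly toolchain with rustfmt.
      - uses: actions/checkout@v4
      - uses: dtolnay/rust-toolchain@nightly
        with:
          components: rustfmt
      # Reformat the whole workspace with nightly rustfmt.
      - run: cargo +nightly fmt --all
      # Open (or update) a pull request containing the formatting changes.
      - uses: peter-evans/create-pull-request@v6
        with:
          commit-message: automated rustfmt nightly
          title: Automated nightly rustfmt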
This commit is contained in:
commit 59524adafd
@@ -113,7 +113,8 @@ pub fn decode(data: &str) -> Result<Vec<u8>, InvalidCharacterError> {
 /// Decodes a base58check-encoded string into a byte vector verifying the checksum.
 pub fn decode_check(data: &str) -> Result<Vec<u8>, Error> {
     let mut ret: Vec<u8> = decode(data)?;
-    let (remaining, &data_check) = ret.split_last_chunk::<4>().ok_or(TooShortError { length: ret.len() })?;
+    let (remaining, &data_check) =
+        ret.split_last_chunk::<4>().ok_or(TooShortError { length: ret.len() })?;

     let hash_check = *sha256d::Hash::hash(remaining).as_byte_array().sub_array::<0, 4>();
@@ -409,7 +409,9 @@ struct DisplayUnchecked<'a, N: NetworkValidation>(&'a Address<N>);

 #[cfg(feature = "serde")]
 impl<N: NetworkValidation> fmt::Display for DisplayUnchecked<'_, N> {
-    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.0.inner(), fmt) }
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(&self.0.inner(), fmt)
+    }
 }

 #[cfg(feature = "serde")]
@@ -459,17 +461,11 @@ impl<V: NetworkValidation> serde::Serialize for Address<V> {
 /// Methods on [`Address`] that can be called on both `Address<NetworkChecked>` and
 /// `Address<NetworkUnchecked>`.
 impl<V: NetworkValidation> Address<V> {
-    fn from_inner(inner: AddressInner) -> Self {
-        Address(PhantomData, inner)
-    }
+    fn from_inner(inner: AddressInner) -> Self { Address(PhantomData, inner) }

-    fn into_inner(self) -> AddressInner {
-        self.1
-    }
+    fn into_inner(self) -> AddressInner { self.1 }

-    fn inner(&self) -> &AddressInner {
-        &self.1
-    }
+    fn inner(&self) -> &AddressInner { &self.1 }

     /// Returns a reference to the address as if it was unchecked.
     pub fn as_unchecked(&self) -> &Address<NetworkUnchecked> {
@@ -477,7 +473,9 @@ impl<V: NetworkValidation> Address<V> {
     }

     /// Marks the network of this address as unchecked.
-    pub fn into_unchecked(self) -> Address<NetworkUnchecked> { Address::from_inner(self.into_inner()) }
+    pub fn into_unchecked(self) -> Address<NetworkUnchecked> {
+        Address::from_inner(self.into_inner())
+    }

     /// Returns the [`NetworkKind`] of this address.
     pub fn network_kind(&self) -> NetworkKind {
@@ -807,9 +805,7 @@ impl Address<NetworkUnchecked> {
     /// Returns a reference to the checked address.
     ///
     /// This function is dangerous in case the address is not a valid checked address.
-    pub fn assume_checked_ref(&self) -> &Address {
-        Address::from_inner_ref(self.inner())
-    }
+    pub fn assume_checked_ref(&self) -> &Address { Address::from_inner_ref(self.inner()) }

     /// Parsed addresses do not always have *one* network. The problem is that legacy testnet,
     /// regtest and signet addresses use the same prefix instead of multiple different ones. When
@@ -920,7 +916,8 @@ impl Address<NetworkUnchecked> {
             return Err(LegacyAddressTooLongError { length: s.len() }.into());
         }
         let data = base58::decode_check(s)?;
-        let data: &[u8; 21] = (&*data).try_into().map_err(|_| InvalidBase58PayloadLengthError { length: s.len() })?;
+        let data: &[u8; 21] =
+            (&*data).try_into().map_err(|_| InvalidBase58PayloadLengthError { length: s.len() })?;

         let (prefix, &data) = data.split_first();
@@ -10,7 +10,8 @@ use core::{convert, fmt, mem};
 use std::error;

 use hashes::{sha256, siphash24};
-use internals::{ToU64 as _, array::ArrayExt as _};
+use internals::array::ArrayExt as _;
+use internals::ToU64 as _;
 use io::{BufRead, Write};

 use crate::consensus::encode::{self, Decodable, Encodable, ReadExt, WriteExt};
@@ -42,7 +42,8 @@ use core::convert::Infallible;
 use core::fmt;

 use hashes::{sha256d, siphash24, HashEngine as _};
-use internals::{write_err, ToU64 as _, array::ArrayExt as _};
+use internals::array::ArrayExt as _;
+use internals::{write_err, ToU64 as _};
 use io::{BufRead, Write};

 use crate::block::{Block, BlockHash, Checked};
@@ -682,10 +682,9 @@ impl Xpriv {

         engine.input(&u32::from(i).to_be_bytes());
         let hmac: Hmac<sha512::Hash> = engine.finalize();
-        let sk = secp256k1::SecretKey::from_byte_array(
-            hmac.as_byte_array().split_array::<32, 32>().0,
-        )
-        .expect("statistically impossible to hit");
+        let sk =
+            secp256k1::SecretKey::from_byte_array(hmac.as_byte_array().split_array::<32, 32>().0)
+                .expect("statistically impossible to hit");
         let tweaked =
             sk.add_tweak(&self.private_key.into()).expect("statistically impossible to hit");
@@ -701,14 +700,8 @@ impl Xpriv {

     /// Decoding extended private key from binary data according to BIP 32
     pub fn decode(data: &[u8]) -> Result<Xpriv, Error> {
-        let Common {
-            network,
-            depth,
-            parent_fingerprint,
-            child_number,
-            chain_code,
-            key,
-        } = Common::decode(data)?;
+        let Common { network, depth, parent_fingerprint, child_number, chain_code, key } =
+            Common::decode(data)?;

         let network = match network {
             VERSION_BYTES_MAINNET_PRIVATE => NetworkKind::Main,
@@ -834,7 +827,7 @@ impl Xpub {

         let hmac = engine.finalize();
         let private_key = secp256k1::SecretKey::from_byte_array(
-            hmac.as_byte_array().split_array::<32, 32>().0
+            hmac.as_byte_array().split_array::<32, 32>().0,
         )?;
         let chain_code = ChainCode::from_hmac(hmac);
         Ok((private_key, chain_code))
@@ -863,14 +856,8 @@ impl Xpub {

     /// Decoding extended public key from binary data according to BIP 32
     pub fn decode(data: &[u8]) -> Result<Xpub, Error> {
-        let Common {
-            network,
-            depth,
-            parent_fingerprint,
-            child_number,
-            chain_code,
-            key,
-        } = Common::decode(data)?;
+        let Common { network, depth, parent_fingerprint, child_number, chain_code, key } =
+            Common::decode(data)?;

         let network = match network {
             VERSION_BYTES_MAINNET_PUBLIC => NetworkKind::Main,
@@ -1000,7 +987,8 @@ struct Common {

 impl Common {
     fn decode(data: &[u8]) -> Result<Self, Error> {
-        let data: &[u8; 78] = data.try_into().map_err(|_| Error::WrongExtendedKeyLength(data.len()))?;
+        let data: &[u8; 78] =
+            data.try_into().map_err(|_| Error::WrongExtendedKeyLength(data.len()))?;

         let (&network, data) = data.split_array::<4, 74>();
         let (&depth, data) = data.split_first::<73>();
@@ -58,7 +58,6 @@ mod primitive {
     }

     impl PushBytes {
-
         /// Constructs an empty `&PushBytes`.
         pub fn empty() -> &'static Self { Self::from_slice_unchecked(&[]) }

@@ -14,7 +14,7 @@ use crate::crypto::key::SerializedXOnlyPublicKey;
 use crate::prelude::Vec;
 #[cfg(doc)]
 use crate::script::ScriptExt as _;
-use crate::taproot::{self, ControlBlock, LeafScript, TAPROOT_ANNEX_PREFIX, TaprootMerkleBranch};
+use crate::taproot::{self, ControlBlock, LeafScript, TaprootMerkleBranch, TAPROOT_ANNEX_PREFIX};
 use crate::Script;

 type BorrowedControlBlock<'a> = ControlBlock<&'a TaprootMerkleBranch, &'a SerializedXOnlyPublicKey>;
@@ -387,7 +387,8 @@ mod test {
     #[test]
     fn get_tapscript() {
         let tapscript = hex!("deadbeef");
-        let control_block = hex!("c0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
+        let control_block =
+            hex!("c0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
         // annex starting with 0x50 causes the branching logic.
         let annex = hex!("50");

@@ -435,7 +436,8 @@ mod test {
     #[test]
     fn get_control_block() {
         let tapscript = hex!("deadbeef");
-        let control_block = hex!("c0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
+        let control_block =
+            hex!("c0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
         let expected_control_block = BorrowedControlBlock::decode_borrowed(&control_block).unwrap();
         // annex starting with 0x50 causes the branching logic.
         let annex = hex!("50");
@@ -454,7 +456,8 @@ mod test {
     #[test]
     fn get_annex() {
         let tapscript = hex!("deadbeef");
-        let control_block = hex!("c0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
+        let control_block =
+            hex!("c0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
         // annex starting with 0x50 causes the branching logic.
         let annex = hex!("50");

@@ -26,10 +26,9 @@ use crate::taproot::{TapNodeHash, TapTweakHash};

 #[rustfmt::skip] // Keep public re-exports separate.
 pub use secp256k1::{constants, Keypair, Parity, Secp256k1, Verification, XOnlyPublicKey};
-pub use serialized_x_only::SerializedXOnlyPublicKey;
-
 #[cfg(feature = "rand-std")]
 pub use secp256k1::rand;
+pub use serialized_x_only::SerializedXOnlyPublicKey;

 /// A Bitcoin ECDSA public key.
 #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -528,11 +527,7 @@ impl PrivateKey {
             }
         };

-        Ok(PrivateKey {
-            compressed,
-            network,
-            inner: secp256k1::SecretKey::from_byte_array(key)?,
-        })
+        Ok(PrivateKey { compressed, network, inner: secp256k1::SecretKey::from_byte_array(key)? })
     }

     /// Returns a new private key with the negated secret value.
@@ -1239,19 +1234,13 @@ mod serialized_x_only {

     impl SerializedXOnlyPublicKey {
         /// Marks the supplied bytes as a serialized x-only public key.
-        pub const fn from_byte_array(bytes: [u8; 32]) -> Self {
-            Self(bytes)
-        }
+        pub const fn from_byte_array(bytes: [u8; 32]) -> Self { Self(bytes) }

         /// Returns the raw bytes.
-        pub const fn to_byte_array(self) -> [u8; 32] {
-            self.0
-        }
+        pub const fn to_byte_array(self) -> [u8; 32] { self.0 }

         /// Returns a reference to the raw bytes.
-        pub const fn as_byte_array(&self) -> &[u8; 32] {
-            &self.0
-        }
+        pub const fn as_byte_array(&self) -> &[u8; 32] { &self.0 }
     }
 }

@@ -1263,15 +1252,11 @@ impl SerializedXOnlyPublicKey {
 }

 impl AsRef<[u8; 32]> for SerializedXOnlyPublicKey {
-    fn as_ref(&self) -> &[u8; 32] {
-        self.as_byte_array()
-    }
+    fn as_ref(&self) -> &[u8; 32] { self.as_byte_array() }
 }

 impl From<&SerializedXOnlyPublicKey> for SerializedXOnlyPublicKey {
-    fn from(borrowed: &SerializedXOnlyPublicKey) -> Self {
-        *borrowed
-    }
+    fn from(borrowed: &SerializedXOnlyPublicKey) -> Self { *borrowed }
 }

 impl fmt::Debug for SerializedXOnlyPublicKey {
@@ -9,8 +9,8 @@ use core::fmt;

 #[cfg(feature = "arbitrary")]
 use arbitrary::{Arbitrary, Unstructured};
-use internals::write_err;
 use internals::array::ArrayExt;
+use internals::write_err;
 use io::Write;

 use crate::prelude::Vec;
@@ -4,13 +4,10 @@
 //!
 //! This module is deprecated. You can find hash types in their respective, hopefully obvious, modules.

-#[deprecated(since = "TBD", note = "use `crate::T` instead")]
-pub use crate::{
-    BlockHash, TxMerkleNode, Txid, WitnessCommitment, WitnessMerkleNode,
-    Wtxid,
-};
 #[deprecated(since = "TBD", note = "use `crate::T` instead")]
 pub use crate::bip158::{FilterHash, FilterHeader};
+#[deprecated(since = "TBD", note = "use `crate::T` instead")]
+pub use crate::{BlockHash, TxMerkleNode, Txid, WitnessCommitment, WitnessMerkleNode, Wtxid};

 #[cfg(test)]
 mod tests {
@@ -702,7 +702,7 @@ mod test {

     use super::*;
     use crate::bip152::BlockTransactionsRequest;
-    use crate::bip158::{FilterHeader, FilterHash};
+    use crate::bip158::{FilterHash, FilterHeader};
     use crate::block::{Block, BlockHash};
    use crate::consensus::encode::{deserialize, deserialize_partial, serialize};
     use crate::p2p::address::AddrV2;
@@ -738,19 +738,25 @@ mod test {
                 45,
                 Address::new(&([123, 255, 000, 100], 833).into(), ServiceFlags::NETWORK),
             )]),
-            NetworkMessage::Inv(vec![Inventory::Block(BlockHash::from_byte_array(hash([8u8; 32]).to_byte_array()))]),
-            NetworkMessage::GetData(vec![Inventory::Transaction(Txid::from_byte_array(hash([45u8; 32]).to_byte_array()))]),
+            NetworkMessage::Inv(vec![Inventory::Block(BlockHash::from_byte_array(
+                hash([8u8; 32]).to_byte_array(),
+            ))]),
+            NetworkMessage::GetData(vec![Inventory::Transaction(Txid::from_byte_array(
+                hash([45u8; 32]).to_byte_array(),
+            ))]),
             NetworkMessage::NotFound(vec![Inventory::Error([0u8; 32])]),
             NetworkMessage::GetBlocks(GetBlocksMessage::new(
                 vec![
                     BlockHash::from_byte_array(hash([1u8; 32]).to_byte_array()),
-                    BlockHash::from_byte_array(hash([4u8; 32]).to_byte_array())],
+                    BlockHash::from_byte_array(hash([4u8; 32]).to_byte_array()),
+                ],
                 BlockHash::from_byte_array(hash([5u8; 32]).to_byte_array()),
             )),
             NetworkMessage::GetHeaders(GetHeadersMessage::new(
                 vec![
                     BlockHash::from_byte_array(hash([10u8; 32]).to_byte_array()),
-                    BlockHash::from_byte_array(hash([40u8; 32]).to_byte_array())],
+                    BlockHash::from_byte_array(hash([40u8; 32]).to_byte_array()),
+                ],
                 BlockHash::from_byte_array(hash([50u8; 32]).to_byte_array()),
             )),
             NetworkMessage::MemPool,
@@ -791,8 +797,13 @@ mod test {
             NetworkMessage::CFHeaders(CFHeaders {
                 filter_type: 13,
                 stop_hash: BlockHash::from_byte_array(hash([53u8; 32]).to_byte_array()),
-                previous_filter_header: FilterHeader::from_byte_array(hash([12u8; 32]).to_byte_array()),
-                filter_hashes: vec![FilterHash::from_byte_array(hash([4u8; 32]).to_byte_array()), FilterHash::from_byte_array(hash([12u8; 32]).to_byte_array())],
+                previous_filter_header: FilterHeader::from_byte_array(
+                    hash([12u8; 32]).to_byte_array(),
+                ),
+                filter_hashes: vec![
+                    FilterHash::from_byte_array(hash([4u8; 32]).to_byte_array()),
+                    FilterHash::from_byte_array(hash([12u8; 32]).to_byte_array()),
+                ],
             }),
             NetworkMessage::GetCFCheckpt(GetCFCheckpt {
                 filter_type: 17,
@@ -801,7 +812,10 @@ mod test {
             NetworkMessage::CFCheckpt(CFCheckpt {
                 filter_type: 27,
                 stop_hash: BlockHash::from_byte_array(hash([77u8; 32]).to_byte_array()),
-                filter_headers: vec![FilterHeader::from_byte_array(hash([3u8; 32]).to_byte_array()), FilterHeader::from_byte_array(hash([99u8; 32]).to_byte_array())],
+                filter_headers: vec![
+                    FilterHeader::from_byte_array(hash([3u8; 32]).to_byte_array()),
+                    FilterHeader::from_byte_array(hash([99u8; 32]).to_byte_array()),
+                ],
             }),
             NetworkMessage::Alert(vec![45, 66, 3, 2, 6, 8, 9, 12, 3, 130]),
             NetworkMessage::Reject(Reject {
@@ -216,8 +216,8 @@ impl Serialize for KeySource {

 impl Deserialize for KeySource {
     fn deserialize(bytes: &[u8]) -> Result<Self, Error> {
-        let (fingerprint, mut d) = bytes.split_first_chunk::<4>()
-            .ok_or(io::Error::from(io::ErrorKind::UnexpectedEof))?;
+        let (fingerprint, mut d) =
+            bytes.split_first_chunk::<4>().ok_or(io::Error::from(io::ErrorKind::UnexpectedEof))?;

         let fprint: Fingerprint = fingerprint.into();
         let mut dpath: Vec<ChildNumber> = Default::default();
@@ -1,10 +1,13 @@
 use core::borrow::{Borrow, BorrowMut};

 use internals::slice::SliceExt;

-use super::{DecodeError, InvalidMerkleBranchSizeError, InvalidMerkleTreeDepthError, TaprootMerkleBranchBuf, TapNodeHash, TAPROOT_CONTROL_MAX_NODE_COUNT, TAPROOT_CONTROL_NODE_SIZE};
-
 pub use privacy_boundary::TaprootMerkleBranch;

+use super::{
+    DecodeError, InvalidMerkleBranchSizeError, InvalidMerkleTreeDepthError, TapNodeHash,
+    TaprootMerkleBranchBuf, TAPROOT_CONTROL_MAX_NODE_COUNT, TAPROOT_CONTROL_NODE_SIZE,
+};
+
 /// Makes sure only the allowed conversions are accessible to external code.
 mod privacy_boundary {
     use super::*;
@@ -33,9 +36,7 @@ mod privacy_boundary {

     impl TaprootMerkleBranch {
         /// Returns an empty branch.
-        pub const fn new() -> &'static Self {
-            Self::from_hashes_unchecked(&[])
-        }
+        pub const fn new() -> &'static Self { Self::from_hashes_unchecked(&[]) }

         /// Returns the number of nodes in this Merkle proof.
         #[inline]
@@ -91,7 +92,9 @@ impl TaprootMerkleBranch {
     /// Decodes a byte slice that is statically known to be multiple of 32.
     ///
     /// This can be used as a building block for other ways of decoding.
-    fn decode_exact(nodes: &[[u8; TAPROOT_CONTROL_NODE_SIZE]]) -> Result<&Self, InvalidMerkleTreeDepthError> {
+    fn decode_exact(
+        nodes: &[[u8; TAPROOT_CONTROL_NODE_SIZE]],
+    ) -> Result<&Self, InvalidMerkleTreeDepthError> {
         // SAFETY:
         // The lifetime of the returned reference is the same as the lifetime of the input
         // reference, the size of `TapNodeHash` is equal to `TAPROOT_CONTROL_NODE_SIZE` and the
@@ -99,7 +102,7 @@ impl TaprootMerkleBranch {
         Self::from_hashes(unsafe { &*(nodes as *const _ as *const [TapNodeHash]) })
     }

-    fn from_hashes(nodes: &[TapNodeHash]) -> Result<&Self, InvalidMerkleTreeDepthError>{
+    fn from_hashes(nodes: &[TapNodeHash]) -> Result<&Self, InvalidMerkleTreeDepthError> {
         if nodes.len() <= TAPROOT_CONTROL_MAX_NODE_COUNT {
             Ok(Self::from_hashes_unchecked(nodes))
         } else {
@@ -109,21 +112,15 @@ impl TaprootMerkleBranch {
 }

 impl Default for &'_ TaprootMerkleBranch {
-    fn default() -> Self {
-        TaprootMerkleBranch::new()
-    }
+    fn default() -> Self { TaprootMerkleBranch::new() }
 }

 impl AsRef<TaprootMerkleBranch> for TaprootMerkleBranch {
-    fn as_ref(&self) -> &TaprootMerkleBranch {
-        self
-    }
+    fn as_ref(&self) -> &TaprootMerkleBranch { self }
 }

 impl AsMut<TaprootMerkleBranch> for TaprootMerkleBranch {
-    fn as_mut(&mut self) -> &mut TaprootMerkleBranch {
-        self
-    }
+    fn as_mut(&mut self) -> &mut TaprootMerkleBranch { self }
 }

 impl AsRef<TaprootMerkleBranch> for TaprootMerkleBranchBuf {
@@ -248,18 +245,14 @@ impl alloc::borrow::ToOwned for TaprootMerkleBranch {
     // `Cow`.
     type Owned = TaprootMerkleBranchBuf;

-    fn to_owned(&self) -> Self::Owned {
-        self.into()
-    }
+    fn to_owned(&self) -> Self::Owned { self.into() }
 }

 impl<'a> IntoIterator for &'a TaprootMerkleBranch {
     type IntoIter = core::slice::Iter<'a, TapNodeHash>;
     type Item = &'a TapNodeHash;

-    fn into_iter(self) -> Self::IntoIter {
-        self.as_slice().iter()
-    }
+    fn into_iter(self) -> Self::IntoIter { self.as_slice().iter() }
 }

 impl<'a> IntoIterator for &'a mut TaprootMerkleBranch {
@@ -274,7 +267,10 @@ impl<'a> IntoIterator for &'a mut TaprootMerkleBranch {
 mod tests {
     #[test]
     fn alignment() {
-        assert!(core::mem::align_of_val(super::TaprootMerkleBranch::new()) == core::mem::align_of::<u8>());
+        assert!(
+            core::mem::align_of_val(super::TaprootMerkleBranch::new())
+                == core::mem::align_of::<u8>()
+        );
     }

     const _: () = {
@@ -86,7 +86,10 @@ impl TaprootMerkleBranchBuf {
     }

     /// Appends elements to proof.
-    pub(in super::super) fn push(&mut self, h: TapNodeHash) -> Result<(), InvalidMerkleTreeDepthError> {
+    pub(in super::super) fn push(
+        &mut self,
+        h: TapNodeHash,
+    ) -> Result<(), InvalidMerkleTreeDepthError> {
         if self.len() >= TAPROOT_CONTROL_MAX_NODE_COUNT {
             Err(InvalidMerkleTreeDepthError(self.0.len()))
         } else {
@@ -213,9 +216,7 @@ impl BorrowMut<[TapNodeHash]> for TaprootMerkleBranchBuf {
 }

 impl<'a> From<&'a TaprootMerkleBranch> for TaprootMerkleBranchBuf {
-    fn from(value: &'a TaprootMerkleBranch) -> Self {
-        Self(value.as_slice().into())
-    }
+    fn from(value: &'a TaprootMerkleBranch) -> Self { Self(value.as_slice().into()) }
 }

 /// Iterator over node hashes within Taproot Merkle branch.
@@ -1,12 +1,13 @@
 //! Contains `TaprootMerkleBranchBuf` and its associated types.

-mod buf;
 mod borrowed;
-pub use buf::TaprootMerkleBranchBuf;
-pub use borrowed::TaprootMerkleBranch;
+mod buf;

 use core::fmt;

+pub use borrowed::TaprootMerkleBranch;
+pub use buf::TaprootMerkleBranchBuf;
+
 use super::{
     InvalidMerkleBranchSizeError, InvalidMerkleTreeDepthError, TapNodeHash, TaprootError,
     TAPROOT_CONTROL_MAX_NODE_COUNT, TAPROOT_CONTROL_NODE_SIZE,
@@ -28,27 +29,30 @@ pub struct DecodeError {
 }

 impl From<InvalidMerkleBranchSizeError> for DecodeError {
-    fn from(value: InvalidMerkleBranchSizeError) -> Self {
-        Self {
-            num_bytes: value.0,
-        }
-    }
+    fn from(value: InvalidMerkleBranchSizeError) -> Self { Self { num_bytes: value.0 } }
 }

 impl From<InvalidMerkleTreeDepthError> for DecodeError {
     fn from(value: InvalidMerkleTreeDepthError) -> Self {
-        Self {
-            num_bytes: value.0 * TAPROOT_CONTROL_NODE_SIZE,
-        }
+        Self { num_bytes: value.0 * TAPROOT_CONTROL_NODE_SIZE }
     }
 }

 impl fmt::Display for DecodeError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         if self.num_bytes % TAPROOT_CONTROL_NODE_SIZE == 0 {
-            write!(f, "the Merkle branch has {} nodes which is more than the limit {}", self.num_bytes / TAPROOT_CONTROL_NODE_SIZE, TAPROOT_CONTROL_MAX_NODE_COUNT)
+            write!(
+                f,
+                "the Merkle branch has {} nodes which is more than the limit {}",
+                self.num_bytes / TAPROOT_CONTROL_NODE_SIZE,
+                TAPROOT_CONTROL_MAX_NODE_COUNT
+            )
         } else {
-            write!(f, "the Merkle branch is {} bytes long which is not an integer multiple of {}", self.num_bytes, TAPROOT_CONTROL_NODE_SIZE)
+            write!(
+                f,
+                "the Merkle branch is {} bytes long which is not an integer multiple of {}",
+                self.num_bytes, TAPROOT_CONTROL_NODE_SIZE
+            )
         }
     }
 }
@@ -14,15 +14,16 @@ use core::iter::FusedIterator;

 use hashes::{hash_newtype, sha256t, sha256t_tag, HashEngine};
 use internals::array::ArrayExt;
-use internals::{impl_to_hex_from_lower_hex, write_err};
 #[allow(unused)] // MSRV polyfill
 use internals::slice::SliceExt;
+use internals::{impl_to_hex_from_lower_hex, write_err};
 use io::Write;
 use secp256k1::{Scalar, Secp256k1};

 use crate::consensus::Encodable;
-use crate::crypto::key::{SerializedXOnlyPublicKey, TapTweak, TweakedPublicKey, UntweakedPublicKey, XOnlyPublicKey};
+use crate::crypto::key::{
+    SerializedXOnlyPublicKey, TapTweak, TweakedPublicKey, UntweakedPublicKey, XOnlyPublicKey,
+};
 use crate::prelude::{BTreeMap, BTreeSet, BinaryHeap, Vec};
 use crate::{Script, ScriptBuf};

@@ -31,9 +32,9 @@ use crate::{Script, ScriptBuf};
 #[doc(inline)]
 pub use crate::crypto::taproot::{SigFromSliceError, Signature};
 #[doc(inline)]
-pub use merkle_branch::TaprootMerkleBranchBuf;
-#[doc(inline)]
 pub use merkle_branch::TaprootMerkleBranch;
+#[doc(inline)]
+pub use merkle_branch::TaprootMerkleBranchBuf;

 type ControlBlockArrayVec = internals::array_vec::ArrayVec<u8, TAPROOT_CONTROL_MAX_SIZE>;

@@ -1141,7 +1142,10 @@ impl<'leaf> ScriptLeaf<'leaf> {
 /// Control block data structure used in Tapscript satisfaction.
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
-pub struct ControlBlock<Branch = TaprootMerkleBranchBuf, Key = UntweakedPublicKey> where Branch: ?Sized {
+pub struct ControlBlock<Branch = TaprootMerkleBranchBuf, Key = UntweakedPublicKey>
+where
+    Branch: ?Sized,
+{
     /// The tapleaf version.
     pub leaf_version: LeafVersion,
     /// The parity of the output key (NOT THE INTERNAL KEY WHICH IS ALWAYS XONLY).
@@ -1168,12 +1172,8 @@ impl ControlBlock {
     pub fn decode(sl: &[u8]) -> Result<ControlBlock, TaprootError> {
         use alloc::borrow::ToOwned;

-        let ControlBlock {
-            leaf_version,
-            output_key_parity,
-            internal_key,
-            merkle_branch,
-        } = ControlBlock::<&TaprootMerkleBranch, &SerializedXOnlyPublicKey>::decode_borrowed(sl)?;
+        let ControlBlock { leaf_version, output_key_parity, internal_key, merkle_branch } =
+            ControlBlock::<&TaprootMerkleBranch, &SerializedXOnlyPublicKey>::decode_borrowed(sl)?;

         let internal_key = internal_key.to_validated().map_err(TaprootError::InvalidInternalKey)?;
         let merkle_branch = merkle_branch.to_owned();
@@ -1183,8 +1183,13 @@ impl ControlBlock {
 }

 impl<B, K> ControlBlock<B, K> {
-    pub(crate) fn decode_borrowed<'a>(sl: &'a [u8]) -> Result<Self, TaprootError> where B: From<&'a TaprootMerkleBranch>, K: From<&'a SerializedXOnlyPublicKey> {
-        let (base, merkle_branch) = sl.split_first_chunk::<TAPROOT_CONTROL_BASE_SIZE>()
+    pub(crate) fn decode_borrowed<'a>(sl: &'a [u8]) -> Result<Self, TaprootError>
+    where
+        B: From<&'a TaprootMerkleBranch>,
+        K: From<&'a SerializedXOnlyPublicKey>,
+    {
+        let (base, merkle_branch) = sl
+            .split_first_chunk::<TAPROOT_CONTROL_BASE_SIZE>()
             .ok_or(InvalidControlBlockSizeError(sl.len()))?;

         let (&first, internal_key) = base.split_first();
@@ -1223,7 +1228,8 @@ impl<Branch: AsRef<TaprootMerkleBranch> + ?Sized> ControlBlock<Branch> {
         self.encode_inner(|bytes| -> Result<(), core::convert::Infallible> {
             result.extend_from_slice(bytes);
             Ok(())
-        }).unwrap_or_else(|never| match never {});
+        })
+        .unwrap_or_else(|never| match never {});
         result
     }

@@ -26,9 +26,7 @@ impl<T: Hash + str::FromStr> str::FromStr for Hmac<T> {
 }

 impl<T: Hash> PartialEq for Hmac<T> {
-    fn eq(&self, other: &Self) -> bool {
-        crate::cmp::fixed_time_eq(self.as_ref(), other.as_ref())
-    }
+    fn eq(&self, other: &Self) -> bool { crate::cmp::fixed_time_eq(self.as_ref(), other.as_ref()) }
 }

 impl<T: Hash> Eq for Hmac<T> {}
@@ -134,7 +134,9 @@ impl<T: Tag> PartialOrd for Hash<T> {
     }
 }
 impl<T: Tag> Ord for Hash<T> {
-    fn cmp(&self, other: &Hash<T>) -> cmp::Ordering { cmp::Ord::cmp(&self.as_byte_array(), &other.as_byte_array()) }
+    fn cmp(&self, other: &Hash<T>) -> cmp::Ordering {
+        cmp::Ord::cmp(&self.as_byte_array(), &other.as_byte_array())
+    }
 }
 impl<T: Tag> core::hash::Hash for Hash<T> {
     fn hash<H: core::hash::Hasher>(&self, h: &mut H) { self.as_byte_array().hash(h) }
@@ -17,9 +17,7 @@ pub trait ArrayExt {
     /// Returns an item at given statically-known index.
     ///
     /// This is just like normal indexing except the check happens at compile time.
-    fn get_static<const INDEX: usize>(&self) -> &Self::Item {
-        &self.sub_array::<INDEX, 1>()[0]
-    }
+    fn get_static<const INDEX: usize>(&self) -> &Self::Item { &self.sub_array::<INDEX, 1>()[0] }

     /// Returns the first item in an array.
     ///
@@ -30,9 +28,7 @@ pub trait ArrayExt {
     /// that this will not return `None` so trying to keep the `std` method around is pointless.
     /// Importing the trait will also cause compile failures - that's also intentional to expose
     /// the places where useless checks are made.
-    fn first(&self) -> &Self::Item {
-        self.get_static::<0>()
-    }
+    fn first(&self) -> &Self::Item { self.get_static::<0>() }

     /// Splits the array into two, non-overlaping smaller arrays covering the entire range.
     ///
@@ -40,7 +36,9 @@ pub trait ArrayExt {
     /// checks that the arrays don't overlap and that they cover the full range. This is very useful
     /// for demonstrating correctness, especially when chained. Using this technique even revealed
     /// a bug in the past. ([#4195](https://github.com/rust-bitcoin/rust-bitcoin/issues/4195))
-    fn split_array<const LEFT: usize, const RIGHT: usize>(&self) -> (&[Self::Item; LEFT], &[Self::Item; RIGHT]);
+    fn split_array<const LEFT: usize, const RIGHT: usize>(
+        &self,
+    ) -> (&[Self::Item; LEFT], &[Self::Item; RIGHT]);

     /// Splits the array into the first element and the remaining, one element shorter, array.
     ///
@@ -84,7 +82,9 @@ impl<const N: usize, T> ArrayExt for [T; N] {
         self[OFFSET..(OFFSET + LEN)].try_into().expect("this is also compiler-checked above")
     }

-    fn split_array<const LEFT: usize, const RIGHT: usize>(&self) -> (&[Self::Item; LEFT], &[Self::Item; RIGHT]) {
+    fn split_array<const LEFT: usize, const RIGHT: usize>(
+        &self,
+    ) -> (&[Self::Item; LEFT], &[Self::Item; RIGHT]) {
         #[allow(clippy::let_unit_value)]
         let _ = Hack2::<N, LEFT, RIGHT>::IS_FULL_RANGE;

@@ -209,10 +209,12 @@ macro_rules! _check_tts_eq {
     ($left:tt, $right:tt, $message:literal) => {
         macro_rules! token_eq {
             ($right) => {};
-            ($any:tt) => { compile_error!($message) };
+            ($any:tt) => {
+                compile_error!($message)
+            };
         }
         token_eq!($left);
-    }
+    };
 }

 #[doc(hidden)]
@@ -37,14 +37,18 @@ pub trait SliceExt {
     /// Returns `None` if the slice is shorter than `ARRAY_LEN`
     #[allow(clippy::type_complexity)] // it's not really complex and redefining would make it
                                       // harder to understand
-    fn split_first_chunk<const ARRAY_LEN: usize>(&self) -> Option<(&[Self::Item; ARRAY_LEN], &[Self::Item])>;
+    fn split_first_chunk<const ARRAY_LEN: usize>(
+        &self,
+    ) -> Option<(&[Self::Item; ARRAY_LEN], &[Self::Item])>;

     /// Splits the slice into a remainder and an array if it's long enough.
     ///
     /// Returns `None` if the slice is shorter than `ARRAY_LEN`
     #[allow(clippy::type_complexity)] // it's not really complex and redefining would make it
                                       // harder to understand
-    fn split_last_chunk<const ARRAY_LEN: usize>(&self) -> Option<(&[Self::Item], &[Self::Item; ARRAY_LEN])>;
+    fn split_last_chunk<const ARRAY_LEN: usize>(
+        &self,
+    ) -> Option<(&[Self::Item], &[Self::Item; ARRAY_LEN])>;
 }

 impl<T> SliceExt for [T] {
@@ -90,11 +94,16 @@ impl<T> SliceExt for [T] {
     }

     fn get_array<const ARRAY_LEN: usize>(&self, offset: usize) -> Option<&[Self::Item; ARRAY_LEN]> {
-        self.get(offset..(offset + ARRAY_LEN))
-            .map(|slice| slice.try_into().expect("the arguments to `get` evaluate to the same length the return type uses"))
+        self.get(offset..(offset + ARRAY_LEN)).map(|slice| {
+            slice
+                .try_into()
+                .expect("the arguments to `get` evaluate to the same length the return type uses")
+        })
     }

-    fn split_first_chunk<const ARRAY_LEN: usize>(&self) -> Option<(&[Self::Item; ARRAY_LEN], &[Self::Item])> {
+    fn split_first_chunk<const ARRAY_LEN: usize>(
+        &self,
+    ) -> Option<(&[Self::Item; ARRAY_LEN], &[Self::Item])> {
         if self.len() < ARRAY_LEN {
             return None;
         }
@@ -102,12 +111,17 @@ impl<T> SliceExt for [T] {
         Some((first.try_into().expect("we're passing `ARRAY_LEN` to `split_at` above"), remainder))
     }

-    fn split_last_chunk<const ARRAY_LEN: usize>(&self) -> Option<(&[Self::Item], &[Self::Item; ARRAY_LEN])> {
+    fn split_last_chunk<const ARRAY_LEN: usize>(
+        &self,
+    ) -> Option<(&[Self::Item], &[Self::Item; ARRAY_LEN])> {
         if self.len() < ARRAY_LEN {
             return None;
         }
         let (remainder, last) = self.split_at(self.len() - ARRAY_LEN);
-        Some((remainder, last.try_into().expect("we're passing `self.len() - ARRAY_LEN` to `split_at` above")))
+        Some((
+            remainder,
+            last.try_into().expect("we're passing `self.len() - ARRAY_LEN` to `split_at` above"),
+        ))
     }
 }

@@ -486,7 +486,7 @@ mod tests {

     #[test]
     fn from_seconds_ceil_and_floor() {
-        let time = 70*512+1;
+        let time = 70 * 512 + 1;
         let lock_by_time = LockTime::from_seconds_ceil(time).unwrap();
         assert_eq!(lock_by_time, LockTime::from_512_second_intervals(71));

@@ -494,7 +494,7 @@ mod tests {
         assert_eq!(lock_by_time, LockTime::from_512_second_intervals(70));

         let mut max_time = 0xffff * 512;
-        assert_eq!(LockTime::from_seconds_ceil(max_time),LockTime::from_seconds_floor(max_time));
+        assert_eq!(LockTime::from_seconds_ceil(max_time), LockTime::from_seconds_floor(max_time));
         max_time += 512;
         assert!(LockTime::from_seconds_ceil(max_time).is_err());
         assert!(LockTime::from_seconds_floor(max_time).is_err());
@@ -276,16 +276,12 @@ impl<'a> From<&'a Script> for Cow<'a, Script> {
 #[cfg(target_has_atomic = "ptr")]
 impl<'a> From<&'a Script> for Arc<Script> {
     #[inline]
-    fn from(value: &'a Script) -> Self {
-        Script::from_arc_bytes(Arc::from(value.as_bytes()))
-    }
+    fn from(value: &'a Script) -> Self { Script::from_arc_bytes(Arc::from(value.as_bytes())) }
 }

 impl<'a> From<&'a Script> for Rc<Script> {
     #[inline]
-    fn from(value: &'a Script) -> Self {
-        Script::from_rc_bytes(Rc::from(value.as_bytes()))
-    }
+    fn from(value: &'a Script) -> Self { Script::from_rc_bytes(Rc::from(value.as_bytes())) }
 }

 impl From<Vec<u8>> for ScriptBuf {
@@ -11,8 +11,8 @@ use core::ops::Index;
 use arbitrary::{Arbitrary, Unstructured};
 use hex::DisplayHex;
 use internals::compact_size;
-use internals::wrap_debug::WrapDebug;
 use internals::slice::SliceExt;
+use internals::wrap_debug::WrapDebug;

 use crate::prelude::{Box, Vec};

@@ -250,95 +250,65 @@ impl<T: core::borrow::Borrow<[u8]>> PartialEq<[T]> for Witness {
 }

 impl<T: core::borrow::Borrow<[u8]>> PartialEq<&[T]> for Witness {
-    fn eq(&self, rhs: &&[T]) -> bool {
-        *self == **rhs
-    }
+    fn eq(&self, rhs: &&[T]) -> bool { *self == **rhs }
 }

 impl<T: core::borrow::Borrow<[u8]>> PartialEq<Witness> for [T] {
-    fn eq(&self, rhs: &Witness) -> bool {
-        *rhs == *self
-    }
+    fn eq(&self, rhs: &Witness) -> bool { *rhs == *self }
 }

 impl<T: core::borrow::Borrow<[u8]>> PartialEq<Witness> for &[T] {
-    fn eq(&self, rhs: &Witness) -> bool {
-        *rhs == **self
-    }
+    fn eq(&self, rhs: &Witness) -> bool { *rhs == **self }
 }

 impl<const N: usize, T: core::borrow::Borrow<[u8]>> PartialEq<[T; N]> for Witness {
-    fn eq(&self, rhs: &[T; N]) -> bool {
-        *self == *rhs.as_slice()
-    }
+    fn eq(&self, rhs: &[T; N]) -> bool { *self == *rhs.as_slice() }
 }

 impl<const N: usize, T: core::borrow::Borrow<[u8]>> PartialEq<&[T; N]> for Witness {
-    fn eq(&self, rhs: &&[T; N]) -> bool {
-        *self == *rhs.as_slice()
-    }
+    fn eq(&self, rhs: &&[T; N]) -> bool { *self == *rhs.as_slice() }
 }

 impl<const N: usize, T: core::borrow::Borrow<[u8]>> PartialEq<Witness> for [T; N] {
-    fn eq(&self, rhs: &Witness) -> bool {
-        *rhs == *self
-    }
+    fn eq(&self, rhs: &Witness) -> bool { *rhs == *self }
 }

 impl<const N: usize, T: core::borrow::Borrow<[u8]>> PartialEq<Witness> for &[T; N] {
-    fn eq(&self, rhs: &Witness) -> bool {
-        *rhs == **self
-    }
+    fn eq(&self, rhs: &Witness) -> bool { *rhs == **self }
 }

 impl<T: core::borrow::Borrow<[u8]>> PartialEq<Vec<T>> for Witness {
-    fn eq(&self, rhs: &Vec<T>) -> bool {
-        *self == **rhs
-    }
+    fn eq(&self, rhs: &Vec<T>) -> bool { *self == **rhs }
 }

 impl<T: core::borrow::Borrow<[u8]>> PartialEq<Witness> for Vec<T> {
-    fn eq(&self, rhs: &Witness) -> bool {
-        *rhs == *self
-    }
+    fn eq(&self, rhs: &Witness) -> bool { *rhs == *self }
 }

 impl<T: core::borrow::Borrow<[u8]>> PartialEq<Box<[T]>> for Witness {
-    fn eq(&self, rhs: &Box<[T]>) -> bool {
-        *self == **rhs
-    }
+    fn eq(&self, rhs: &Box<[T]>) -> bool { *self == **rhs }
 }

 impl<T: core::borrow::Borrow<[u8]>> PartialEq<Witness> for Box<[T]> {
-    fn eq(&self, rhs: &Witness) -> bool {
-        *rhs == *self
-    }
+    fn eq(&self, rhs: &Witness) -> bool { *rhs == *self }
 }

 impl<T: core::borrow::Borrow<[u8]>> PartialEq<alloc::rc::Rc<[T]>> for Witness {
-    fn eq(&self, rhs: &alloc::rc::Rc<[T]>) -> bool {
-        *self == **rhs
-    }
+    fn eq(&self, rhs: &alloc::rc::Rc<[T]>) -> bool { *self == **rhs }
 }

 impl<T: core::borrow::Borrow<[u8]>> PartialEq<Witness> for alloc::rc::Rc<[T]> {
-    fn eq(&self, rhs: &Witness) -> bool {
-        *rhs == *self
-    }
+    fn eq(&self, rhs: &Witness) -> bool { *rhs == *self }
 }

 #[cfg(target_has_atomic = "ptr")]
 impl<T: core::borrow::Borrow<[u8]>> PartialEq<alloc::sync::Arc<[T]>> for Witness {
-    fn eq(&self, rhs: &alloc::sync::Arc<[T]>) -> bool {
-        *self == **rhs
-    }
+    fn eq(&self, rhs: &alloc::sync::Arc<[T]>) -> bool { *self == **rhs }
 }

 #[cfg(target_has_atomic = "ptr")]
 impl<T: core::borrow::Borrow<[u8]>> PartialEq<Witness> for alloc::sync::Arc<[T]> {
-    fn eq(&self, rhs: &Witness) -> bool {
-        *rhs == *self
-    }
+    fn eq(&self, rhs: &Witness) -> bool { *rhs == *self }
 }

 /// Debug implementation that displays the witness as a structured output containing:
@@ -580,9 +550,7 @@ mod test {
     }

     // A witness with a single element that is empty (zero length).
-    fn single_empty_element() -> Witness {
-        Witness::from([[0u8; 0]])
-    }
+    fn single_empty_element() -> Witness { Witness::from([[0u8; 0]]) }

     #[test]
     fn witness_debug_can_display_empty_element() {
@@ -729,13 +697,11 @@ mod test {
         const EMPTY_BYTES: &[u8] = &[];
         assert_eq!(Vec::<&[u8]>::new(), Witness::new());
         macro_rules! ck {
-            ($container:expr) => {
-                {
-                    let container = $container;
-                    let witness = Witness::from(Clone::clone(&container));
-                    assert_eq!(witness, container, stringify!($container));
-                }
-            }
+            ($container:expr) => {{
+                let container = $container;
+                let witness = Witness::from(Clone::clone(&container));
+                assert_eq!(witness, container, stringify!($container));
+            }};
         }
         ck!([EMPTY_BYTES]);
         ck!([EMPTY_BYTES, EMPTY_BYTES]);
@@ -233,10 +233,7 @@ impl SignedAmount {
     ///
     /// Please be aware of the risk of using floating-point numbers.
     #[cfg(feature = "alloc")]
-    pub fn from_float_in(
-        value: f64,
-        denom: Denomination,
-    ) -> Result<Self, ParseAmountError> {
+    pub fn from_float_in(value: f64, denom: Denomination) -> Result<Self, ParseAmountError> {
         // This is inefficient, but the safest way to deal with this. The parsing logic is safe.
         // Any performance-critical application should not be dealing with floats.
         Self::from_str_in(&value.to_string(), denom)