Merge rust-bitcoin/rust-bitcoin#806: Use rustfmt to hint at clean ups for the codebase

a77907d59c Remove unnecessary explicit type annotation (Tobin Harding)
71cf00a314 Use less vertical lines (Tobin Harding)
a5c06e0a96 Refactor vector initialisation (Tobin Harding)
aabf2d1681 Use brace not parenth for macro arm (Tobin Harding)
b021415a88 Use block style function call (Tobin Harding)
d6462bae7b Refactor usage of + (Tobin Harding)
702e8bf82d Refactor consensus_encode (Tobin Harding)
a8ed95ea07 Refactor where statements (Tobin Harding)
6d84998168 Improve braces usage (Tobin Harding)
39ec59620d Fix unusual indentation (Tobin Harding)
b9b6e7e1c6 Remove unneeded braces (Tobin Harding)
5d68ad85ed Remove unneeded return statement (Tobin Harding)
bf4f5638e0 Refactor whitespace (Tobin Harding)
1c502399f1 Remove trailing whitespace (Tobin Harding)

Pull request description:

  `rustfmt` is still under discussion. While researching the topic I came across a maintainer of another project that does not use `rustfmt` who mentioned that he manually applies the `rustfmt` suggestions he likes every month or so. This seemed like a good idea, so I did it. It was extremely painful, but I believe I have put together a PR that is non-controversial, with well-separated patches.

  Totally non-urgent.

ACKs for top commit:
  apoelstra:
    ACK a77907d59c
  sanket1729:
    ACK a77907d59c.

Tree-SHA512: 27aa10d1c6d02d0e5bc335a5cda9cf2664b968c298d2ea6c653b8074abf18764a9d0f19c36222852fc23b887ab64144901dae059088e61478e9a90a042221e61
Andrew Poelstra 2022-03-17 17:37:36 +00:00
commit 7e6f514bdf
GPG Key ID: C588D63CE41B97C1
30 changed files with 300 additions and 452 deletions
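
The patches above converge on the rustfmt-style layout for `where` clauses (the keyword on its own line, one bound per line) and on block-style function bodies instead of wrapped argument lists. A minimal, self-contained sketch of that layout, written for this note rather than taken from the diff:

// Illustration only: the `where` clause sits on its own line with bounds indented,
// and the body opens as a block instead of a vertically wrapped argument list.
fn encode_all<I, W>(items: I, mut writer: W) -> std::io::Result<usize>
where
    I: IntoIterator<Item = u8>,
    W: std::io::Write,
{
    let mut written = 0;
    for byte in items {
        writer.write_all(&[byte])?;
        written += 1;
    }
    Ok(written)
}

fn main() {
    let mut buf = Vec::new();
    let n = encode_all(vec![0xde, 0xad, 0xbe, 0xef], &mut buf).expect("writing to a Vec cannot fail");
    assert_eq!(n, buf.len());
}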


@ -238,14 +238,14 @@ impl Block {
/// Computes the merkle root of transactions hashed for witness.
pub fn witness_root(&self) -> Option<WitnessMerkleNode> {
let hashes = self.txdata.iter().enumerate().map(|(i, t)|
let hashes = self.txdata.iter().enumerate().map(|(i, t)| {
if i == 0 {
// Replace the first hash with zeroes.
Wtxid::default().as_hash()
} else {
t.wtxid().as_hash()
}
);
});
bitcoin_merkle_root(hashes).map(|h| h.into())
}


@ -89,9 +89,8 @@ impl fmt::UpperHex for Script {
impl hex::FromHex for Script {
fn from_byte_iter<I>(iter: I) -> Result<Self, hex::Error>
where I: Iterator<Item=Result<u8, hex::Error>> +
ExactSizeIterator +
DoubleEndedIterator,
where
I: Iterator<Item=Result<u8, hex::Error>> + ExactSizeIterator + DoubleEndedIterator,
{
Vec::from_byte_iter(iter).map(|v| Script(Box::<[u8]>::from(v)))
}
@ -949,7 +948,8 @@ impl_index_newtype!(Builder, u8);
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<'de> serde::Deserialize<'de> for Script {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: serde::Deserializer<'de>,
where
D: serde::Deserializer<'de>,
{
use core::fmt::Formatter;
use hashes::hex::FromHex;
@ -965,20 +965,23 @@ impl<'de> serde::Deserialize<'de> for Script {
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where E: serde::de::Error,
where
E: serde::de::Error,
{
let v = Vec::from_hex(v).map_err(E::custom)?;
Ok(Script::from(v))
}
fn visit_borrowed_str<E>(self, v: &'de str) -> Result<Self::Value, E>
where E: serde::de::Error,
where
E: serde::de::Error,
{
self.visit_str(v)
}
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
where E: serde::de::Error,
where
E: serde::de::Error,
{
self.visit_str(&v)
}
@ -995,7 +998,8 @@ impl<'de> serde::Deserialize<'de> for Script {
}
fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
where E: serde::de::Error,
where
E: serde::de::Error,
{
Ok(Script::from(v.to_vec()))
}
@ -1023,10 +1027,7 @@ impl serde::Serialize for Script {
impl Encodable for Script {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, s: S) -> Result<usize, io::Error> {
self.0.consensus_encode(s)
}
}
@ -1354,38 +1355,19 @@ mod test {
let slop_v_nonmin: Result<Vec<Instruction>, Error> = nonminimal.instructions().collect();
let slop_v_nonmin_alt: Result<Vec<Instruction>, Error> = nonminimal_alt.instructions().collect();
assert_eq!(
v_zero.unwrap(),
vec![
Instruction::PushBytes(&[]),
]
);
assert_eq!(
v_zeropush.unwrap(),
vec![
Instruction::PushBytes(&[0]),
]
);
assert_eq!(v_zero.unwrap(), vec![Instruction::PushBytes(&[])]);
assert_eq!(v_zeropush.unwrap(), vec![Instruction::PushBytes(&[0])]);
assert_eq!(
v_min.clone().unwrap(),
vec![
Instruction::PushBytes(&[105]),
Instruction::Op(opcodes::OP_NOP3),
]
vec![Instruction::PushBytes(&[105]), Instruction::Op(opcodes::OP_NOP3)]
);
assert_eq!(
v_nonmin.err().unwrap(),
Error::NonMinimalPush
);
assert_eq!(v_nonmin.err().unwrap(), Error::NonMinimalPush);
assert_eq!(
v_nonmin_alt.clone().unwrap(),
vec![
Instruction::PushBytes(&[105, 0]),
Instruction::Op(opcodes::OP_NOP3),
]
vec![Instruction::PushBytes(&[105, 0]), Instruction::Op(opcodes::OP_NOP3)]
);
assert_eq!(v_min.clone().unwrap(), slop_v_min.unwrap());


@ -59,10 +59,7 @@ impl OutPoint {
/// Creates a new [`OutPoint`].
#[inline]
pub fn new(txid: Txid, vout: u32) -> OutPoint {
OutPoint {
txid,
vout,
}
OutPoint { txid, vout }
}
/// Creates a "null" `OutPoint`.
@ -504,7 +501,9 @@ impl Transaction {
#[cfg(feature="bitcoinconsensus")]
#[cfg_attr(docsrs, doc(cfg(feature = "bitcoinconsensus")))]
pub fn verify<S>(&self, spent: S) -> Result<(), script::Error>
where S: FnMut(&OutPoint) -> Option<TxOut> {
where
S: FnMut(&OutPoint) -> Option<TxOut>
{
self.verify_with_flags(spent, ::bitcoinconsensus::VERIFY_ALL)
}
@ -513,7 +512,10 @@ impl Transaction {
#[cfg(feature="bitcoinconsensus")]
#[cfg_attr(docsrs, doc(cfg(feature = "bitcoinconsensus")))]
pub fn verify_with_flags<S, F>(&self, mut spent: S, flags: F) -> Result<(), script::Error>
where S: FnMut(&OutPoint) -> Option<TxOut>, F : Into<u32> {
where
S: FnMut(&OutPoint) -> Option<TxOut>,
F: Into<u32>
{
let tx = encode::serialize(&*self);
let flags: u32 = flags.into();
for (idx, input) in self.input.iter().enumerate() {
@ -542,10 +544,7 @@ impl Transaction {
impl_consensus_encoding!(TxOut, value, script_pubkey);
impl Encodable for OutPoint {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let len = self.txid.consensus_encode(&mut s)?;
Ok(len + self.vout.consensus_encode(s)?)
}
@ -560,10 +559,7 @@ impl Decodable for OutPoint {
}
impl Encodable for TxIn {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += self.previous_output.consensus_encode(&mut s)?;
len += self.script_sig.consensus_encode(&mut s)?;
@ -583,10 +579,7 @@ impl Decodable for TxIn {
}
impl Encodable for Transaction {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += self.version.consensus_encode(&mut s)?;
// To avoid serialization ambiguity, no inputs means we use BIP141 serialization (see
@ -643,9 +636,7 @@ impl Decodable for Transaction {
}
}
// We don't support anything else
x => {
Err(encode::Error::UnsupportedSegwitFlag(x))
}
x => Err(encode::Error::UnsupportedSegwitFlag(x)),
}
// non-segwit
} else {
@ -674,7 +665,7 @@ impl fmt::Display for NonStandardSigHashType {
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl error::Error for NonStandardSigHashType {}
/// Legacy Hashtype of an input's signature.
/// Legacy Hashtype of an input's signature
#[deprecated(since = "0.28.0", note = "Please use [`EcdsaSigHashType`] instead")]
pub type SigHashType = EcdsaSigHashType;
@ -1117,12 +1108,14 @@ mod tests {
#[test]
fn test_sighashtype_fromstr_display() {
let sighashtypes = vec![("SIGHASH_ALL", EcdsaSigHashType::All),
let sighashtypes = vec![
("SIGHASH_ALL", EcdsaSigHashType::All),
("SIGHASH_NONE", EcdsaSigHashType::None),
("SIGHASH_SINGLE", EcdsaSigHashType::Single),
("SIGHASH_ALL|SIGHASH_ANYONECANPAY", EcdsaSigHashType::AllPlusAnyoneCanPay),
("SIGHASH_NONE|SIGHASH_ANYONECANPAY", EcdsaSigHashType::NonePlusAnyoneCanPay),
("SIGHASH_SINGLE|SIGHASH_ANYONECANPAY", EcdsaSigHashType::SinglePlusAnyoneCanPay)];
("SIGHASH_SINGLE|SIGHASH_ANYONECANPAY", EcdsaSigHashType::SinglePlusAnyoneCanPay)
];
for (s, sht) in sighashtypes {
assert_eq!(sht.to_string(), s);
assert_eq!(EcdsaSigHashType::from_str(s).unwrap(), sht);


@ -202,10 +202,8 @@ impl Witness {
self.last = self.content.len();
let element_len_varint = VarInt(new_element.len() as u64);
let current_content_len = self.content.len();
self.content.resize(
current_content_len + element_len_varint.len() + new_element.len(),
0,
);
self.content
.resize(current_content_len + element_len_varint.len() + new_element.len(), 0);
let end_varint = current_content_len + element_len_varint.len();
element_len_varint
.consensus_encode(&mut self.content[current_content_len..end_varint])
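
The hunk above grows the flat witness buffer first and then encodes the VarInt length prefix and the element into the reserved tail. A simplified standalone sketch of the same append pattern, using a hypothetical helper with a one-byte length prefix instead of the crate's VarInt:

// Simplified sketch: reserve space, then write the length prefix and the element into the tail.
fn push_len_prefixed(content: &mut Vec<u8>, element: &[u8]) {
    assert!(element.len() <= u8::MAX as usize, "this sketch only handles short elements");
    let start = content.len();
    content.resize(start + 1 + element.len(), 0);
    content[start] = element.len() as u8;
    content[start + 1..].copy_from_slice(element);
}
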
@ -359,14 +357,9 @@ mod test {
for (i, wit_el) in tx.input[0].witness.iter().enumerate() {
assert_eq!(expected_wit[i], wit_el.to_hex());
}
assert_eq!(
expected_wit[1],
tx.input[0].witness.last().unwrap().to_hex()
);
assert_eq!(
expected_wit[0],
tx.input[0].witness.second_to_last().unwrap().to_hex()
);
assert_eq!(expected_wit[1], tx.input[0].witness.last().unwrap().to_hex());
assert_eq!(expected_wit[0], tx.input[0].witness.second_to_last().unwrap().to_hex());
let tx_bytes_back = serialize(&tx);
assert_eq!(tx_bytes_back, tx_bytes);
}


@ -164,9 +164,7 @@ pub fn deserialize<T: Decodable>(data: &[u8]) -> Result<T, Error> {
/// Deserialize an object from a vector, but will not report an error if said deserialization
/// doesn't consume the entire vector.
pub fn deserialize_partial<T: Decodable>(
data: &[u8],
) -> Result<(T, usize), Error> {
pub fn deserialize_partial<T: Decodable>(data: &[u8]) -> Result<(T, usize), Error> {
let mut decoder = Cursor::new(data);
let rv = Decodable::consensus_decode(&mut decoder)?;
let consumed = decoder.position() as usize;
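
As the doc comment above notes, `deserialize_partial` does not treat leftover bytes as an error and also reports how many bytes were consumed. A hedged usage sketch (it assumes the `bitcoin::consensus::encode` module paths and is not part of this diff):

use bitcoin::consensus::encode::{deserialize_partial, Error, VarInt};

// Decode one VarInt from the front of `data` and hand back the unread tail.
fn take_varint(data: &[u8]) -> Result<(VarInt, &[u8]), Error> {
    let (n, used): (VarInt, usize) = deserialize_partial(data)?;
    Ok((n, &data[used..]))
}
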
@ -334,7 +332,7 @@ pub struct CheckedData(pub Vec<u8>);
// Primitive types
macro_rules! impl_int_encodable {
($ty:ident, $meth_dec:ident, $meth_enc:ident) => (
($ty:ident, $meth_dec:ident, $meth_enc:ident) => {
impl Decodable for $ty {
#[inline]
fn consensus_decode<D: io::Read>(mut d: D) -> Result<Self, Error> {
@ -343,15 +341,12 @@ macro_rules! impl_int_encodable{
}
impl Encodable for $ty {
#[inline]
fn consensus_encode<S: WriteExt>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: WriteExt>(&self, mut s: S) -> Result<usize, io::Error> {
s.$meth_enc(*self)?;
Ok(mem::size_of::<$ty>())
}
}
)
}
}
impl_int_encodable!(u8, read_u8, emit_u8);
@ -439,7 +434,6 @@ impl Decodable for VarInt {
}
}
// Booleans
impl Encodable for bool {
#[inline]
@ -498,13 +492,10 @@ impl Decodable for Cow<'static, str> {
// Arrays
macro_rules! impl_array {
( $size:expr ) => (
( $size:expr ) => {
impl Encodable for [u8; $size] {
#[inline]
fn consensus_encode<S: WriteExt>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: WriteExt>(&self, mut s: S) -> Result<usize, io::Error> {
s.emit_slice(&self[..])?;
Ok(self.len())
}
@ -518,7 +509,7 @@ macro_rules! impl_array {
Ok(ret)
}
}
);
};
}
impl_array!(2);
@ -554,10 +545,7 @@ macro_rules! impl_vec {
($type: ty) => {
impl Encodable for Vec<$type> {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += VarInt(self.len() as u64).consensus_encode(&mut s)?;
for c in self.iter() {
@ -712,7 +700,7 @@ impl<T: Encodable> Encodable for sync::Arc<T> {
// Tuples
macro_rules! tuple_encode {
($($x:ident),*) => (
($($x:ident),*) => {
impl <$($x: Encodable),*> Encodable for ($($x),*) {
#[inline]
#[allow(non_snake_case)]
@ -734,7 +722,7 @@ macro_rules! tuple_encode {
Ok(($({let $x = Decodable::consensus_decode(&mut d)?; $x }),*))
}
}
);
};
}
tuple_encode!(T0, T1);


@ -446,9 +446,10 @@ macro_rules! impl_bytes_newtype {
impl $crate::hashes::hex::FromHex for $t {
fn from_byte_iter<I>(iter: I) -> Result<Self, $crate::hashes::hex::Error>
where I: ::core::iter::Iterator<Item=Result<u8, $crate::hashes::hex::Error>> +
::core::iter::ExactSizeIterator +
::core::iter::DoubleEndedIterator,
where
I: ::core::iter::Iterator<Item=Result<u8, $crate::hashes::hex::Error>>
+ ::core::iter::ExactSizeIterator
+ ::core::iter::DoubleEndedIterator,
{
if iter.len() == $len {
let mut ret = [0; $len];


@ -58,10 +58,7 @@ impl Address {
if addr[0..3] == ONION {
return Err(io::Error::from(io::ErrorKind::AddrNotAvailable));
}
let ipv6 = Ipv6Addr::new(
addr[0],addr[1],addr[2],addr[3],
addr[4],addr[5],addr[6],addr[7]
);
let ipv6 = Ipv6Addr::new(addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7]);
if let Some(ipv4) = ipv6.to_ipv4() {
Ok(SocketAddr::V4(SocketAddrV4::new(ipv4, self.port)))
} else {
@ -82,10 +79,7 @@ fn addr_to_be(addr: [u16; 8]) -> [u16; 8] {
impl Encodable for Address {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let len = self.services.consensus_encode(&mut s)?
+ addr_to_be(self.address).consensus_encode(&mut s)?
@ -149,10 +143,9 @@ pub enum AddrV2 {
impl Encodable for AddrV2 {
fn consensus_encode<W: io::Write>(&self, e: W) -> Result<usize, io::Error> {
fn encode_addr<W: io::Write>(mut e: W, network: u8, bytes: &[u8]) -> Result<usize, io::Error> {
let len =
network.consensus_encode(&mut e)? +
VarInt(bytes.len() as u64).consensus_encode(&mut e)? +
bytes.len();
let len = network.consensus_encode(&mut e)?
+ VarInt(bytes.len() as u64).consensus_encode(&mut e)?
+ bytes.len();
e.emit_slice(bytes)?;
Ok(len)
}
@ -194,10 +187,7 @@ impl Decodable for AddrV2 {
if addr[0..6] == [0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0xFFFF] {
return Err(encode::Error::ParseFailed("IPV4 wrapped address sent with IPv6 network id"));
}
AddrV2::Ipv6(Ipv6Addr::new(
addr[0],addr[1],addr[2],addr[3],
addr[4],addr[5],addr[6],addr[7]
))
AddrV2::Ipv6(Ipv6Addr::new(addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7]))
},
3 => {
if len != 10 {
@ -230,10 +220,7 @@ impl Decodable for AddrV2 {
return Err(encode::Error::ParseFailed("Invalid CJDNS address"));
}
let addr = addr_to_be(addr);
AddrV2::Cjdns(Ipv6Addr::new(
addr[0],addr[1],addr[2],addr[3],
addr[4],addr[5],addr[6],addr[7]
))
AddrV2::Cjdns(Ipv6Addr::new(addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7]))
},
_ => {
// len already checked above to be <= 512


@ -274,10 +274,7 @@ impl ops::BitXorAssign for ServiceFlags {
impl Encodable for ServiceFlags {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
self.0.consensus_encode(&mut s)
}
}
@ -296,39 +293,16 @@ mod tests {
#[test]
fn serialize_test() {
assert_eq!(
serialize(&Network::Bitcoin.magic()),
&[0xf9, 0xbe, 0xb4, 0xd9]
);
assert_eq!(
serialize(&Network::Testnet.magic()),
&[0x0b, 0x11, 0x09, 0x07]
);
assert_eq!(
serialize(&Network::Signet.magic()),
&[0x0a, 0x03, 0xcf, 0x40]
);
assert_eq!(
serialize(&Network::Regtest.magic()),
&[0xfa, 0xbf, 0xb5, 0xda]
);
assert_eq!(serialize(&Network::Bitcoin.magic()), &[0xf9, 0xbe, 0xb4, 0xd9]);
assert_eq!(serialize(&Network::Testnet.magic()), &[0x0b, 0x11, 0x09, 0x07]);
assert_eq!(serialize(&Network::Signet.magic()), &[0x0a, 0x03, 0xcf, 0x40]);
assert_eq!(serialize(&Network::Regtest.magic()), &[0xfa, 0xbf, 0xb5, 0xda]);
assert_eq!(deserialize(&[0xf9, 0xbe, 0xb4, 0xd9]).ok(), Some(Network::Bitcoin.magic()));
assert_eq!(deserialize(&[0x0b, 0x11, 0x09, 0x07]).ok(), Some(Network::Testnet.magic()));
assert_eq!(deserialize(&[0x0a, 0x03, 0xcf, 0x40]).ok(), Some(Network::Signet.magic()));
assert_eq!(deserialize(&[0xfa, 0xbf, 0xb5, 0xda]).ok(), Some(Network::Regtest.magic()));
assert_eq!(
deserialize(&[0xf9, 0xbe, 0xb4, 0xd9]).ok(),
Some(Network::Bitcoin.magic())
);
assert_eq!(
deserialize(&[0x0b, 0x11, 0x09, 0x07]).ok(),
Some(Network::Testnet.magic())
);
assert_eq!(
deserialize(&[0x0a, 0x03, 0xcf, 0x40]).ok(),
Some(Network::Signet.magic())
);
assert_eq!(
deserialize(&[0xfa, 0xbf, 0xb5, 0xda]).ok(),
Some(Network::Regtest.magic())
);
}
#[test]
@ -385,4 +359,3 @@ mod tests {
assert_eq!("ServiceFlags(WITNESS|COMPACT_FILTERS|0xb0)", flag.to_string());
}
}


@ -75,10 +75,7 @@ impl AsRef<str> for CommandString {
impl Encodable for CommandString {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, s: S) -> Result<usize, io::Error> {
let mut rawbytes = [0u8; 12];
let strbytes = self.0.as_bytes();
debug_assert!(strbytes.len() <= 12);
@ -281,10 +278,7 @@ struct HeaderSerializationWrapper<'a>(&'a Vec<block::BlockHeader>);
impl<'a> Encodable for HeaderSerializationWrapper<'a> {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += VarInt(self.0.len() as u64).consensus_encode(&mut s)?;
for header in self.0.iter() {
@ -296,10 +290,7 @@ impl<'a> Encodable for HeaderSerializationWrapper<'a> {
}
impl Encodable for RawNetworkMessage {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += self.magic.consensus_encode(&mut s)?;
len += self.command().consensus_encode(&mut s)?;


@ -54,14 +54,10 @@ pub enum Inventory {
impl Encodable for Inventory {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
macro_rules! encode_inv {
($code:expr, $item:expr) => {
u32::consensus_encode(&$code, &mut s)? +
$item.consensus_encode(&mut s)?
u32::consensus_encode(&$code, &mut s)? + $item.consensus_encode(&mut s)?
}
}
Ok(match *self {


@ -222,7 +222,7 @@ mod test {
let istream = TcpStream::connect(format!("127.0.0.1:{}", port)).unwrap();
let reader = BufReader::new(istream);
return (handle, reader)
(handle, reader)
}
#[test]


@ -13,8 +13,8 @@ pub mod btreemap_byte_values {
use hashes::hex::{FromHex, ToHex};
use serde;
pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S)
-> Result<S::Ok, S::Error> where
pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
T: serde::Serialize + ::core::hash::Hash + Eq + Ord,
{
@ -32,15 +32,16 @@ pub mod btreemap_byte_values {
}
}
pub fn deserialize<'de, D, T>(d: D)
-> Result<BTreeMap<T, Vec<u8>>, D::Error> where
pub fn deserialize<'de, D, T>(d: D) -> Result<BTreeMap<T, Vec<u8>>, D::Error>
where
D: serde::Deserializer<'de>,
T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
{
use ::core::marker::PhantomData;
struct Visitor<T>(PhantomData<T>);
impl<'de, T> serde::de::Visitor<'de> for Visitor<T> where
impl<'de, T> serde::de::Visitor<'de> for Visitor<T>
where
T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
{
type Value = BTreeMap<T, Vec<u8>>;
@ -79,8 +80,8 @@ pub mod btreemap_as_seq {
use prelude::*;
use serde;
pub fn serialize<S, T, U>(v: &BTreeMap<T, U>, s: S)
-> Result<S::Ok, S::Error> where
pub fn serialize<S, T, U>(v: &BTreeMap<T, U>, s: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
T: serde::Serialize + ::core::hash::Hash + Eq + Ord,
U: serde::Serialize,
@ -99,8 +100,8 @@ pub mod btreemap_as_seq {
}
}
pub fn deserialize<'de, D, T, U>(d: D)
-> Result<BTreeMap<T, U>, D::Error> where
pub fn deserialize<'de, D, T, U>(d: D) -> Result<BTreeMap<T, U>, D::Error>
where
D: serde::Deserializer<'de>,
T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
U: serde::Deserialize<'de>,
@ -108,7 +109,8 @@ pub mod btreemap_as_seq {
use ::core::marker::PhantomData;
struct Visitor<T, U>(PhantomData<(T, U)>);
impl<'de, T, U> serde::de::Visitor<'de> for Visitor<T, U> where
impl<'de, T, U> serde::de::Visitor<'de> for Visitor<T, U>
where
T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
U: serde::Deserialize<'de>,
{
@ -164,8 +166,8 @@ pub mod btreemap_as_seq_byte_values {
&'a [u8],
);
pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S)
-> Result<S::Ok, S::Error> where
pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
T: serde::Serialize + ::core::hash::Hash + Eq + Ord + 'static,
{
@ -183,15 +185,16 @@ pub mod btreemap_as_seq_byte_values {
}
}
pub fn deserialize<'de, D, T>(d: D)
-> Result<BTreeMap<T, Vec<u8>>, D::Error> where
pub fn deserialize<'de, D, T>(d: D) -> Result<BTreeMap<T, Vec<u8>>, D::Error>
where
D: serde::Deserializer<'de>,
T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
{
use ::core::marker::PhantomData;
struct Visitor<T>(PhantomData<T>);
impl<'de, T> serde::de::Visitor<'de> for Visitor<T> where
impl<'de, T> serde::de::Visitor<'de> for Visitor<T>
where
T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
{
type Value = BTreeMap<T, Vec<u8>>;
@ -228,7 +231,8 @@ pub mod hex_bytes {
use serde;
pub fn serialize<T, S>(bytes: &T, s: S) -> Result<S::Ok, S::Error>
where T: serde::Serialize + AsRef<[u8]>, S: serde::Serializer
where
T: serde::Serialize + AsRef<[u8]>, S: serde::Serializer
{
// Don't do anything special when not human readable.
if !s.is_human_readable() {
@ -239,7 +243,8 @@ pub mod hex_bytes {
}
pub fn deserialize<'de, D, B>(d: D) -> Result<B, D::Error>
where D: serde::Deserializer<'de>, B: serde::Deserialize<'de> + FromHex,
where
D: serde::Deserializer<'de>, B: serde::Deserialize<'de> + FromHex,
{
struct Visitor<B>(::core::marker::PhantomData<B>);
@ -251,7 +256,8 @@ pub mod hex_bytes {
}
fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
where E: serde::de::Error,
where
E: serde::de::Error,
{
if let Ok(hex) = ::core::str::from_utf8(v) {
FromHex::from_hex(hex).map_err(E::custom)
@ -261,7 +267,8 @@ pub mod hex_bytes {
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where E: serde::de::Error,
where
E: serde::de::Error,
{
FromHex::from_hex(v).map_err(E::custom)
}


@ -27,4 +27,3 @@ macro_rules! serde_round_trip (
assert_eq!($var, decoded);
})
);


@ -94,17 +94,10 @@ impl fmt::Display for Error {
Error::InvalidWitnessVersion(v) => write!(f, "invalid witness script version: {}", v),
Error::UnparsableWitnessVersion(_) => write!(f, "incorrect format of a witness version byte"),
Error::MalformedWitnessVersion => f.write_str("bitcoin script opcode does not match any known witness version, the script is malformed"),
Error::InvalidWitnessProgramLength(l) => write!(f,
"the witness program must be between 2 and 40 bytes in length: length={}", l,
),
Error::InvalidSegwitV0ProgramLength(l) => write!(f,
"a v0 witness program must be either of length 20 or 32 bytes: length={}", l,
),
Error::UncompressedPubkey => write!(f,
"an uncompressed pubkey was used where it is not allowed",
),
Error::ExcessiveScriptSize => write!(f,
"Script size exceed 520 bytes")
Error::InvalidWitnessProgramLength(l) => write!(f, "the witness program must be between 2 and 40 bytes in length: length={}", l),
Error::InvalidSegwitV0ProgramLength(l) => write!(f, "a v0 witness program must be either of length 20 or 32 bytes: length={}", l),
Error::UncompressedPubkey => write!(f, "an uncompressed pubkey was used where it is not allowed"),
Error::ExcessiveScriptSize => write!(f, "Script size exceed 520 bytes"),
}
}
}
@ -395,14 +388,11 @@ impl Payload {
/// Generates a script pubkey spending to this [Payload].
pub fn script_pubkey(&self) -> script::Script {
match *self {
Payload::PubkeyHash(ref hash) =>
script::Script::new_p2pkh(hash),
Payload::ScriptHash(ref hash) =>
script::Script::new_p2sh(hash),
Payload::WitnessProgram {
version,
program: ref prog,
} => script::Script::new_witness_program(version, prog)
Payload::PubkeyHash(ref hash) => script::Script::new_p2pkh(hash),
Payload::ScriptHash(ref hash) => script::Script::new_p2sh(hash),
Payload::WitnessProgram { version, program: ref prog } => {
script::Script::new_witness_program(version, prog)
}
}
}
@ -622,10 +612,7 @@ impl Address {
/// Creates a pay to taproot address from a pre-tweaked output key.
///
/// This method is not recommended for use, [`Address::p2tr()`] should be used where possible.
pub fn p2tr_tweaked(
output_key: TweakedPublicKey,
network: Network
) -> Address {
pub fn p2tr_tweaked(output_key: TweakedPublicKey, network: Network) -> Address {
Address {
network,
payload: Payload::p2tr_tweaked(output_key),


@ -961,10 +961,7 @@ impl<T> CheckedSum<SignedAmount> for T where T: Iterator<Item = SignedAmount> {
fn checked_sum(mut self) -> Option<SignedAmount> {
let first = Some(self.next().unwrap_or_default());
self.fold(
first,
|acc, item| acc.and_then(|acc| acc.checked_add(item))
)
self.fold(first, |acc, item| acc.and_then(|acc| acc.checked_add(item)))
}
}
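
The fold above collapses the accumulator to `None` as soon as any `checked_add` overflows. The same pattern against plain `i64` instead of the crate's `SignedAmount`, as a standalone sketch:

// Returns Some(0) for an empty iterator and None on any overflow, mirroring the fold above.
fn checked_sum<I: Iterator<Item = i64>>(mut iter: I) -> Option<i64> {
    let first = Some(iter.next().unwrap_or_default());
    iter.fold(first, |acc, item| acc.and_then(|acc| acc.checked_add(item)))
}

fn main() {
    assert_eq!(checked_sum(vec![1i64, 2, 3].into_iter()), Some(6));
    assert_eq!(checked_sum(vec![i64::MAX, 1].into_iter()), None);
}
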
@ -1133,12 +1130,13 @@ pub mod serde {
fn visit_none<E>(self) -> Result<Self::Value, E>
where
E: de::Error {
E: de::Error,
{
Ok(None)
}
fn visit_some<D>(self, d: D) -> Result<Self::Value, D::Error>
where
D: Deserializer<'de>
D: Deserializer<'de>,
{
Ok(Some(X::des_sat(d)?))
}
@ -1196,7 +1194,8 @@ pub mod serde {
fn visit_none<E>(self) -> Result<Self::Value, E>
where
E: de::Error {
E: de::Error,
{
Ok(None)
}
fn visit_some<D>(self, d: D) -> Result<Self::Value, D::Error>
@ -1532,10 +1531,7 @@ mod tests {
samt: SignedAmount::from_sat(-123456789),
},
&[
serde_test::Token::Struct {
name: "T",
len: 2,
},
serde_test::Token::Struct { name: "T", len: 2 },
serde_test::Token::Str("amt"),
serde_test::Token::U64(123456789),
serde_test::Token::Str("samt"),


@ -519,7 +519,7 @@ mod test {
use super::*;
extern crate serde_json;
use self::serde_json::{Value};
use self::serde_json::Value;
use consensus::encode::deserialize;
use std::collections::HashMap;


@ -901,34 +901,26 @@ mod tests {
assert_eq!(indexed.child(ChildNumber::from_hardened_idx(2).unwrap()), path);
}
fn test_path<C: secp256k1::Signing + secp256k1::Verification>(secp: &Secp256k1<C>,
fn test_path<C: secp256k1::Signing + secp256k1::Verification>(
secp: &Secp256k1<C>,
network: Network,
seed: &[u8],
path: DerivationPath,
expected_sk: &str,
expected_pk: &str) {
expected_pk: &str)
{
let mut sk = ExtendedPrivKey::new_master(network, seed).unwrap();
let mut pk = ExtendedPubKey::from_priv(secp, &sk);
// Check derivation convenience method for ExtendedPrivKey
assert_eq!(
&sk.derive_priv(secp, &path).unwrap().to_string()[..],
expected_sk
);
assert_eq!(&sk.derive_priv(secp, &path).unwrap().to_string()[..], expected_sk);
// Check derivation convenience method for ExtendedPubKey, should error
// appropriately if any ChildNumber is hardened
if path.0.iter().any(|cnum| cnum.is_hardened()) {
assert_eq!(
pk.derive_pub(secp, &path),
Err(Error::CannotDeriveFromHardenedKey)
);
assert_eq!(pk.derive_pub(secp, &path), Err(Error::CannotDeriveFromHardenedKey));
} else {
assert_eq!(
&pk.derive_pub(secp, &path).unwrap().to_string()[..],
expected_pk
);
assert_eq!(&pk.derive_pub(secp, &path).unwrap().to_string()[..], expected_pk);
}
// Derive keys, checking hardened and non-hardened derivation one-by-one


@ -36,7 +36,8 @@ use consensus::encode::Encodable;
/// - `Some(hash)` if `hashes` contains one element. A single hash is by definition the merkle root.
/// - `Some(merkle_root)` if length of `hashes` is greater than one.
pub fn bitcoin_merkle_root_inline<T>(hashes: &mut [T]) -> Option<T>
where T: Hash + Encodable,
where
T: Hash + Encodable,
<T as Hash>::Engine: io::Write,
{
match hashes.len() {
@ -53,7 +54,8 @@ pub fn bitcoin_merkle_root_inline<T>(hashes: &mut [T]) -> Option<T>
/// - `Some(hash)` if `hashes` contains one element. A single hash is by definition the merkle root.
/// - `Some(merkle_root)` if length of `hashes` is greater than one.
pub fn bitcoin_merkle_root<T, I>(mut hashes: I) -> Option<T>
where T: Hash + Encodable,
where
T: Hash + Encodable,
<T as Hash>::Engine: io::Write,
I: Iterator<Item = T>,
{
@ -84,7 +86,8 @@ pub fn bitcoin_merkle_root<T, I>(mut hashes: I) -> Option<T>
// `hashes` must contain at least one hash.
fn merkle_root_r<T>(hashes: &mut [T]) -> T
where T: Hash + Encodable,
where
T: Hash + Encodable,
<T as Hash>::Engine: io::Write,
{
if hashes.len() == 1 {


@ -31,7 +31,6 @@ use hashes::{Hash, hash160, hex, hex::FromHex};
use hash_types::{PubkeyHash, WPubkeyHash};
use util::base58;
/// A key-related error.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum Error {
@ -45,7 +44,6 @@ pub enum Error {
Hex(hex::Error)
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
@ -158,11 +156,7 @@ impl PublicKey {
let mut bytes = [0; 65];
reader.read_exact(&mut bytes[0..1])?;
let bytes = if bytes[0] < 4 {
&mut bytes[..33]
} else {
&mut bytes[..65]
};
let bytes = if bytes[0] < 4 { &mut bytes[..33] } else { &mut bytes[..65] };
reader.read_exact(&mut bytes[1..])?;
Self::from_slice(bytes).map_err(|e| {
@ -189,10 +183,12 @@ impl PublicKey {
/// Deserialize a public key from a slice
pub fn from_slice(data: &[u8]) -> Result<PublicKey, Error> {
let compressed: bool = match data.len() {
let compressed = match data.len() {
33 => true,
65 => false,
len => { return Err(base58::Error::InvalidLength(len).into()); },
len => {
return Err(base58::Error::InvalidLength(len).into());
},
};
if !compressed && data[0] != 0x04 {
@ -285,10 +281,7 @@ impl PrivateKey {
/// Deserialize a private key from a slice
pub fn from_slice(data: &[u8], network: Network) -> Result<PrivateKey, Error> {
Ok(PrivateKey::new(
secp256k1::SecretKey::from_slice(data)?,
network,
))
Ok(PrivateKey::new(secp256k1::SecretKey::from_slice(data)?, network))
}
/// Format the private key to WIF format.
@ -323,13 +316,17 @@ impl PrivateKey {
let compressed = match data.len() {
33 => false,
34 => true,
_ => { return Err(Error::Base58(base58::Error::InvalidLength(data.len()))); }
_ => {
return Err(Error::Base58(base58::Error::InvalidLength(data.len())));
}
};
let network = match data[0] {
128 => Network::Bitcoin,
239 => Network::Testnet,
x => { return Err(Error::Base58(base58::Error::InvalidAddressVersion(x))); }
x => {
return Err(Error::Base58(base58::Error::InvalidAddressVersion(x)));
}
};
Ok(PrivateKey {


@ -189,9 +189,7 @@ impl PartialMerkleTree {
}
// there can never be more hashes provided than one for every txid
if self.hashes.len() as u32 > self.num_transactions {
return Err(BadFormat(
"Proof contains more hashes than transactions".to_owned(),
));
return Err(BadFormat("Proof contains more hashes than transactions".to_owned()));
};
// there must be at least one bit per node in the partial tree, and at least one node per hash
if self.bits.len() < self.hashes.len() {
@ -246,13 +244,7 @@ impl PartialMerkleTree {
}
/// Recursive function that traverses tree nodes, storing the data as bits and hashes
fn traverse_and_build(
&mut self,
height: u32,
pos: u32,
txids: &[Txid],
matches: &[bool],
) {
fn traverse_and_build(&mut self, height: u32, pos: u32, txids: &[Txid], matches: &[bool]) {
// Determine whether this node is the parent of at least one matched txid
let mut parent_of_match = false;
let mut p = pos << height;
@ -348,10 +340,7 @@ impl PartialMerkleTree {
}
impl Encodable for PartialMerkleTree {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let ret = self.num_transactions.consensus_encode(&mut s)?
+ self.hashes.consensus_encode(&mut s)?;
let mut bytes: Vec<u8> = vec![0; (self.bits.len() + 7) / 8];
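
The `(self.bits.len() + 7) / 8` allocation above sizes a byte buffer to hold one bit per tree node. A standalone sketch of packing a bit vector into bytes, least significant bit first (illustrative only, not the crate's encoder):

fn pack_bits(bits: &[bool]) -> Vec<u8> {
    // One byte per 8 bits, rounded up, matching the allocation above.
    let mut bytes = vec![0u8; (bits.len() + 7) / 8];
    for (i, &bit) in bits.iter().enumerate() {
        if bit {
            bytes[i / 8] |= 1u8 << (i % 8);
        }
    }
    bytes
}
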
@ -432,7 +421,9 @@ impl MerkleBlock {
/// assert_eq!(txid, matches[0]);
/// ```
pub fn from_block_with_predicate<F>(block: &Block, match_txids: F) -> Self
where F: Fn(&Txid) -> bool {
where
F: Fn(&Txid) -> bool
{
let block_txids: Vec<_> = block.txdata.iter().map(Transaction::txid).collect();
Self::from_header_txids_with_predicate(&block.header, &block_txids, match_txids)
}
@ -453,7 +444,10 @@ impl MerkleBlock {
header: &BlockHeader,
block_txids: &[Txid],
match_txids: F,
) -> Self where F: Fn(&Txid) -> bool {
) -> Self
where
F: Fn(&Txid) -> bool
{
let matches: Vec<bool> = block_txids
.iter()
.map(match_txids)
@ -497,10 +491,7 @@ impl MerkleBlock {
}
impl Encodable for MerkleBlock {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let len = self.header.consensus_encode(&mut s)?
+ self.txn.consensus_encode(s)?;
Ok(len)


@ -217,8 +217,12 @@ mod message_signing {
/// instance of it, returning the number of instances removed.
/// Loops through the vector opcode by opcode, skipping pushed data.
pub fn script_find_and_remove(haystack: &mut Vec<u8>, needle: &[u8]) -> usize {
if needle.len() > haystack.len() { return 0; }
if needle.is_empty() { return 0; }
if needle.len() > haystack.len() {
return 0;
}
if needle.is_empty() {
return 0;
}
let mut top = haystack.len() - needle.len();
let mut n_deleted = 0;
@ -233,7 +237,9 @@ pub fn script_find_and_remove(haystack: &mut Vec<u8>, needle: &[u8]) -> usize {
// This is ugly but prevents infinite loop in case of overflow
let overflow = top < needle.len();
top = top.wrapping_sub(needle.len());
if overflow { break; }
if overflow {
break;
}
} else {
i += match opcodes::All::from((*haystack)[i]).classify(opcodes::ClassifyContext::Legacy) {
opcodes::Class::PushBytes(n) => n as usize + 1,
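
`script_find_and_remove` above walks the script opcode by opcode, skipping pushed data, so bytes inside a push are never misread as a match. A deliberately simplified standalone sketch of just the find-remove-and-count part (a hypothetical helper that scans byte by byte and does not skip push data):

fn find_and_remove(haystack: &mut Vec<u8>, needle: &[u8]) -> usize {
    if needle.is_empty() || needle.len() > haystack.len() {
        return 0;
    }
    let mut removed = 0;
    let mut i = 0;
    while i + needle.len() <= haystack.len() {
        if haystack[i..i + needle.len()] == *needle {
            haystack.drain(i..i + needle.len());
            removed += 1;
        } else {
            i += 1;
        }
    }
    removed
}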


@ -32,16 +32,10 @@ pub(super) trait Map {
fn get_pairs(&self) -> Result<Vec<raw::Pair>, io::Error>;
/// Encodes map data with bitcoin consensus encoding.
fn consensus_encode_map<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode_map<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
for pair in Map::get_pairs(self)? {
len += encode::Encodable::consensus_encode(
&pair,
&mut s,
)?;
len += encode::Encodable::consensus_encode(&pair, &mut s)?;
}
Ok(len + encode::Encodable::consensus_encode(&0x00_u8, s)?)


@ -238,10 +238,7 @@ mod display_from_str {
pub use self::display_from_str::PsbtParseError;
impl Encodable for PartiallySignedTransaction {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += b"psbt".consensus_encode(&mut s)?;
@ -345,10 +342,7 @@ mod tests {
inputs: vec![],
outputs: vec![],
};
assert_eq!(
serialize_hex(&psbt),
"70736274ff01000a0200000000000000000000"
);
assert_eq!(serialize_hex(&psbt), "70736274ff01000a0200000000000000000000");
}
#[test]
@ -390,12 +384,8 @@ mod tests {
hd_keypaths.insert(pk.public_key, (fprint, dpath.into()));
let expected: Output = Output {
redeem_script: Some(hex_script!(
"76a914d0c59903c5bac2868760e90fd521a4665aa7652088ac"
)),
witness_script: Some(hex_script!(
"a9143545e6e33b832c47050f24d3eeb93c9c03948bc787"
)),
redeem_script: Some(hex_script!("76a914d0c59903c5bac2868760e90fd521a4665aa7652088ac")),
witness_script: Some(hex_script!("a9143545e6e33b832c47050f24d3eeb93c9c03948bc787")),
bip32_derivation: hd_keypaths,
..Default::default()
};
@ -441,13 +431,8 @@ mod tests {
version: 0,
proprietary: Default::default(),
unknown: Default::default(),
inputs: vec![
Input::default(),
],
outputs: vec![
Output::default(),
Output::default()
]
inputs: vec![Input::default()],
outputs: vec![Output::default(), Output::default()],
};
let actual: PartiallySignedTransaction = deserialize(&serialize(&expected)).unwrap();
@ -804,8 +789,7 @@ mod tests {
let psbt_non_witness_utxo = (&psbt.inputs[0].non_witness_utxo).as_ref().unwrap();
assert_eq!(tx_input.previous_output.txid, psbt_non_witness_utxo.txid());
assert!(
psbt_non_witness_utxo.output[tx_input.previous_output.vout as usize]
assert!(psbt_non_witness_utxo.output[tx_input.previous_output.vout as usize]
.script_pubkey
.is_p2pkh()
);
@ -871,9 +855,7 @@ mod tests {
let tx = &psbt.unsigned_tx;
assert_eq!(
tx.txid(),
Txid::from_hex(
"75c5c9665a570569ad77dd1279e6fd4628a093c4dcbf8d41532614044c14c115"
).unwrap()
Txid::from_hex("75c5c9665a570569ad77dd1279e6fd4628a093c4dcbf8d41532614044c14c115").unwrap(),
);
let mut unknown: BTreeMap<raw::Key, Vec<u8>> = BTreeMap::new();
@ -893,7 +875,6 @@ mod tests {
use super::*;
use super::serialize;
#[test]
fn invalid_vectors() {
let err = hex_psbt!("70736274ff010071020000000127744ababf3027fe0d6cf23a96eee2efb188ef52301954585883e69b6624b2420000000000ffffffff02787c01000000000016001483a7e34bd99ff03a4962ef8a1a101bb295461ece606b042a010000001600147ac369df1b20e033d6116623957b0ac49f3c52e8000000000001012b00f2052a010000002251205a2c2cf5b52cf31f83ad2e8da63ff03183ecd8f609c7510ae8a48e03910a075701172102fe349064c98d6e2a853fa3c9b12bd8b304a19c195c60efa7ee2393046d3fa232000000").unwrap_err();


@ -99,18 +99,12 @@ impl Decodable for Key {
key.push(Decodable::consensus_decode(&mut d)?);
}
Ok(Key {
type_value,
key,
})
Ok(Key { type_value, key })
}
}
impl Encodable for Key {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += VarInt((self.key.len() + 1) as u64).consensus_encode(&mut s)?;
@ -125,10 +119,7 @@ impl Encodable for Key {
}
impl Encodable for Pair {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let len = self.key.consensus_encode(&mut s)?;
Ok(len + self.value.consensus_encode(s)?)
}
@ -159,11 +150,7 @@ impl<Subtype> Decodable for ProprietaryKey<Subtype> where Subtype: Copy + From<u
let subtype = Subtype::from(d.read_u8()?);
let key = read_to_end(d)?;
Ok(ProprietaryKey {
prefix,
subtype,
key
})
Ok(ProprietaryKey { prefix, subtype, key })
}
}


@ -598,9 +598,7 @@ impl TaprootMerkleBranch {
if sl.len() % TAPROOT_CONTROL_NODE_SIZE != 0 {
Err(TaprootError::InvalidMerkleBranchSize(sl.len()))
} else if sl.len() > TAPROOT_CONTROL_NODE_SIZE * TAPROOT_CONTROL_MAX_NODE_COUNT {
Err(TaprootError::InvalidMerkleTreeDepth(
sl.len() / TAPROOT_CONTROL_NODE_SIZE,
))
Err(TaprootError::InvalidMerkleTreeDepth(sl.len() / TAPROOT_CONTROL_NODE_SIZE))
} else {
let inner = sl
// TODO: Use chunks_exact after MSRV changes to 1.31
@ -717,8 +715,7 @@ impl ControlBlock {
/// applied when encoding this element as a witness.
pub fn serialize(&self) -> Vec<u8> {
let mut buf = Vec::with_capacity(self.size());
self.encode(&mut buf)
.expect("writers don't error");
self.encode(&mut buf).expect("writers don't error");
buf
}
@ -885,7 +882,10 @@ impl ::serde::Serialize for LeafVersion {
#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<'de> ::serde::Deserialize<'de> for LeafVersion {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: ::serde::Deserializer<'de> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: ::serde::Deserializer<'de>
{
struct U8Visitor;
impl<'de> ::serde::de::Visitor<'de> for U8Visitor {
type Value = LeafVersion;
@ -936,11 +936,9 @@ impl fmt::Display for TaprootBuilderError {
"Attempted to create a tree with two nodes at depth 0. There must\
only be a exactly one node at depth 0",
),
TaprootBuilderError::InvalidMerkleTreeDepth(d) => write!(
f,
"Merkle Tree depth({}) must be less than {}",
d, TAPROOT_CONTROL_MAX_NODE_COUNT
),
TaprootBuilderError::InvalidMerkleTreeDepth(d) => {
write!(f, "Merkle Tree depth({}) must be less than {}", d, TAPROOT_CONTROL_MAX_NODE_COUNT)
}
TaprootBuilderError::InvalidInternalKey(e) => {
write!(f, "Invalid Internal XOnly key : {}", e)
}


@ -19,7 +19,7 @@
//!
macro_rules! construct_uint {
($name:ident, $n_words:expr) => (
($name:ident, $n_words:expr) => {
/// Little-endian large integer type
#[derive(Copy, Clone, PartialEq, Eq, Hash, Default)]
pub struct $name(pub [u64; $n_words]);
@ -169,7 +169,9 @@ macro_rules! construct_uint {
let &mut $name(ref mut arr) = self;
for i in 0..$n_words {
arr[i] = arr[i].wrapping_add(1);
if arr[i] != 0 { break; }
if arr[i] != 0 {
break;
}
}
}
}
@ -188,8 +190,12 @@ macro_rules! construct_uint {
// and the auto derive is a lexicographic ordering(i.e. memcmp)
// which with numbers is equivalent to big-endian
for i in 0..$n_words {
if self[$n_words - 1 - i] < other[$n_words - 1 - i] { return ::core::cmp::Ordering::Less; }
if self[$n_words - 1 - i] > other[$n_words - 1 - i] { return ::core::cmp::Ordering::Greater; }
if self[$n_words - 1 - i] < other[$n_words - 1 - i] {
return ::core::cmp::Ordering::Less;
}
if self[$n_words - 1 - i] > other[$n_words - 1 - i] {
return ::core::cmp::Ordering::Greater;
}
}
::core::cmp::Ordering::Equal
}
@ -499,7 +505,7 @@ macro_rules! construct_uint {
}
}
}
);
};
}
construct_uint!(Uint256, 4);