Merge rust-bitcoin/rust-bitcoin#806: Use rustfmt to hint at clean ups for the codebase

a77907d59c Remove unnecessary explicit type annotation (Tobin Harding)
71cf00a314 Use less vertical lines (Tobin Harding)
a5c06e0a96 Refactor vector initialisation (Tobin Harding)
aabf2d1681 Use brace not parenth fo macro arm (Tobin Harding)
b021415a88 Use block stlye function call (Tobin Harding)
d6462bae7b Refactor usage of + (Tobin Harding)
702e8bf82d Refactor consensus_encode (Tobin Harding)
a8ed95ea07 Refactor where statements (Tobin Harding)
6d84998168 Improve braces usage (Tobin Harding)
39ec59620d Fix unusual indentation (Tobin Harding)
b9b6e7e1c6 Remove unneeded braces (Tobin Harding)
5d68ad85ed Remove unneeded return statement (Tobin Harding)
bf4f5638e0 Refactor whitespace (Tobin Harding)
1c502399f1 Remove trailing whitespace (Tobin Harding)

Pull request description:

  `rustfmt` is still under discussion. While researching the topic I came across a maintainer of another project that does not use `rustfmt` who mentioned that he manually implements the `rustfmt` suggestions he likes every month or so. This seemed like a good idea, so I did the same. It was extremely painful, but I believe I have put together a PR that is non-controversial, with well-separated patches.
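
  For anyone wanting to reproduce that workflow, one way (an assumption on my part, not necessarily what was done for this PR) is to have `rustfmt` print its suggested changes as a diff without touching the tree, then hand-apply only the hunks you agree with:

      # show what rustfmt would change, without rewriting any files
      cargo fmt -- --check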

  Totally non-urgent.

ACKs for top commit:
  apoelstra:
    ACK a77907d59c
  sanket1729:
    ACK a77907d59c.

Tree-SHA512: 27aa10d1c6d02d0e5bc335a5cda9cf2664b968c298d2ea6c653b8074abf18764a9d0f19c36222852fc23b887ab64144901dae059088e61478e9a90a042221e61
Andrew Poelstra 2022-03-17 17:37:36 +00:00
commit 7e6f514bdf
GPG Key ID: C588D63CE41B97C1
30 changed files with 300 additions and 452 deletions


@ -175,7 +175,7 @@ impl Block {
}
/// check if merkle root of header matches merkle root of the transaction list
pub fn check_merkle_root (&self) -> bool {
pub fn check_merkle_root(&self) -> bool {
match self.compute_merkle_root() {
Some(merkle_root) => self.header.merkle_root == merkle_root,
None => false,
@ -229,7 +229,7 @@ impl Block {
}
/// Computes the witness commitment for the block's transaction list.
pub fn compute_witness_commitment (witness_root: &WitnessMerkleNode, witness_reserved_value: &[u8]) -> WitnessCommitment {
pub fn compute_witness_commitment(witness_root: &WitnessMerkleNode, witness_reserved_value: &[u8]) -> WitnessCommitment {
let mut encoder = WitnessCommitment::engine();
witness_root.consensus_encode(&mut encoder).expect("engines don't error");
encoder.input(witness_reserved_value);
@ -238,14 +238,14 @@ impl Block {
/// Computes the merkle root of transactions hashed for witness.
pub fn witness_root(&self) -> Option<WitnessMerkleNode> {
let hashes = self.txdata.iter().enumerate().map(|(i, t)|
let hashes = self.txdata.iter().enumerate().map(|(i, t)| {
if i == 0 {
// Replace the first hash with zeroes.
Wtxid::default().as_hash()
} else {
t.wtxid().as_hash()
}
);
});
bitcoin_merkle_root(hashes).map(|h| h.into())
}


@ -632,10 +632,10 @@ impl fmt::Debug for All {
all::OP_NUMEQUAL => write!(f, "NUMEQUAL"),
all::OP_NUMEQUALVERIFY => write!(f, "NUMEQUALVERIFY"),
all::OP_NUMNOTEQUAL => write!(f, "NUMNOTEQUAL"),
all::OP_LESSTHAN => write!(f, "LESSTHAN"),
all::OP_GREATERTHAN => write!(f, "GREATERTHAN"),
all::OP_LESSTHANOREQUAL => write!(f, "LESSTHANOREQUAL"),
all::OP_GREATERTHANOREQUAL => write!(f, "GREATERTHANOREQUAL"),
all::OP_LESSTHAN => write!(f, "LESSTHAN"),
all::OP_GREATERTHAN => write!(f, "GREATERTHAN"),
all::OP_LESSTHANOREQUAL => write!(f, "LESSTHANOREQUAL"),
all::OP_GREATERTHANOREQUAL => write!(f, "GREATERTHANOREQUAL"),
all::OP_MIN => write!(f, "MIN"),
all::OP_MAX => write!(f, "MAX"),
all::OP_WITHIN => write!(f, "WITHIN"),
@ -860,8 +860,8 @@ impl Ordinary {
/// Encode as a byte
#[inline]
pub fn into_u8(self) -> u8 {
self as u8
}
self as u8
}
}
#[cfg(test)]


@ -89,9 +89,8 @@ impl fmt::UpperHex for Script {
impl hex::FromHex for Script {
fn from_byte_iter<I>(iter: I) -> Result<Self, hex::Error>
where I: Iterator<Item=Result<u8, hex::Error>> +
ExactSizeIterator +
DoubleEndedIterator,
where
I: Iterator<Item=Result<u8, hex::Error>> + ExactSizeIterator + DoubleEndedIterator,
{
Vec::from_byte_iter(iter).map(|v| Script(Box::<[u8]>::from(v)))
}
@ -141,11 +140,11 @@ impl fmt::Display for Error {
Error::NonMinimalPush => "non-minimal datapush",
Error::EarlyEndOfScript => "unexpected end of script",
Error::NumericOverflow => "numeric overflow (number on stack larger than 4 bytes)",
#[cfg(feature="bitcoinconsensus")]
#[cfg(feature = "bitcoinconsensus")]
Error::BitcoinConsensus(ref _n) => "bitcoinconsensus verification failed",
#[cfg(feature="bitcoinconsensus")]
#[cfg(feature = "bitcoinconsensus")]
Error::UnknownSpentOutput(ref _point) => "unknown spent output Transaction::verify()",
#[cfg(feature="bitcoinconsensus")]
#[cfg(feature = "bitcoinconsensus")]
Error::SerializationError => "can not serialize the spending transaction in Transaction::verify()",
};
f.write_str(str)
@ -717,7 +716,7 @@ impl<'a> Iterator for Instructions<'a> {
opcodes::Class::PushBytes(n) => {
let n = n as usize;
if self.data.len() < n + 1 {
self.data = &[]; // Kill iterator so that it does not return an infinite stream of errors
self.data = &[]; // Kill iterator so that it does not return an infinite stream of errors
return Some(Err(Error::EarlyEndOfScript));
}
if self.enforce_minimal {
@ -949,7 +948,8 @@ impl_index_newtype!(Builder, u8);
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<'de> serde::Deserialize<'de> for Script {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: serde::Deserializer<'de>,
where
D: serde::Deserializer<'de>,
{
use core::fmt::Formatter;
use hashes::hex::FromHex;
@ -965,20 +965,23 @@ impl<'de> serde::Deserialize<'de> for Script {
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where E: serde::de::Error,
where
E: serde::de::Error,
{
let v = Vec::from_hex(v).map_err(E::custom)?;
Ok(Script::from(v))
}
fn visit_borrowed_str<E>(self, v: &'de str) -> Result<Self::Value, E>
where E: serde::de::Error,
where
E: serde::de::Error,
{
self.visit_str(v)
}
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
where E: serde::de::Error,
where
E: serde::de::Error,
{
self.visit_str(&v)
}
@ -995,7 +998,8 @@ impl<'de> serde::Deserialize<'de> for Script {
}
fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
where E: serde::de::Error,
where
E: serde::de::Error,
{
Ok(Script::from(v.to_vec()))
}
@ -1023,10 +1027,7 @@ impl serde::Serialize for Script {
impl Encodable for Script {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, s: S) -> Result<usize, io::Error> {
self.0.consensus_encode(s)
}
}
@ -1354,38 +1355,19 @@ mod test {
let slop_v_nonmin: Result<Vec<Instruction>, Error> = nonminimal.instructions().collect();
let slop_v_nonmin_alt: Result<Vec<Instruction>, Error> = nonminimal_alt.instructions().collect();
assert_eq!(
v_zero.unwrap(),
vec![
Instruction::PushBytes(&[]),
]
);
assert_eq!(
v_zeropush.unwrap(),
vec![
Instruction::PushBytes(&[0]),
]
);
assert_eq!(v_zero.unwrap(), vec![Instruction::PushBytes(&[])]);
assert_eq!(v_zeropush.unwrap(), vec![Instruction::PushBytes(&[0])]);
assert_eq!(
v_min.clone().unwrap(),
vec![
Instruction::PushBytes(&[105]),
Instruction::Op(opcodes::OP_NOP3),
]
vec![Instruction::PushBytes(&[105]), Instruction::Op(opcodes::OP_NOP3)]
);
assert_eq!(
v_nonmin.err().unwrap(),
Error::NonMinimalPush
);
assert_eq!(v_nonmin.err().unwrap(), Error::NonMinimalPush);
assert_eq!(
v_nonmin_alt.clone().unwrap(),
vec![
Instruction::PushBytes(&[105, 0]),
Instruction::Op(opcodes::OP_NOP3),
]
vec![Instruction::PushBytes(&[105, 0]), Instruction::Op(opcodes::OP_NOP3)]
);
assert_eq!(v_min.clone().unwrap(), slop_v_min.unwrap());
@ -1395,7 +1377,7 @@ mod test {
#[test]
fn script_ord() {
let script_1 = Builder::new().push_slice(&[1,2,3,4]).into_script();
let script_1 = Builder::new().push_slice(&[1, 2, 3, 4]).into_script();
let script_2 = Builder::new().push_int(10).into_script();
let script_3 = Builder::new().push_int(15).into_script();
let script_4 = Builder::new().push_opcode(opcodes::all::OP_RETURN).into_script();
@ -1413,7 +1395,7 @@ mod test {
}
#[test]
#[cfg(feature="bitcoinconsensus")]
#[cfg(feature = "bitcoinconsensus")]
fn test_bitcoinconsensus () {
// a random segwit transaction from the blockchain using native segwit
let spent = Builder::from(Vec::from_hex("0020701a8d401c84fb13e6baf169d59684e17abd9fa216c8cc5b9fc63d622ff8c58d").unwrap()).into_script();


@ -59,10 +59,7 @@ impl OutPoint {
/// Creates a new [`OutPoint`].
#[inline]
pub fn new(txid: Txid, vout: u32) -> OutPoint {
OutPoint {
txid,
vout,
}
OutPoint { txid, vout }
}
/// Creates a "null" `OutPoint`.
@ -137,7 +134,7 @@ impl fmt::Display for ParseOutPointError {
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl error::Error for ParseOutPointError {
impl error::Error for ParseOutPointError {
fn cause(&self) -> Option<&dyn error::Error> {
match *self {
ParseOutPointError::Txid(ref e) => Some(e),
@ -504,7 +501,9 @@ impl Transaction {
#[cfg(feature="bitcoinconsensus")]
#[cfg_attr(docsrs, doc(cfg(feature = "bitcoinconsensus")))]
pub fn verify<S>(&self, spent: S) -> Result<(), script::Error>
where S: FnMut(&OutPoint) -> Option<TxOut> {
where
S: FnMut(&OutPoint) -> Option<TxOut>
{
self.verify_with_flags(spent, ::bitcoinconsensus::VERIFY_ALL)
}
@ -513,7 +512,10 @@ impl Transaction {
#[cfg(feature="bitcoinconsensus")]
#[cfg_attr(docsrs, doc(cfg(feature = "bitcoinconsensus")))]
pub fn verify_with_flags<S, F>(&self, mut spent: S, flags: F) -> Result<(), script::Error>
where S: FnMut(&OutPoint) -> Option<TxOut>, F : Into<u32> {
where
S: FnMut(&OutPoint) -> Option<TxOut>,
F: Into<u32>
{
let tx = encode::serialize(&*self);
let flags: u32 = flags.into();
for (idx, input) in self.input.iter().enumerate() {
@ -542,10 +544,7 @@ impl Transaction {
impl_consensus_encoding!(TxOut, value, script_pubkey);
impl Encodable for OutPoint {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let len = self.txid.consensus_encode(&mut s)?;
Ok(len + self.vout.consensus_encode(s)?)
}
@ -560,10 +559,7 @@ impl Decodable for OutPoint {
}
impl Encodable for TxIn {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += self.previous_output.consensus_encode(&mut s)?;
len += self.script_sig.consensus_encode(&mut s)?;
@ -583,10 +579,7 @@ impl Decodable for TxIn {
}
impl Encodable for Transaction {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += self.version.consensus_encode(&mut s)?;
// To avoid serialization ambiguity, no inputs means we use BIP141 serialization (see
@ -643,9 +636,7 @@ impl Decodable for Transaction {
}
}
// We don't support anything else
x => {
Err(encode::Error::UnsupportedSegwitFlag(x))
}
x => Err(encode::Error::UnsupportedSegwitFlag(x)),
}
// non-segwit
} else {
@ -674,8 +665,8 @@ impl fmt::Display for NonStandardSigHashType {
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl error::Error for NonStandardSigHashType {}
/// Legacy Hashtype of an input's signature.
#[deprecated(since="0.28.0", note="Please use [`EcdsaSigHashType`] instead")]
/// Legacy Hashtype of an input's signature
#[deprecated(since = "0.28.0", note = "Please use [`EcdsaSigHashType`] instead")]
pub type SigHashType = EcdsaSigHashType;
/// Hashtype of an input's signature, encoded in the last byte of the signature.
@ -736,17 +727,17 @@ impl EcdsaSigHashType {
/// Splits the sighash flag into the "real" sighash flag and the ANYONECANPAY boolean.
pub(crate) fn split_anyonecanpay_flag(self) -> (EcdsaSigHashType, bool) {
match self {
EcdsaSigHashType::All => (EcdsaSigHashType::All, false),
EcdsaSigHashType::None => (EcdsaSigHashType::None, false),
EcdsaSigHashType::Single => (EcdsaSigHashType::Single, false),
EcdsaSigHashType::AllPlusAnyoneCanPay => (EcdsaSigHashType::All, true),
EcdsaSigHashType::NonePlusAnyoneCanPay => (EcdsaSigHashType::None, true),
EcdsaSigHashType::SinglePlusAnyoneCanPay => (EcdsaSigHashType::Single, true)
EcdsaSigHashType::All => (EcdsaSigHashType::All, false),
EcdsaSigHashType::None => (EcdsaSigHashType::None, false),
EcdsaSigHashType::Single => (EcdsaSigHashType::Single, false),
EcdsaSigHashType::AllPlusAnyoneCanPay => (EcdsaSigHashType::All, true),
EcdsaSigHashType::NonePlusAnyoneCanPay => (EcdsaSigHashType::None, true),
EcdsaSigHashType::SinglePlusAnyoneCanPay => (EcdsaSigHashType::Single, true)
}
}
/// Reads a 4-byte uint32 as a sighash type.
#[deprecated(since="0.26.1", note="please use `from_u32_consensus` or `from_u32_standard` instead")]
#[deprecated(since = "0.26.1", note = "please use `from_u32_consensus` or `from_u32_standard` instead")]
pub fn from_u32(n: u32) -> EcdsaSigHashType {
Self::from_u32_consensus(n)
}
@ -1117,12 +1108,14 @@ mod tests {
#[test]
fn test_sighashtype_fromstr_display() {
let sighashtypes = vec![("SIGHASH_ALL", EcdsaSigHashType::All),
let sighashtypes = vec![
("SIGHASH_ALL", EcdsaSigHashType::All),
("SIGHASH_NONE", EcdsaSigHashType::None),
("SIGHASH_SINGLE", EcdsaSigHashType::Single),
("SIGHASH_ALL|SIGHASH_ANYONECANPAY", EcdsaSigHashType::AllPlusAnyoneCanPay),
("SIGHASH_NONE|SIGHASH_ANYONECANPAY", EcdsaSigHashType::NonePlusAnyoneCanPay),
("SIGHASH_SINGLE|SIGHASH_ANYONECANPAY", EcdsaSigHashType::SinglePlusAnyoneCanPay)];
("SIGHASH_SINGLE|SIGHASH_ANYONECANPAY", EcdsaSigHashType::SinglePlusAnyoneCanPay)
];
for (s, sht) in sighashtypes {
assert_eq!(sht.to_string(), s);
assert_eq!(EcdsaSigHashType::from_str(s).unwrap(), sht);
@ -1486,7 +1479,7 @@ mod tests {
// test that we fail with repeated use of same input
let mut double_spending = spending.clone();
let re_use = double_spending.input[0].clone();
double_spending.input.push (re_use);
double_spending.input.push(re_use);
assert!(double_spending.verify(|point: &OutPoint| {
if let Some(tx) = spent2.remove(&point.txid) {


@ -202,10 +202,8 @@ impl Witness {
self.last = self.content.len();
let element_len_varint = VarInt(new_element.len() as u64);
let current_content_len = self.content.len();
self.content.resize(
current_content_len + element_len_varint.len() + new_element.len(),
0,
);
self.content
.resize(current_content_len + element_len_varint.len() + new_element.len(), 0);
let end_varint = current_content_len + element_len_varint.len();
element_len_varint
.consensus_encode(&mut self.content[current_content_len..end_varint])
@ -359,14 +357,9 @@ mod test {
for (i, wit_el) in tx.input[0].witness.iter().enumerate() {
assert_eq!(expected_wit[i], wit_el.to_hex());
}
assert_eq!(
expected_wit[1],
tx.input[0].witness.last().unwrap().to_hex()
);
assert_eq!(
expected_wit[0],
tx.input[0].witness.second_to_last().unwrap().to_hex()
);
assert_eq!(expected_wit[1], tx.input[0].witness.last().unwrap().to_hex());
assert_eq!(expected_wit[0], tx.input[0].witness.second_to_last().unwrap().to_hex());
let tx_bytes_back = serialize(&tx);
assert_eq!(tx_bytes_back, tx_bytes);
}


@ -61,7 +61,7 @@ pub enum Error {
actual: u32,
},
/// Tried to allocate an oversized vector
OversizedVectorAllocation{
OversizedVectorAllocation {
/// The capacity requested
requested: usize,
/// The maximum capacity
@ -164,9 +164,7 @@ pub fn deserialize<T: Decodable>(data: &[u8]) -> Result<T, Error> {
/// Deserialize an object from a vector, but will not report an error if said deserialization
/// doesn't consume the entire vector.
pub fn deserialize_partial<T: Decodable>(
data: &[u8],
) -> Result<(T, usize), Error> {
pub fn deserialize_partial<T: Decodable>(data: &[u8]) -> Result<(T, usize), Error> {
let mut decoder = Cursor::new(data);
let rv = Decodable::consensus_decode(&mut decoder)?;
let consumed = decoder.position() as usize;
@ -333,8 +331,8 @@ pub struct VarInt(pub u64);
pub struct CheckedData(pub Vec<u8>);
// Primitive types
macro_rules! impl_int_encodable{
($ty:ident, $meth_dec:ident, $meth_enc:ident) => (
macro_rules! impl_int_encodable {
($ty:ident, $meth_dec:ident, $meth_enc:ident) => {
impl Decodable for $ty {
#[inline]
fn consensus_decode<D: io::Read>(mut d: D) -> Result<Self, Error> {
@ -343,15 +341,12 @@ macro_rules! impl_int_encodable{
}
impl Encodable for $ty {
#[inline]
fn consensus_encode<S: WriteExt>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: WriteExt>(&self, mut s: S) -> Result<usize, io::Error> {
s.$meth_enc(*self)?;
Ok(mem::size_of::<$ty>())
}
}
)
}
}
impl_int_encodable!(u8, read_u8, emit_u8);
@ -439,7 +434,6 @@ impl Decodable for VarInt {
}
}
// Booleans
impl Encodable for bool {
#[inline]
@ -498,13 +492,10 @@ impl Decodable for Cow<'static, str> {
// Arrays
macro_rules! impl_array {
( $size:expr ) => (
( $size:expr ) => {
impl Encodable for [u8; $size] {
#[inline]
fn consensus_encode<S: WriteExt>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: WriteExt>(&self, mut s: S) -> Result<usize, io::Error> {
s.emit_slice(&self[..])?;
Ok(self.len())
}
@ -518,7 +509,7 @@ macro_rules! impl_array {
Ok(ret)
}
}
);
};
}
impl_array!(2);
@ -554,10 +545,7 @@ macro_rules! impl_vec {
($type: ty) => {
impl Encodable for Vec<$type> {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += VarInt(self.len() as u64).consensus_encode(&mut s)?;
for c in self.iter() {
@ -571,8 +559,8 @@ macro_rules! impl_vec {
fn consensus_decode<D: io::Read>(mut d: D) -> Result<Self, Error> {
let len = VarInt::consensus_decode(&mut d)?.0;
let byte_size = (len as usize)
.checked_mul(mem::size_of::<$type>())
.ok_or(self::Error::ParseFailed("Invalid length"))?;
.checked_mul(mem::size_of::<$type>())
.ok_or(self::Error::ParseFailed("Invalid length"))?;
if byte_size > MAX_VEC_SIZE {
return Err(self::Error::OversizedVectorAllocation { requested: byte_size, max: MAX_VEC_SIZE })
}
@ -712,7 +700,7 @@ impl<T: Encodable> Encodable for sync::Arc<T> {
// Tuples
macro_rules! tuple_encode {
($($x:ident),*) => (
($($x:ident),*) => {
impl <$($x: Encodable),*> Encodable for ($($x),*) {
#[inline]
#[allow(non_snake_case)]
@ -734,7 +722,7 @@ macro_rules! tuple_encode {
Ok(($({let $x = Decodable::consensus_decode(&mut d)?; $x }),*))
}
}
);
};
}
tuple_encode!(T0, T1);
@ -1033,7 +1021,7 @@ mod tests {
let witness = vec![vec![0u8; 3_999_999]; 2];
let ser = serialize(&witness);
let mut reader = io::Cursor::new(ser);
let err = Vec::<Vec<u8>>::consensus_decode(&mut reader);
let err = Vec::<Vec<u8>>::consensus_decode(&mut reader);
assert!(err.is_err());
}


@ -446,9 +446,10 @@ macro_rules! impl_bytes_newtype {
impl $crate::hashes::hex::FromHex for $t {
fn from_byte_iter<I>(iter: I) -> Result<Self, $crate::hashes::hex::Error>
where I: ::core::iter::Iterator<Item=Result<u8, $crate::hashes::hex::Error>> +
::core::iter::ExactSizeIterator +
::core::iter::DoubleEndedIterator,
where
I: ::core::iter::Iterator<Item=Result<u8, $crate::hashes::hex::Error>>
+ ::core::iter::ExactSizeIterator
+ ::core::iter::DoubleEndedIterator,
{
if iter.len() == $len {
let mut ret = [0; $len];


@ -38,7 +38,7 @@ pub struct Address {
pub port: u16
}
const ONION : [u16; 3] = [0xFD87, 0xD87E, 0xEB43];
const ONION: [u16; 3] = [0xFD87, 0xD87E, 0xEB43];
impl Address {
/// Create an address message for a socket
@ -58,10 +58,7 @@ impl Address {
if addr[0..3] == ONION {
return Err(io::Error::from(io::ErrorKind::AddrNotAvailable));
}
let ipv6 = Ipv6Addr::new(
addr[0],addr[1],addr[2],addr[3],
addr[4],addr[5],addr[6],addr[7]
);
let ipv6 = Ipv6Addr::new(addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7]);
if let Some(ipv4) = ipv6.to_ipv4() {
Ok(SocketAddr::V4(SocketAddrV4::new(ipv4, self.port)))
} else {
@ -82,10 +79,7 @@ fn addr_to_be(addr: [u16; 8]) -> [u16; 8] {
impl Encodable for Address {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let len = self.services.consensus_encode(&mut s)?
+ addr_to_be(self.address).consensus_encode(&mut s)?
@ -149,12 +143,11 @@ pub enum AddrV2 {
impl Encodable for AddrV2 {
fn consensus_encode<W: io::Write>(&self, e: W) -> Result<usize, io::Error> {
fn encode_addr<W: io::Write>(mut e: W, network: u8, bytes: &[u8]) -> Result<usize, io::Error> {
let len =
network.consensus_encode(&mut e)? +
VarInt(bytes.len() as u64).consensus_encode(&mut e)? +
bytes.len();
e.emit_slice(bytes)?;
Ok(len)
let len = network.consensus_encode(&mut e)?
+ VarInt(bytes.len() as u64).consensus_encode(&mut e)?
+ bytes.len();
e.emit_slice(bytes)?;
Ok(len)
}
Ok(match *self {
AddrV2::Ipv4(ref addr) => encode_addr(e, 1, &addr.octets())?,
@ -194,10 +187,7 @@ impl Decodable for AddrV2 {
if addr[0..6] == [0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0xFFFF] {
return Err(encode::Error::ParseFailed("IPV4 wrapped address sent with IPv6 network id"));
}
AddrV2::Ipv6(Ipv6Addr::new(
addr[0],addr[1],addr[2],addr[3],
addr[4],addr[5],addr[6],addr[7]
))
AddrV2::Ipv6(Ipv6Addr::new(addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7]))
},
3 => {
if len != 10 {
@ -230,10 +220,7 @@ impl Decodable for AddrV2 {
return Err(encode::Error::ParseFailed("Invalid CJDNS address"));
}
let addr = addr_to_be(addr);
AddrV2::Cjdns(Ipv6Addr::new(
addr[0],addr[1],addr[2],addr[3],
addr[4],addr[5],addr[6],addr[7]
))
AddrV2::Cjdns(Ipv6Addr::new(addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7]))
},
_ => {
// len already checked above to be <= 512
@ -287,7 +274,7 @@ impl Encodable for AddrV2Message {
impl Decodable for AddrV2Message {
fn consensus_decode<D: io::Read>(mut d: D) -> Result<Self, encode::Error> {
Ok(AddrV2Message{
Ok(AddrV2Message {
time: Decodable::consensus_decode(&mut d)?,
services: ServiceFlags::from(VarInt::consensus_decode(&mut d)?.0),
addr: Decodable::consensus_decode(&mut d)?,


@ -274,10 +274,7 @@ impl ops::BitXorAssign for ServiceFlags {
impl Encodable for ServiceFlags {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
self.0.consensus_encode(&mut s)
}
}
@ -296,39 +293,16 @@ mod tests {
#[test]
fn serialize_test() {
assert_eq!(
serialize(&Network::Bitcoin.magic()),
&[0xf9, 0xbe, 0xb4, 0xd9]
);
assert_eq!(
serialize(&Network::Testnet.magic()),
&[0x0b, 0x11, 0x09, 0x07]
);
assert_eq!(
serialize(&Network::Signet.magic()),
&[0x0a, 0x03, 0xcf, 0x40]
);
assert_eq!(
serialize(&Network::Regtest.magic()),
&[0xfa, 0xbf, 0xb5, 0xda]
);
assert_eq!(serialize(&Network::Bitcoin.magic()), &[0xf9, 0xbe, 0xb4, 0xd9]);
assert_eq!(serialize(&Network::Testnet.magic()), &[0x0b, 0x11, 0x09, 0x07]);
assert_eq!(serialize(&Network::Signet.magic()), &[0x0a, 0x03, 0xcf, 0x40]);
assert_eq!(serialize(&Network::Regtest.magic()), &[0xfa, 0xbf, 0xb5, 0xda]);
assert_eq!(deserialize(&[0xf9, 0xbe, 0xb4, 0xd9]).ok(), Some(Network::Bitcoin.magic()));
assert_eq!(deserialize(&[0x0b, 0x11, 0x09, 0x07]).ok(), Some(Network::Testnet.magic()));
assert_eq!(deserialize(&[0x0a, 0x03, 0xcf, 0x40]).ok(), Some(Network::Signet.magic()));
assert_eq!(deserialize(&[0xfa, 0xbf, 0xb5, 0xda]).ok(), Some(Network::Regtest.magic()));
assert_eq!(
deserialize(&[0xf9, 0xbe, 0xb4, 0xd9]).ok(),
Some(Network::Bitcoin.magic())
);
assert_eq!(
deserialize(&[0x0b, 0x11, 0x09, 0x07]).ok(),
Some(Network::Testnet.magic())
);
assert_eq!(
deserialize(&[0x0a, 0x03, 0xcf, 0x40]).ok(),
Some(Network::Signet.magic())
);
assert_eq!(
deserialize(&[0xfa, 0xbf, 0xb5, 0xda]).ok(),
Some(Network::Regtest.magic())
);
}
#[test]
@ -385,4 +359,3 @@ mod tests {
assert_eq!("ServiceFlags(WITNESS|COMPACT_FILTERS|0xb0)", flag.to_string());
}
}


@ -75,10 +75,7 @@ impl AsRef<str> for CommandString {
impl Encodable for CommandString {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, s: S) -> Result<usize, io::Error> {
let mut rawbytes = [0u8; 12];
let strbytes = self.0.as_bytes();
debug_assert!(strbytes.len() <= 12);
@ -116,7 +113,7 @@ impl fmt::Display for CommandStringError {
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
#[cfg(feature = "std")]
impl ::std::error::Error for CommandStringError { }
impl ::std::error::Error for CommandStringError {}
/// A Network message
#[derive(Clone, Debug, PartialEq, Eq)]
@ -281,10 +278,7 @@ struct HeaderSerializationWrapper<'a>(&'a Vec<block::BlockHeader>);
impl<'a> Encodable for HeaderSerializationWrapper<'a> {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += VarInt(self.0.len() as u64).consensus_encode(&mut s)?;
for header in self.0.iter() {
@ -296,10 +290,7 @@ impl<'a> Encodable for HeaderSerializationWrapper<'a> {
}
impl Encodable for RawNetworkMessage {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += self.magic.consensus_encode(&mut s)?;
len += self.command().consensus_encode(&mut s)?;


@ -54,14 +54,10 @@ pub enum Inventory {
impl Encodable for Inventory {
#[inline]
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
macro_rules! encode_inv {
($code:expr, $item:expr) => {
u32::consensus_encode(&$code, &mut s)? +
$item.consensus_encode(&mut s)?
u32::consensus_encode(&$code, &mut s)? + $item.consensus_encode(&mut s)?
}
}
Ok(match *self {


@ -41,7 +41,7 @@ impl<R: Read> fmt::Debug for StreamReader<R> {
impl<R: Read> StreamReader<R> {
/// Constructs new stream reader for a given input stream `stream`
#[deprecated(since="0.28.0", note="wrap your stream into a buffered reader if necessary and use consensus_encode directly")]
#[deprecated(since = "0.28.0", note = "wrap your stream into a buffered reader if necessary and use consensus_encode directly")]
pub fn new(stream: R, _buffer_size: Option<usize>) -> StreamReader<R> {
StreamReader {
stream: BufReader::new(stream),
@ -49,7 +49,7 @@ impl<R: Read> StreamReader<R> {
}
/// Reads stream and parses next message from its current input
#[deprecated(since="0.28.0", note="wrap your stream into a buffered reader if necessary and use consensus_encode directly")]
#[deprecated(since = "0.28.0", note = "wrap your stream into a buffered reader if necessary and use consensus_encode directly")]
pub fn read_next<D: Decodable>(&mut self) -> Result<D, encode::Error> {
Decodable::consensus_decode(&mut self.stream)
}
@ -222,7 +222,7 @@ mod test {
let istream = TcpStream::connect(format!("127.0.0.1:{}", port)).unwrap();
let reader = BufReader::new(istream);
return (handle, reader)
(handle, reader)
}
#[test]


@ -13,8 +13,8 @@ pub mod btreemap_byte_values {
use hashes::hex::{FromHex, ToHex};
use serde;
pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S)
-> Result<S::Ok, S::Error> where
pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
T: serde::Serialize + ::core::hash::Hash + Eq + Ord,
{
@ -32,15 +32,16 @@ pub mod btreemap_byte_values {
}
}
pub fn deserialize<'de, D, T>(d: D)
-> Result<BTreeMap<T, Vec<u8>>, D::Error> where
pub fn deserialize<'de, D, T>(d: D) -> Result<BTreeMap<T, Vec<u8>>, D::Error>
where
D: serde::Deserializer<'de>,
T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
{
use ::core::marker::PhantomData;
struct Visitor<T>(PhantomData<T>);
impl<'de, T> serde::de::Visitor<'de> for Visitor<T> where
impl<'de, T> serde::de::Visitor<'de> for Visitor<T>
where
T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
{
type Value = BTreeMap<T, Vec<u8>>;
@ -79,8 +80,8 @@ pub mod btreemap_as_seq {
use prelude::*;
use serde;
pub fn serialize<S, T, U>(v: &BTreeMap<T, U>, s: S)
-> Result<S::Ok, S::Error> where
pub fn serialize<S, T, U>(v: &BTreeMap<T, U>, s: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
T: serde::Serialize + ::core::hash::Hash + Eq + Ord,
U: serde::Serialize,
@ -99,8 +100,8 @@ pub mod btreemap_as_seq {
}
}
pub fn deserialize<'de, D, T, U>(d: D)
-> Result<BTreeMap<T, U>, D::Error> where
pub fn deserialize<'de, D, T, U>(d: D) -> Result<BTreeMap<T, U>, D::Error>
where
D: serde::Deserializer<'de>,
T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
U: serde::Deserialize<'de>,
@ -108,7 +109,8 @@ pub mod btreemap_as_seq {
use ::core::marker::PhantomData;
struct Visitor<T, U>(PhantomData<(T, U)>);
impl<'de, T, U> serde::de::Visitor<'de> for Visitor<T, U> where
impl<'de, T, U> serde::de::Visitor<'de> for Visitor<T, U>
where
T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
U: serde::Deserialize<'de>,
{
@ -164,8 +166,8 @@ pub mod btreemap_as_seq_byte_values {
&'a [u8],
);
pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S)
-> Result<S::Ok, S::Error> where
pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
T: serde::Serialize + ::core::hash::Hash + Eq + Ord + 'static,
{
@ -183,15 +185,16 @@ pub mod btreemap_as_seq_byte_values {
}
}
pub fn deserialize<'de, D, T>(d: D)
-> Result<BTreeMap<T, Vec<u8>>, D::Error> where
pub fn deserialize<'de, D, T>(d: D) -> Result<BTreeMap<T, Vec<u8>>, D::Error>
where
D: serde::Deserializer<'de>,
T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
{
use ::core::marker::PhantomData;
struct Visitor<T>(PhantomData<T>);
impl<'de, T> serde::de::Visitor<'de> for Visitor<T> where
impl<'de, T> serde::de::Visitor<'de> for Visitor<T>
where
T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
{
type Value = BTreeMap<T, Vec<u8>>;
@ -228,7 +231,8 @@ pub mod hex_bytes {
use serde;
pub fn serialize<T, S>(bytes: &T, s: S) -> Result<S::Ok, S::Error>
where T: serde::Serialize + AsRef<[u8]>, S: serde::Serializer
where
T: serde::Serialize + AsRef<[u8]>, S: serde::Serializer
{
// Don't do anything special when not human readable.
if !s.is_human_readable() {
@ -239,7 +243,8 @@ pub mod hex_bytes {
}
pub fn deserialize<'de, D, B>(d: D) -> Result<B, D::Error>
where D: serde::Deserializer<'de>, B: serde::Deserialize<'de> + FromHex,
where
D: serde::Deserializer<'de>, B: serde::Deserialize<'de> + FromHex,
{
struct Visitor<B>(::core::marker::PhantomData<B>);
@ -251,7 +256,8 @@ pub mod hex_bytes {
}
fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
where E: serde::de::Error,
where
E: serde::de::Error,
{
if let Ok(hex) = ::core::str::from_utf8(v) {
FromHex::from_hex(hex).map_err(E::custom)
@ -261,7 +267,8 @@ pub mod hex_bytes {
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where E: serde::de::Error,
where
E: serde::de::Error,
{
FromHex::from_hex(v).map_err(E::custom)
}


@ -27,4 +27,3 @@ macro_rules! serde_round_trip (
assert_eq!($var, decoded);
})
);


@ -94,17 +94,10 @@ impl fmt::Display for Error {
Error::InvalidWitnessVersion(v) => write!(f, "invalid witness script version: {}", v),
Error::UnparsableWitnessVersion(_) => write!(f, "incorrect format of a witness version byte"),
Error::MalformedWitnessVersion => f.write_str("bitcoin script opcode does not match any known witness version, the script is malformed"),
Error::InvalidWitnessProgramLength(l) => write!(f,
"the witness program must be between 2 and 40 bytes in length: length={}", l,
),
Error::InvalidSegwitV0ProgramLength(l) => write!(f,
"a v0 witness program must be either of length 20 or 32 bytes: length={}", l,
),
Error::UncompressedPubkey => write!(f,
"an uncompressed pubkey was used where it is not allowed",
),
Error::ExcessiveScriptSize => write!(f,
"Script size exceed 520 bytes")
Error::InvalidWitnessProgramLength(l) => write!(f, "the witness program must be between 2 and 40 bytes in length: length={}", l),
Error::InvalidSegwitV0ProgramLength(l) => write!(f, "a v0 witness program must be either of length 20 or 32 bytes: length={}", l),
Error::UncompressedPubkey => write!(f, "an uncompressed pubkey was used where it is not allowed"),
Error::ExcessiveScriptSize => write!(f, "Script size exceed 520 bytes"),
}
}
}
@ -395,14 +388,11 @@ impl Payload {
/// Generates a script pubkey spending to this [Payload].
pub fn script_pubkey(&self) -> script::Script {
match *self {
Payload::PubkeyHash(ref hash) =>
script::Script::new_p2pkh(hash),
Payload::ScriptHash(ref hash) =>
script::Script::new_p2sh(hash),
Payload::WitnessProgram {
version,
program: ref prog,
} => script::Script::new_witness_program(version, prog)
Payload::PubkeyHash(ref hash) => script::Script::new_p2pkh(hash),
Payload::ScriptHash(ref hash) => script::Script::new_p2sh(hash),
Payload::WitnessProgram { version, program: ref prog } => {
script::Script::new_witness_program(version, prog)
}
}
}
@ -622,10 +612,7 @@ impl Address {
/// Creates a pay to taproot address from a pre-tweaked output key.
///
/// This method is not recommended for use, [`Address::p2tr()`] should be used where possible.
pub fn p2tr_tweaked(
output_key: TweakedPublicKey,
network: Network
) -> Address {
pub fn p2tr_tweaked(output_key: TweakedPublicKey, network: Network) -> Address {
Address {
network,
payload: Payload::p2tr_tweaked(output_key),
@ -1264,7 +1251,7 @@ mod tests {
}
#[test]
fn p2tr_from_untweaked(){
fn p2tr_from_untweaked() {
//Test case from BIP-086
let internal_key = XOnlyPublicKey::from_str("cc8a4bc64d897bddc5fbc2f670f7a8ba0b386779106cf1223c6fc5d7cd6fc115").unwrap();
let secp = Secp256k1::verification_only();


@ -175,7 +175,7 @@ impl fmt::Display for ParseAmountError {
ParseAmountError::PossiblyConfusingDenomination(ref d) => {
let (letter, upper, lower) = match d.chars().next() {
Some('M') => ('M', "Mega", "milli"),
Some('P') => ('P',"Peta", "pico"),
Some('P') => ('P', "Peta", "pico"),
// This panic could be avoided by adding enum ConfusingDenomination { Mega, Peta } but is it worth it?
_ => panic!("invalid error information"),
};
@ -599,7 +599,7 @@ impl FromStr for Amount {
}
impl ::core::iter::Sum for Amount {
fn sum<I: Iterator<Item=Self>>(iter: I) -> Self {
fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
let sats: u64 = iter.map(|amt| amt.0).sum();
Amount::from_sat(sats)
}
@ -933,7 +933,7 @@ impl FromStr for SignedAmount {
}
impl ::core::iter::Sum for SignedAmount {
fn sum<I: Iterator<Item=Self>>(iter: I) -> Self {
fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
let sats: i64 = iter.map(|amt| amt.0).sum();
SignedAmount::from_sat(sats)
}
@ -961,10 +961,7 @@ impl<T> CheckedSum<SignedAmount> for T where T: Iterator<Item = SignedAmount> {
fn checked_sum(mut self) -> Option<SignedAmount> {
let first = Some(self.next().unwrap_or_default());
self.fold(
first,
|acc, item| acc.and_then(|acc| acc.checked_add(item))
)
self.fold(first, |acc, item| acc.and_then(|acc| acc.checked_add(item)))
}
}
@ -1133,12 +1130,13 @@ pub mod serde {
fn visit_none<E>(self) -> Result<Self::Value, E>
where
E: de::Error {
E: de::Error,
{
Ok(None)
}
fn visit_some<D>(self, d: D) -> Result<Self::Value, D::Error>
where
D: Deserializer<'de>
D: Deserializer<'de>,
{
Ok(Some(X::des_sat(d)?))
}
@ -1187,7 +1185,7 @@ pub mod serde {
) -> Result<Option<A>, D::Error> {
struct VisitOptAmt<X>(PhantomData<X>);
impl<'de, X :SerdeAmountForOpt> de::Visitor<'de> for VisitOptAmt<X> {
impl<'de, X: SerdeAmountForOpt> de::Visitor<'de> for VisitOptAmt<X> {
type Value = Option<X>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
@ -1196,7 +1194,8 @@ pub mod serde {
fn visit_none<E>(self) -> Result<Self::Value, E>
where
E: de::Error {
E: de::Error,
{
Ok(None)
}
fn visit_some<D>(self, d: D) -> Result<Self::Value, D::Error>
@ -1398,12 +1397,12 @@ mod tests {
let sa = SignedAmount::from_sat;
let ua = Amount::from_sat;
assert_eq!(Amount::max_value().to_signed(), Err(E::TooBig));
assert_eq!(Amount::max_value().to_signed(), Err(E::TooBig));
assert_eq!(ua(i64::max_value() as u64).to_signed(), Ok(sa(i64::max_value())));
assert_eq!(ua(0).to_signed(), Ok(sa(0)));
assert_eq!(ua(0).to_signed(), Ok(sa(0)));
assert_eq!(ua(1).to_signed(), Ok( sa(1)));
assert_eq!(ua(1).to_signed(), Ok(sa(1)));
assert_eq!(ua(i64::max_value() as u64 + 1).to_signed(), Err(E::TooBig));
assert_eq!(ua(1).to_signed(), Ok(sa(1)));
assert_eq!(ua(i64::max_value() as u64 + 1).to_signed(), Err(E::TooBig));
assert_eq!(sa(-1).to_unsigned(), Err(E::Negative));
assert_eq!(sa(i64::max_value()).to_unsigned(), Ok(ua(i64::max_value() as u64)));
@ -1532,10 +1531,7 @@ mod tests {
samt: SignedAmount::from_sat(-123456789),
},
&[
serde_test::Token::Struct {
name: "T",
len: 2,
},
serde_test::Token::Struct { name: "T", len: 2 },
serde_test::Token::Str("amt"),
serde_test::Token::U64(123456789),
serde_test::Token::Str("samt"),


@ -293,10 +293,10 @@ mod tests {
// Addresses
let addr = Vec::from_hex("00f8917303bfa8ef24f292e8fa1419b20460ba064d").unwrap();
assert_eq!(&check_encode_slice(&addr[..]), "1PfJpZsjreyVrqeoAfabrRwwjQyoSQMmHH");
}
}
#[test]
fn test_base58_decode() {
#[test]
fn test_base58_decode() {
// Basics
assert_eq!(from("1").ok(), Some(vec![0u8]));
assert_eq!(from("2").ok(), Some(vec![1u8]));


@ -33,7 +33,7 @@ use util::sighash;
/// Parts of a sighash which are common across inputs or signatures, and which are
/// sufficient (in conjunction with a private key) to sign the transaction
#[derive(Clone, PartialEq, Eq, Debug)]
#[deprecated(since="0.24.0", note="please use [sighash::SigHashCache] instead")]
#[deprecated(since = "0.24.0", note = "please use [sighash::SigHashCache] instead")]
pub struct SighashComponents {
tx_version: i32,
tx_locktime: u32,
@ -107,13 +107,13 @@ impl SighashComponents {
}
/// A replacement for SigHashComponents which supports all sighash modes
#[deprecated(since="0.27.0", note="please use [sighash::SigHashCache] instead")]
pub struct SigHashCache<R: Deref<Target=Transaction>> {
#[deprecated(since = "0.27.0", note = "please use [sighash::SigHashCache] instead")]
pub struct SigHashCache<R: Deref<Target = Transaction>> {
cache: sighash::SigHashCache<R>,
}
#[allow(deprecated)]
impl<R: Deref<Target=Transaction>> SigHashCache<R> {
impl<R: Deref<Target = Transaction>> SigHashCache<R> {
/// Compute the sighash components from an unsigned transaction and auxiliary
/// in a lazy manner when required.
/// For the generated sighashes to be valid, no fields in the transaction may change except for
@ -155,7 +155,7 @@ impl<R: Deref<Target=Transaction>> SigHashCache<R> {
}
#[allow(deprecated)]
impl<R: DerefMut<Target=Transaction>> SigHashCache<R> {
impl<R: DerefMut<Target = Transaction>> SigHashCache<R> {
/// When the SigHashCache is initialized with a mutable reference to a transaction instead of a
/// regular reference, this method is available to allow modification to the witnesses.
///


@ -519,7 +519,7 @@ mod test {
use super::*;
extern crate serde_json;
use self::serde_json::{Value};
use self::serde_json::Value;
use consensus::encode::deserialize;
use std::collections::HashMap;
@ -576,7 +576,7 @@ mod test {
}
#[test]
fn test_filter () {
fn test_filter() {
let mut patterns = HashSet::new();
patterns.insert(Vec::from_hex("000000").unwrap());


@ -486,11 +486,11 @@ impl fmt::Display for Error {
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl error::Error for Error {
fn cause(&self) -> Option<&dyn error::Error> {
if let Error::Secp256k1(ref e) = *self {
Some(e)
} else {
None
}
if let Error::Secp256k1(ref e) = *self {
Some(e)
} else {
None
}
}
}
@ -901,34 +901,26 @@ mod tests {
assert_eq!(indexed.child(ChildNumber::from_hardened_idx(2).unwrap()), path);
}
fn test_path<C: secp256k1::Signing + secp256k1::Verification>(secp: &Secp256k1<C>,
network: Network,
seed: &[u8],
path: DerivationPath,
expected_sk: &str,
expected_pk: &str) {
fn test_path<C: secp256k1::Signing + secp256k1::Verification>(
secp: &Secp256k1<C>,
network: Network,
seed: &[u8],
path: DerivationPath,
expected_sk: &str,
expected_pk: &str)
{
let mut sk = ExtendedPrivKey::new_master(network, seed).unwrap();
let mut pk = ExtendedPubKey::from_priv(secp, &sk);
// Check derivation convenience method for ExtendedPrivKey
assert_eq!(
&sk.derive_priv(secp, &path).unwrap().to_string()[..],
expected_sk
);
assert_eq!(&sk.derive_priv(secp, &path).unwrap().to_string()[..], expected_sk);
// Check derivation convenience method for ExtendedPubKey, should error
// appropriately if any ChildNumber is hardened
if path.0.iter().any(|cnum| cnum.is_hardened()) {
assert_eq!(
pk.derive_pub(secp, &path),
Err(Error::CannotDeriveFromHardenedKey)
);
assert_eq!(pk.derive_pub(secp, &path), Err(Error::CannotDeriveFromHardenedKey));
} else {
assert_eq!(
&pk.derive_pub(secp, &path).unwrap().to_string()[..],
expected_pk
);
assert_eq!(&pk.derive_pub(secp, &path).unwrap().to_string()[..], expected_pk);
}
// Derive keys, checking hardened and non-hardened derivation one-by-one


@ -36,7 +36,8 @@ use consensus::encode::Encodable;
/// - `Some(hash)` if `hashes` contains one element. A single hash is by definition the merkle root.
/// - `Some(merkle_root)` if length of `hashes` is greater than one.
pub fn bitcoin_merkle_root_inline<T>(hashes: &mut [T]) -> Option<T>
where T: Hash + Encodable,
where
T: Hash + Encodable,
<T as Hash>::Engine: io::Write,
{
match hashes.len() {
@ -53,9 +54,10 @@ pub fn bitcoin_merkle_root_inline<T>(hashes: &mut [T]) -> Option<T>
/// - `Some(hash)` if `hashes` contains one element. A single hash is by definition the merkle root.
/// - `Some(merkle_root)` if length of `hashes` is greater than one.
pub fn bitcoin_merkle_root<T, I>(mut hashes: I) -> Option<T>
where T: Hash + Encodable,
<T as Hash>::Engine: io::Write,
I: Iterator<Item = T>,
where
T: Hash + Encodable,
<T as Hash>::Engine: io::Write,
I: Iterator<Item = T>,
{
let first = hashes.next()?;
let second = match hashes.next() {
@ -84,8 +86,9 @@ pub fn bitcoin_merkle_root<T, I>(mut hashes: I) -> Option<T>
// `hashes` must contain at least one hash.
fn merkle_root_r<T>(hashes: &mut [T]) -> T
where T: Hash + Encodable,
<T as Hash>::Engine: io::Write,
where
T: Hash + Encodable,
<T as Hash>::Engine: io::Write,
{
if hashes.len() == 1 {
return hashes[0]


@ -31,7 +31,6 @@ use hashes::{Hash, hash160, hex, hex::FromHex};
use hash_types::{PubkeyHash, WPubkeyHash};
use util::base58;
/// A key-related error.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum Error {
@ -45,7 +44,6 @@ pub enum Error {
Hex(hex::Error)
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
@ -158,14 +156,10 @@ impl PublicKey {
let mut bytes = [0; 65];
reader.read_exact(&mut bytes[0..1])?;
let bytes = if bytes[0] < 4 {
&mut bytes[..33]
} else {
&mut bytes[..65]
};
let bytes = if bytes[0] < 4 { &mut bytes[..33] } else { &mut bytes[..65] };
reader.read_exact(&mut bytes[1..])?;
Self::from_slice(bytes).map_err(|e|{
Self::from_slice(bytes).map_err(|e| {
// Need a static string for core2
#[cfg(feature = "std")]
let reason = e;
@ -189,10 +183,12 @@ impl PublicKey {
/// Deserialize a public key from a slice
pub fn from_slice(data: &[u8]) -> Result<PublicKey, Error> {
let compressed: bool = match data.len() {
let compressed = match data.len() {
33 => true,
65 => false,
len => { return Err(base58::Error::InvalidLength(len).into()); },
len => {
return Err(base58::Error::InvalidLength(len).into());
},
};
if !compressed && data[0] != 0x04 {
@ -285,10 +281,7 @@ impl PrivateKey {
/// Deserialize a private key from a slice
pub fn from_slice(data: &[u8], network: Network) -> Result<PrivateKey, Error> {
Ok(PrivateKey::new(
secp256k1::SecretKey::from_slice(data)?,
network,
))
Ok(PrivateKey::new(secp256k1::SecretKey::from_slice(data)?, network))
}
/// Format the private key to WIF format.
@ -323,13 +316,17 @@ impl PrivateKey {
let compressed = match data.len() {
33 => false,
34 => true,
_ => { return Err(Error::Base58(base58::Error::InvalidLength(data.len()))); }
_ => {
return Err(Error::Base58(base58::Error::InvalidLength(data.len())));
}
};
let network = match data[0] {
128 => Network::Bitcoin,
239 => Network::Testnet,
x => { return Err(Error::Base58(base58::Error::InvalidAddressVersion(x))); }
x => {
return Err(Error::Base58(base58::Error::InvalidAddressVersion(x)));
}
};
Ok(PrivateKey {


@ -189,9 +189,7 @@ impl PartialMerkleTree {
}
// there can never be more hashes provided than one for every txid
if self.hashes.len() as u32 > self.num_transactions {
return Err(BadFormat(
"Proof contains more hashes than transactions".to_owned(),
));
return Err(BadFormat("Proof contains more hashes than transactions".to_owned()));
};
// there must be at least one bit per node in the partial tree, and at least one node per hash
if self.bits.len() < self.hashes.len() {
@ -246,13 +244,7 @@ impl PartialMerkleTree {
}
/// Recursive function that traverses tree nodes, storing the data as bits and hashes
fn traverse_and_build(
&mut self,
height: u32,
pos: u32,
txids: &[Txid],
matches: &[bool],
) {
fn traverse_and_build(&mut self, height: u32, pos: u32, txids: &[Txid], matches: &[bool]) {
// Determine whether this node is the parent of at least one matched txid
let mut parent_of_match = false;
let mut p = pos << height;
@ -348,10 +340,7 @@ impl PartialMerkleTree {
}
impl Encodable for PartialMerkleTree {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let ret = self.num_transactions.consensus_encode(&mut s)?
+ self.hashes.consensus_encode(&mut s)?;
let mut bytes: Vec<u8> = vec![0; (self.bits.len() + 7) / 8];
@ -432,7 +421,9 @@ impl MerkleBlock {
/// assert_eq!(txid, matches[0]);
/// ```
pub fn from_block_with_predicate<F>(block: &Block, match_txids: F) -> Self
where F: Fn(&Txid) -> bool {
where
F: Fn(&Txid) -> bool
{
let block_txids: Vec<_> = block.txdata.iter().map(Transaction::txid).collect();
Self::from_header_txids_with_predicate(&block.header, &block_txids, match_txids)
}
@ -440,7 +431,7 @@ impl MerkleBlock {
/// Create a MerkleBlock from a block, that contains proofs for specific txids.
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
#[deprecated(since="0.26.2", note="use from_block_with_predicate")]
#[deprecated(since = "0.26.2", note = "use from_block_with_predicate")]
pub fn from_block(block: &Block, match_txids: &::std::collections::HashSet<Txid>) -> Self {
Self::from_block_with_predicate(block, |t| match_txids.contains(t))
}
@ -453,7 +444,10 @@ impl MerkleBlock {
header: &BlockHeader,
block_txids: &[Txid],
match_txids: F,
) -> Self where F: Fn(&Txid) -> bool {
) -> Self
where
F: Fn(&Txid) -> bool
{
let matches: Vec<bool> = block_txids
.iter()
.map(match_txids)
@ -469,7 +463,7 @@ impl MerkleBlock {
/// Create a MerkleBlock from the block's header and txids, that should contain proofs for match_txids.
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
#[deprecated(since="0.26.2", note="use from_header_txids_with_predicate")]
#[deprecated(since = "0.26.2", note = "use from_header_txids_with_predicate")]
pub fn from_header_txids(
header: &BlockHeader,
block_txids: &[Txid],
@ -497,10 +491,7 @@ impl MerkleBlock {
}
impl Encodable for MerkleBlock {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let len = self.header.consensus_encode(&mut s)?
+ self.txn.consensus_encode(s)?;
Ok(len)


@ -217,8 +217,12 @@ mod message_signing {
/// instance of it, returning the number of instances removed.
/// Loops through the vector opcode by opcode, skipping pushed data.
pub fn script_find_and_remove(haystack: &mut Vec<u8>, needle: &[u8]) -> usize {
if needle.len() > haystack.len() { return 0; }
if needle.is_empty() { return 0; }
if needle.len() > haystack.len() {
return 0;
}
if needle.is_empty() {
return 0;
}
let mut top = haystack.len() - needle.len();
let mut n_deleted = 0;
@ -233,7 +237,9 @@ pub fn script_find_and_remove(haystack: &mut Vec<u8>, needle: &[u8]) -> usize {
// This is ugly but prevents infinite loop in case of overflow
let overflow = top < needle.len();
top = top.wrapping_sub(needle.len());
if overflow { break; }
if overflow {
break;
}
} else {
i += match opcodes::All::from((*haystack)[i]).classify(opcodes::ClassifyContext::Legacy) {
opcodes::Class::PushBytes(n) => n as usize + 1,


@ -67,11 +67,11 @@ const PSBT_IN_TAP_SCRIPT_SIG: u8 = 0x14;
/// Type: Taproot Leaf Script PSBT_IN_TAP_LEAF_SCRIPT = 0x14
const PSBT_IN_TAP_LEAF_SCRIPT: u8 = 0x15;
/// Type: Taproot Key BIP 32 Derivation Path PSBT_IN_TAP_BIP32_DERIVATION = 0x16
const PSBT_IN_TAP_BIP32_DERIVATION : u8 = 0x16;
const PSBT_IN_TAP_BIP32_DERIVATION: u8 = 0x16;
/// Type: Taproot Internal Key PSBT_IN_TAP_INTERNAL_KEY = 0x17
const PSBT_IN_TAP_INTERNAL_KEY : u8 = 0x17;
const PSBT_IN_TAP_INTERNAL_KEY: u8 = 0x17;
/// Type: Taproot Merkle Root PSBT_IN_TAP_MERKLE_ROOT = 0x18
const PSBT_IN_TAP_MERKLE_ROOT : u8 = 0x18;
const PSBT_IN_TAP_MERKLE_ROOT: u8 = 0x18;
/// Type: Proprietary Use Type PSBT_IN_PROPRIETARY = 0xFC
const PSBT_IN_PROPRIETARY: u8 = 0xFC;
@ -133,9 +133,9 @@ pub struct Input {
#[cfg_attr(feature = "serde", serde(with = "::serde_utils::btreemap_as_seq"))]
pub tap_key_origins: BTreeMap<XOnlyPublicKey, (Vec<TapLeafHash>, KeySource)>,
/// Taproot Internal key.
pub tap_internal_key : Option<XOnlyPublicKey>,
pub tap_internal_key: Option<XOnlyPublicKey>,
/// Taproot Merkle root.
pub tap_merkle_root : Option<TapBranchHash>,
pub tap_merkle_root: Option<TapBranchHash>,
/// Proprietary key-value pairs for this input.
#[cfg_attr(feature = "serde", serde(with = "::serde_utils::btreemap_as_seq_byte_values"))]
pub proprietary: BTreeMap<raw::ProprietaryKey, Vec<u8>>,
@ -157,13 +157,13 @@ pub struct PsbtSigHashType {
impl From<EcdsaSigHashType> for PsbtSigHashType {
fn from(ecdsa_hash_ty: EcdsaSigHashType) -> Self {
PsbtSigHashType {inner: ecdsa_hash_ty as u32}
PsbtSigHashType { inner: ecdsa_hash_ty as u32 }
}
}
impl From<SchnorrSigHashType> for PsbtSigHashType {
fn from(schnorr_hash_ty: SchnorrSigHashType) -> Self {
PsbtSigHashType {inner: schnorr_hash_ty as u32}
PsbtSigHashType { inner: schnorr_hash_ty as u32 }
}
}
@ -289,7 +289,7 @@ impl Input {
self.tap_script_sigs <= <raw_key: (XOnlyPublicKey, TapLeafHash)>|<raw_value: SchnorrSig>
}
}
PSBT_IN_TAP_LEAF_SCRIPT=> {
PSBT_IN_TAP_LEAF_SCRIPT => {
impl_psbt_insert_pair! {
self.tap_scripts <= <raw_key: ControlBlock>|< raw_value: (Script, LeafVersion)>
}

View File

@ -32,16 +32,10 @@ pub(super) trait Map {
fn get_pairs(&self) -> Result<Vec<raw::Pair>, io::Error>;
/// Encodes map data with bitcoin consensus encoding.
fn consensus_encode_map<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode_map<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
for pair in Map::get_pairs(self)? {
len += encode::Encodable::consensus_encode(
&pair,
&mut s,
)?;
len += encode::Encodable::consensus_encode(&pair, &mut s)?;
}
Ok(len + encode::Encodable::consensus_encode(&0x00_u8, s)?)


@ -214,7 +214,7 @@ mod display_from_str {
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl ::std::error::Error for PsbtParseError { }
impl ::std::error::Error for PsbtParseError {}
#[cfg_attr(docsrs, doc(cfg(feature = "base64")))]
impl Display for PartiallySignedTransaction {
@ -238,10 +238,7 @@ mod display_from_str {
pub use self::display_from_str::PsbtParseError;
impl Encodable for PartiallySignedTransaction {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += b"psbt".consensus_encode(&mut s)?;
@ -345,10 +342,7 @@ mod tests {
inputs: vec![],
outputs: vec![],
};
assert_eq!(
serialize_hex(&psbt),
"70736274ff01000a0200000000000000000000"
);
assert_eq!(serialize_hex(&psbt), "70736274ff01000a0200000000000000000000");
}
#[test]
@ -390,12 +384,8 @@ mod tests {
hd_keypaths.insert(pk.public_key, (fprint, dpath.into()));
let expected: Output = Output {
redeem_script: Some(hex_script!(
"76a914d0c59903c5bac2868760e90fd521a4665aa7652088ac"
)),
witness_script: Some(hex_script!(
"a9143545e6e33b832c47050f24d3eeb93c9c03948bc787"
)),
redeem_script: Some(hex_script!("76a914d0c59903c5bac2868760e90fd521a4665aa7652088ac")),
witness_script: Some(hex_script!("a9143545e6e33b832c47050f24d3eeb93c9c03948bc787")),
bip32_derivation: hd_keypaths,
..Default::default()
};
@ -441,13 +431,8 @@ mod tests {
version: 0,
proprietary: Default::default(),
unknown: Default::default(),
inputs: vec![
Input::default(),
],
outputs: vec![
Output::default(),
Output::default()
]
inputs: vec![Input::default()],
outputs: vec![Output::default(), Output::default()],
};
let actual: PartiallySignedTransaction = deserialize(&serialize(&expected)).unwrap();
@ -804,8 +789,7 @@ mod tests {
let psbt_non_witness_utxo = (&psbt.inputs[0].non_witness_utxo).as_ref().unwrap();
assert_eq!(tx_input.previous_output.txid, psbt_non_witness_utxo.txid());
assert!(
psbt_non_witness_utxo.output[tx_input.previous_output.vout as usize]
assert!(psbt_non_witness_utxo.output[tx_input.previous_output.vout as usize]
.script_pubkey
.is_p2pkh()
);
@ -871,9 +855,7 @@ mod tests {
let tx = &psbt.unsigned_tx;
assert_eq!(
tx.txid(),
Txid::from_hex(
"75c5c9665a570569ad77dd1279e6fd4628a093c4dcbf8d41532614044c14c115"
).unwrap()
Txid::from_hex("75c5c9665a570569ad77dd1279e6fd4628a093c4dcbf8d41532614044c14c115").unwrap(),
);
let mut unknown: BTreeMap<raw::Key, Vec<u8>> = BTreeMap::new();
@ -893,7 +875,6 @@ mod tests {
use super::*;
use super::serialize;
#[test]
fn invalid_vectors() {
let err = hex_psbt!("70736274ff010071020000000127744ababf3027fe0d6cf23a96eee2efb188ef52301954585883e69b6624b2420000000000ffffffff02787c01000000000016001483a7e34bd99ff03a4962ef8a1a101bb295461ece606b042a010000001600147ac369df1b20e033d6116623957b0ac49f3c52e8000000000001012b00f2052a010000002251205a2c2cf5b52cf31f83ad2e8da63ff03183ecd8f609c7510ae8a48e03910a075701172102fe349064c98d6e2a853fa3c9b12bd8b304a19c195c60efa7ee2393046d3fa232000000").unwrap_err();
@ -971,7 +952,7 @@ mod tests {
}
#[test]
fn serialize_and_deserialize_preimage_psbt(){
fn serialize_and_deserialize_preimage_psbt() {
// create a sha preimage map
let mut sha256_preimages = BTreeMap::new();
sha256_preimages.insert(sha256::Hash::hash(&[1u8, 2u8]), vec![1u8, 2u8]);
@ -1071,7 +1052,7 @@ mod tests {
unserialized.inputs[0].hash160_preimages = hash160_preimages;
unserialized.inputs[0].sha256_preimages = sha256_preimages;
let rtt : PartiallySignedTransaction = hex_psbt!(&serialize_hex(&unserialized)).unwrap();
let rtt: PartiallySignedTransaction = hex_psbt!(&serialize_hex(&unserialized)).unwrap();
assert_eq!(rtt, unserialized);
// Now add an ripemd160 with incorrect preimage
@ -1080,7 +1061,7 @@ mod tests {
unserialized.inputs[0].ripemd160_preimages = ripemd160_preimages;
// Now the roundtrip should fail as the preimage is incorrect.
let rtt : Result<PartiallySignedTransaction, _> = hex_psbt!(&serialize_hex(&unserialized));
let rtt: Result<PartiallySignedTransaction, _> = hex_psbt!(&serialize_hex(&unserialized));
assert!(rtt.is_err());
}
@ -1093,7 +1074,7 @@ mod tests {
key: b"test".to_vec(),
}, b"test".to_vec());
assert!(!psbt.proprietary.is_empty());
let rtt : PartiallySignedTransaction = hex_psbt!(&serialize_hex(&psbt)).unwrap();
let rtt: PartiallySignedTransaction = hex_psbt!(&serialize_hex(&psbt)).unwrap();
assert!(!rtt.proprietary.is_empty());
}


@ -99,18 +99,12 @@ impl Decodable for Key {
key.push(Decodable::consensus_decode(&mut d)?);
}
Ok(Key {
type_value,
key,
})
Ok(Key { type_value, key })
}
}
impl Encodable for Key {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let mut len = 0;
len += VarInt((self.key.len() + 1) as u64).consensus_encode(&mut s)?;
@ -125,10 +119,7 @@ impl Encodable for Key {
}
impl Encodable for Pair {
fn consensus_encode<S: io::Write>(
&self,
mut s: S,
) -> Result<usize, io::Error> {
fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
let len = self.key.consensus_encode(&mut s)?;
Ok(len + self.value.consensus_encode(s)?)
}
@ -159,11 +150,7 @@ impl<Subtype> Decodable for ProprietaryKey<Subtype> where Subtype: Copy + From<u
let subtype = Subtype::from(d.read_u8()?);
let key = read_to_end(d)?;
Ok(ProprietaryKey {
prefix,
subtype,
key
})
Ok(ProprietaryKey { prefix, subtype, key })
}
}


@ -446,7 +446,7 @@ impl TaprootBuilder {
Ok(TaprootSpendInfo::from_node_info(secp, internal_key, node))
}
pub(crate) fn branch(&self) -> &[Option<NodeInfo>]{
pub(crate) fn branch(&self) -> &[Option<NodeInfo>] {
&self.branch
}
@ -598,9 +598,7 @@ impl TaprootMerkleBranch {
if sl.len() % TAPROOT_CONTROL_NODE_SIZE != 0 {
Err(TaprootError::InvalidMerkleBranchSize(sl.len()))
} else if sl.len() > TAPROOT_CONTROL_NODE_SIZE * TAPROOT_CONTROL_MAX_NODE_COUNT {
Err(TaprootError::InvalidMerkleTreeDepth(
sl.len() / TAPROOT_CONTROL_NODE_SIZE,
))
Err(TaprootError::InvalidMerkleTreeDepth(sl.len() / TAPROOT_CONTROL_NODE_SIZE))
} else {
let inner = sl
// TODO: Use chunks_exact after MSRV changes to 1.31
@ -717,8 +715,7 @@ impl ControlBlock {
/// applied when encoding this element as a witness.
pub fn serialize(&self) -> Vec<u8> {
let mut buf = Vec::with_capacity(self.size());
self.encode(&mut buf)
.expect("writers don't error");
self.encode(&mut buf).expect("writers don't error");
buf
}
@ -874,8 +871,8 @@ impl fmt::UpperHex for LeafVersion {
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl ::serde::Serialize for LeafVersion {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: ::serde::Serializer,
where
S: ::serde::Serializer,
{
serializer.serialize_u8(self.to_consensus())
}
@ -885,7 +882,10 @@ impl ::serde::Serialize for LeafVersion {
#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<'de> ::serde::Deserialize<'de> for LeafVersion {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: ::serde::Deserializer<'de> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: ::serde::Deserializer<'de>
{
struct U8Visitor;
impl<'de> ::serde::de::Visitor<'de> for U8Visitor {
type Value = LeafVersion;
@ -936,11 +936,9 @@ impl fmt::Display for TaprootBuilderError {
"Attempted to create a tree with two nodes at depth 0. There must\
only be a exactly one node at depth 0",
),
TaprootBuilderError::InvalidMerkleTreeDepth(d) => write!(
f,
"Merkle Tree depth({}) must be less than {}",
d, TAPROOT_CONTROL_MAX_NODE_COUNT
),
TaprootBuilderError::InvalidMerkleTreeDepth(d) => {
write!(f, "Merkle Tree depth({}) must be less than {}", d, TAPROOT_CONTROL_MAX_NODE_COUNT)
}
TaprootBuilderError::InvalidInternalKey(e) => {
write!(f, "Invalid Internal XOnly key : {}", e)
}


@ -19,7 +19,7 @@
//!
macro_rules! construct_uint {
($name:ident, $n_words:expr) => (
($name:ident, $n_words:expr) => {
/// Little-endian large integer type
#[derive(Copy, Clone, PartialEq, Eq, Hash, Default)]
pub struct $name(pub [u64; $n_words]);
@ -169,7 +169,9 @@ macro_rules! construct_uint {
let &mut $name(ref mut arr) = self;
for i in 0..$n_words {
arr[i] = arr[i].wrapping_add(1);
if arr[i] != 0 { break; }
if arr[i] != 0 {
break;
}
}
}
}
@ -188,8 +190,12 @@ macro_rules! construct_uint {
// and the auto derive is a lexicographic ordering(i.e. memcmp)
// which with numbers is equivalent to big-endian
for i in 0..$n_words {
if self[$n_words - 1 - i] < other[$n_words - 1 - i] { return ::core::cmp::Ordering::Less; }
if self[$n_words - 1 - i] > other[$n_words - 1 - i] { return ::core::cmp::Ordering::Greater; }
if self[$n_words - 1 - i] < other[$n_words - 1 - i] {
return ::core::cmp::Ordering::Less;
}
if self[$n_words - 1 - i] > other[$n_words - 1 - i] {
return ::core::cmp::Ordering::Greater;
}
}
::core::cmp::Ordering::Equal
}
@ -499,7 +505,7 @@ macro_rules! construct_uint {
}
}
}
);
};
}
construct_uint!(Uint256, 4);