Refactor whitespace

Do various whitespace refactorings, of note:

- Use spaces around equals, e.g. 'since = "blah"'
- Put return/break/continue on a separate line

Whitespace only, no logic changes.
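
For illustration, a minimal before/after sketch of the two conventions; the function first_nonzero is hypothetical and not taken from this diff:

    // Before: no space around '=', return kept inline.
    #[deprecated(since="0.28.0", note="use the new constructor")]
    fn first_nonzero(bytes: &[u8]) -> Option<usize> {
        for (i, b) in bytes.iter().enumerate() {
            if *b != 0 { return Some(i); }
        }
        None
    }

    // After: spaces around '=', return on its own line.
    #[deprecated(since = "0.28.0", note = "use the new constructor")]
    fn first_nonzero(bytes: &[u8]) -> Option<usize> {
        for (i, b) in bytes.iter().enumerate() {
            if *b != 0 {
                return Some(i);
            }
        }
        None
    }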
Tobin Harding 2022-01-24 11:26:29 +11:00
parent 1c502399f1
commit bf4f5638e0
21 changed files with 90 additions and 78 deletions

@@ -175,7 +175,7 @@ impl Block {
    }

    /// check if merkle root of header matches merkle root of the transaction list
-    pub fn check_merkle_root (&self) -> bool {
+    pub fn check_merkle_root(&self) -> bool {
        match self.compute_merkle_root() {
            Some(merkle_root) => self.header.merkle_root == merkle_root,
            None => false,
@@ -229,7 +229,7 @@ impl Block {
    }

    /// Computes the witness commitment for the block's transaction list.
-    pub fn compute_witness_commitment (witness_root: &WitnessMerkleNode, witness_reserved_value: &[u8]) -> WitnessCommitment {
+    pub fn compute_witness_commitment(witness_root: &WitnessMerkleNode, witness_reserved_value: &[u8]) -> WitnessCommitment {
        let mut encoder = WitnessCommitment::engine();
        witness_root.consensus_encode(&mut encoder).expect("engines don't error");
        encoder.input(witness_reserved_value);

@@ -632,10 +632,10 @@ impl fmt::Debug for All {
            all::OP_NUMEQUAL => write!(f, "NUMEQUAL"),
            all::OP_NUMEQUALVERIFY => write!(f, "NUMEQUALVERIFY"),
            all::OP_NUMNOTEQUAL => write!(f, "NUMNOTEQUAL"),
            all::OP_LESSTHAN => write!(f, "LESSTHAN"),
            all::OP_GREATERTHAN => write!(f, "GREATERTHAN"),
            all::OP_LESSTHANOREQUAL => write!(f, "LESSTHANOREQUAL"),
            all::OP_GREATERTHANOREQUAL => write!(f, "GREATERTHANOREQUAL"),
            all::OP_MIN => write!(f, "MIN"),
            all::OP_MAX => write!(f, "MAX"),
            all::OP_WITHIN => write!(f, "WITHIN"),

@@ -141,11 +141,11 @@ impl fmt::Display for Error {
            Error::NonMinimalPush => "non-minimal datapush",
            Error::EarlyEndOfScript => "unexpected end of script",
            Error::NumericOverflow => "numeric overflow (number on stack larger than 4 bytes)",
-            #[cfg(feature="bitcoinconsensus")]
+            #[cfg(feature = "bitcoinconsensus")]
            Error::BitcoinConsensus(ref _n) => "bitcoinconsensus verification failed",
-            #[cfg(feature="bitcoinconsensus")]
+            #[cfg(feature = "bitcoinconsensus")]
            Error::UnknownSpentOutput(ref _point) => "unknown spent output Transaction::verify()",
-            #[cfg(feature="bitcoinconsensus")]
+            #[cfg(feature = "bitcoinconsensus")]
            Error::SerializationError => "can not serialize the spending transaction in Transaction::verify()",
        };
        f.write_str(str)
@@ -717,7 +717,7 @@ impl<'a> Iterator for Instructions<'a> {
                opcodes::Class::PushBytes(n) => {
                    let n = n as usize;
                    if self.data.len() < n + 1 {
                        self.data = &[]; // Kill iterator so that it does not return an infinite stream of errors
                        return Some(Err(Error::EarlyEndOfScript));
                    }
                    if self.enforce_minimal {
@@ -1395,7 +1395,7 @@ mod test {
    #[test]
    fn script_ord() {
-        let script_1 = Builder::new().push_slice(&[1,2,3,4]).into_script();
+        let script_1 = Builder::new().push_slice(&[1, 2, 3, 4]).into_script();
        let script_2 = Builder::new().push_int(10).into_script();
        let script_3 = Builder::new().push_int(15).into_script();
        let script_4 = Builder::new().push_opcode(opcodes::all::OP_RETURN).into_script();
@@ -1413,7 +1413,7 @@ mod test {
    }

    #[test]
-    #[cfg(feature="bitcoinconsensus")]
+    #[cfg(feature = "bitcoinconsensus")]
    fn test_bitcoinconsensus () {
        // a random segwit transaction from the blockchain using native segwit
        let spent = Builder::from(Vec::from_hex("0020701a8d401c84fb13e6baf169d59684e17abd9fa216c8cc5b9fc63d622ff8c58d").unwrap()).into_script();

@@ -137,7 +137,7 @@ impl fmt::Display for ParseOutPointError {
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl error::Error for ParseOutPointError {
    fn cause(&self) -> Option<&dyn error::Error> {
        match *self {
            ParseOutPointError::Txid(ref e) => Some(e),
@@ -674,8 +674,8 @@ impl fmt::Display for NonStandardSigHashType {
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
impl error::Error for NonStandardSigHashType {}

-/// Legacy Hashtype of an input's signature.
-#[deprecated(since="0.28.0", note="Please use [`EcdsaSigHashType`] instead")]
+/// Legacy Hashtype of an input's signature
+#[deprecated(since = "0.28.0", note = "Please use [`EcdsaSigHashType`] instead")]
pub type SigHashType = EcdsaSigHashType;

/// Hashtype of an input's signature, encoded in the last byte of the signature.
@@ -736,17 +736,17 @@ impl EcdsaSigHashType {
    /// Splits the sighash flag into the "real" sighash flag and the ANYONECANPAY boolean.
    pub(crate) fn split_anyonecanpay_flag(self) -> (EcdsaSigHashType, bool) {
        match self {
            EcdsaSigHashType::All => (EcdsaSigHashType::All, false),
            EcdsaSigHashType::None => (EcdsaSigHashType::None, false),
            EcdsaSigHashType::Single => (EcdsaSigHashType::Single, false),
            EcdsaSigHashType::AllPlusAnyoneCanPay => (EcdsaSigHashType::All, true),
            EcdsaSigHashType::NonePlusAnyoneCanPay => (EcdsaSigHashType::None, true),
            EcdsaSigHashType::SinglePlusAnyoneCanPay => (EcdsaSigHashType::Single, true)
        }
    }

    /// Reads a 4-byte uint32 as a sighash type.
-    #[deprecated(since="0.26.1", note="please use `from_u32_consensus` or `from_u32_standard` instead")]
+    #[deprecated(since = "0.26.1", note = "please use `from_u32_consensus` or `from_u32_standard` instead")]
    pub fn from_u32(n: u32) -> EcdsaSigHashType {
        Self::from_u32_consensus(n)
    }
@@ -1486,7 +1486,7 @@ mod tests {
        // test that we fail with repeated use of same input
        let mut double_spending = spending.clone();
        let re_use = double_spending.input[0].clone();
-        double_spending.input.push (re_use);
+        double_spending.input.push(re_use);
        assert!(double_spending.verify(|point: &OutPoint| {
            if let Some(tx) = spent2.remove(&point.txid) {

@@ -61,7 +61,7 @@ pub enum Error {
        actual: u32,
    },
    /// Tried to allocate an oversized vector
-    OversizedVectorAllocation{
+    OversizedVectorAllocation {
        /// The capacity requested
        requested: usize,
        /// The maximum capacity
@@ -333,7 +333,7 @@ pub struct VarInt(pub u64);
pub struct CheckedData(pub Vec<u8>);

// Primitive types
-macro_rules! impl_int_encodable{
+macro_rules! impl_int_encodable {
    ($ty:ident, $meth_dec:ident, $meth_enc:ident) => (
        impl Decodable for $ty {
            #[inline]
@@ -439,7 +439,6 @@ impl Decodable for VarInt {
    }
}

// Booleans
impl Encodable for bool {
    #[inline]
@@ -1033,7 +1032,7 @@ mod tests {
        let witness = vec![vec![0u8; 3_999_999]; 2];
        let ser = serialize(&witness);
        let mut reader = io::Cursor::new(ser);
        let err = Vec::<Vec<u8>>::consensus_decode(&mut reader);
        assert!(err.is_err());
    }

@@ -38,7 +38,7 @@ pub struct Address {
    pub port: u16
}

-const ONION : [u16; 3] = [0xFD87, 0xD87E, 0xEB43];
+const ONION: [u16; 3] = [0xFD87, 0xD87E, 0xEB43];

impl Address {
    /// Create an address message for a socket
@@ -287,7 +287,7 @@ impl Encodable for AddrV2Message {
impl Decodable for AddrV2Message {
    fn consensus_decode<D: io::Read>(mut d: D) -> Result<Self, encode::Error> {
-        Ok(AddrV2Message{
+        Ok(AddrV2Message {
            time: Decodable::consensus_decode(&mut d)?,
            services: ServiceFlags::from(VarInt::consensus_decode(&mut d)?.0),
            addr: Decodable::consensus_decode(&mut d)?,

@@ -385,4 +385,3 @@ mod tests {
        assert_eq!("ServiceFlags(WITNESS|COMPACT_FILTERS|0xb0)", flag.to_string());
    }
}

@@ -116,7 +116,7 @@ impl fmt::Display for CommandStringError {
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
#[cfg(feature = "std")]
-impl ::std::error::Error for CommandStringError { }
+impl ::std::error::Error for CommandStringError {}

/// A Network message
#[derive(Clone, Debug, PartialEq, Eq)]

@@ -41,7 +41,7 @@ impl<R: Read> fmt::Debug for StreamReader<R> {
impl<R: Read> StreamReader<R> {
    /// Constructs new stream reader for a given input stream `stream`
-    #[deprecated(since="0.28.0", note="wrap your stream into a buffered reader if necessary and use consensus_encode directly")]
+    #[deprecated(since = "0.28.0", note = "wrap your stream into a buffered reader if necessary and use consensus_encode directly")]
    pub fn new(stream: R, _buffer_size: Option<usize>) -> StreamReader<R> {
        StreamReader {
            stream: BufReader::new(stream),
@@ -49,7 +49,7 @@ impl<R: Read> StreamReader<R> {
    }

    /// Reads stream and parses next message from its current input
-    #[deprecated(since="0.28.0", note="wrap your stream into a buffered reader if necessary and use consensus_encode directly")]
+    #[deprecated(since = "0.28.0", note = "wrap your stream into a buffered reader if necessary and use consensus_encode directly")]
    pub fn read_next<D: Decodable>(&mut self) -> Result<D, encode::Error> {
        Decodable::consensus_decode(&mut self.stream)
    }

@@ -27,4 +27,3 @@ macro_rules! serde_round_trip (
        assert_eq!($var, decoded);
    })
);

@@ -1264,7 +1264,7 @@ mod tests {
    }

    #[test]
-    fn p2tr_from_untweaked(){
+    fn p2tr_from_untweaked() {
        //Test case from BIP-086
        let internal_key = XOnlyPublicKey::from_str("cc8a4bc64d897bddc5fbc2f670f7a8ba0b386779106cf1223c6fc5d7cd6fc115").unwrap();
        let secp = Secp256k1::verification_only();

@@ -175,7 +175,7 @@ impl fmt::Display for ParseAmountError {
            ParseAmountError::PossiblyConfusingDenomination(ref d) => {
                let (letter, upper, lower) = match d.chars().next() {
                    Some('M') => ('M', "Mega", "milli"),
-                    Some('P') => ('P',"Peta", "pico"),
+                    Some('P') => ('P', "Peta", "pico"),
                    // This panic could be avoided by adding enum ConfusingDenomination { Mega, Peta } but is it worth it?
                    _ => panic!("invalid error information"),
                };
@@ -599,7 +599,7 @@ impl FromStr for Amount {
}

impl ::core::iter::Sum for Amount {
-    fn sum<I: Iterator<Item=Self>>(iter: I) -> Self {
+    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
        let sats: u64 = iter.map(|amt| amt.0).sum();
        Amount::from_sat(sats)
    }
@@ -933,7 +933,7 @@ impl FromStr for SignedAmount {
}

impl ::core::iter::Sum for SignedAmount {
-    fn sum<I: Iterator<Item=Self>>(iter: I) -> Self {
+    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
        let sats: i64 = iter.map(|amt| amt.0).sum();
        SignedAmount::from_sat(sats)
    }
@@ -1187,7 +1187,7 @@ pub mod serde {
    ) -> Result<Option<A>, D::Error> {
        struct VisitOptAmt<X>(PhantomData<X>);

-        impl<'de, X :SerdeAmountForOpt> de::Visitor<'de> for VisitOptAmt<X> {
+        impl<'de, X: SerdeAmountForOpt> de::Visitor<'de> for VisitOptAmt<X> {
            type Value = Option<X>;

            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
@@ -1398,12 +1398,12 @@ mod tests {
        let sa = SignedAmount::from_sat;
        let ua = Amount::from_sat;

        assert_eq!(Amount::max_value().to_signed(), Err(E::TooBig));
        assert_eq!(ua(i64::max_value() as u64).to_signed(), Ok(sa(i64::max_value())));
        assert_eq!(ua(0).to_signed(), Ok(sa(0)));
-        assert_eq!(ua(1).to_signed(), Ok( sa(1)));
+        assert_eq!(ua(1).to_signed(), Ok(sa(1)));
        assert_eq!(ua(1).to_signed(), Ok(sa(1)));
        assert_eq!(ua(i64::max_value() as u64 + 1).to_signed(), Err(E::TooBig));

        assert_eq!(sa(-1).to_unsigned(), Err(E::Negative));
        assert_eq!(sa(i64::max_value()).to_unsigned(), Ok(ua(i64::max_value() as u64)));

@@ -33,7 +33,7 @@ use util::sighash;
/// Parts of a sighash which are common across inputs or signatures, and which are
/// sufficient (in conjunction with a private key) to sign the transaction
#[derive(Clone, PartialEq, Eq, Debug)]
-#[deprecated(since="0.24.0", note="please use [sighash::SigHashCache] instead")]
+#[deprecated(since = "0.24.0", note = "please use [sighash::SigHashCache] instead")]
pub struct SighashComponents {
    tx_version: i32,
    tx_locktime: u32,
@@ -107,13 +107,13 @@ impl SighashComponents {
}

/// A replacement for SigHashComponents which supports all sighash modes
-#[deprecated(since="0.27.0", note="please use [sighash::SigHashCache] instead")]
-pub struct SigHashCache<R: Deref<Target=Transaction>> {
+#[deprecated(since = "0.27.0", note = "please use [sighash::SigHashCache] instead")]
+pub struct SigHashCache<R: Deref<Target = Transaction>> {
    cache: sighash::SigHashCache<R>,
}

#[allow(deprecated)]
-impl<R: Deref<Target=Transaction>> SigHashCache<R> {
+impl<R: Deref<Target = Transaction>> SigHashCache<R> {
    /// Compute the sighash components from an unsigned transaction and auxiliary
    /// in a lazy manner when required.
    /// For the generated sighashes to be valid, no fields in the transaction may change except for
@@ -155,7 +155,7 @@ impl<R: Deref<Target=Transaction>> SigHashCache<R> {
}

#[allow(deprecated)]
-impl<R: DerefMut<Target=Transaction>> SigHashCache<R> {
+impl<R: DerefMut<Target = Transaction>> SigHashCache<R> {
    /// When the SigHashCache is initialized with a mutable reference to a transaction instead of a
    /// regular reference, this method is available to allow modification to the witnesses.
    ///

@@ -576,7 +576,7 @@ mod test {
    }

    #[test]
-    fn test_filter () {
+    fn test_filter() {
        let mut patterns = HashSet::new();
        patterns.insert(Vec::from_hex("000000").unwrap());

@@ -31,7 +31,6 @@ use hashes::{Hash, hash160, hex, hex::FromHex};
use hash_types::{PubkeyHash, WPubkeyHash};
use util::base58;

/// A key-related error.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum Error {
@@ -45,7 +44,6 @@ pub enum Error {
    Hex(hex::Error)
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
@@ -165,7 +163,7 @@ impl PublicKey {
        };
        reader.read_exact(&mut bytes[1..])?;
-        Self::from_slice(bytes).map_err(|e|{
+        Self::from_slice(bytes).map_err(|e| {
            // Need a static string for core2
            #[cfg(feature = "std")]
            let reason = e;
@@ -192,7 +190,9 @@ impl PublicKey {
        let compressed: bool = match data.len() {
            33 => true,
            65 => false,
-            len => { return Err(base58::Error::InvalidLength(len).into()); },
+            len => {
+                return Err(base58::Error::InvalidLength(len).into());
+            },
        };

        if !compressed && data[0] != 0x04 {
@@ -323,13 +323,17 @@ impl PrivateKey {
        let compressed = match data.len() {
            33 => false,
            34 => true,
-            _ => { return Err(Error::Base58(base58::Error::InvalidLength(data.len()))); }
+            _ => {
+                return Err(Error::Base58(base58::Error::InvalidLength(data.len())));
+            }
        };

        let network = match data[0] {
            128 => Network::Bitcoin,
            239 => Network::Testnet,
-            x => { return Err(Error::Base58(base58::Error::InvalidAddressVersion(x))); }
+            x => {
+                return Err(Error::Base58(base58::Error::InvalidAddressVersion(x)));
+            }
        };

        Ok(PrivateKey {

@@ -440,7 +440,7 @@ impl MerkleBlock {
    /// Create a MerkleBlock from a block, that contains proofs for specific txids.
    #[cfg(feature = "std")]
    #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
-    #[deprecated(since="0.26.2", note="use from_block_with_predicate")]
+    #[deprecated(since = "0.26.2", note = "use from_block_with_predicate")]
    pub fn from_block(block: &Block, match_txids: &::std::collections::HashSet<Txid>) -> Self {
        Self::from_block_with_predicate(block, |t| match_txids.contains(t))
    }
@@ -469,7 +469,7 @@ impl MerkleBlock {
    /// Create a MerkleBlock from the block's header and txids, that should contain proofs for match_txids.
    #[cfg(feature = "std")]
    #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
-    #[deprecated(since="0.26.2", note="use from_header_txids_with_predicate")]
+    #[deprecated(since = "0.26.2", note = "use from_header_txids_with_predicate")]
    pub fn from_header_txids(
        header: &BlockHeader,
        block_txids: &[Txid],

@@ -217,8 +217,12 @@ mod message_signing {
/// instance of it, returning the number of instances removed.
/// Loops through the vector opcode by opcode, skipping pushed data.
pub fn script_find_and_remove(haystack: &mut Vec<u8>, needle: &[u8]) -> usize {
-    if needle.len() > haystack.len() { return 0; }
-    if needle.is_empty() { return 0; }
+    if needle.len() > haystack.len() {
+        return 0;
+    }
+    if needle.is_empty() {
+        return 0;
+    }

    let mut top = haystack.len() - needle.len();
    let mut n_deleted = 0;
@@ -233,7 +237,9 @@ pub fn script_find_and_remove(haystack: &mut Vec<u8>, needle: &[u8]) -> usize {
            // This is ugly but prevents infinite loop in case of overflow
            let overflow = top < needle.len();
            top = top.wrapping_sub(needle.len());
-            if overflow { break; }
+            if overflow {
+                break;
+            }
        } else {
            i += match opcodes::All::from((*haystack)[i]).classify(opcodes::ClassifyContext::Legacy) {
                opcodes::Class::PushBytes(n) => n as usize + 1,

@@ -67,11 +67,11 @@ const PSBT_IN_TAP_SCRIPT_SIG: u8 = 0x14;
/// Type: Taproot Leaf Script PSBT_IN_TAP_LEAF_SCRIPT = 0x14
const PSBT_IN_TAP_LEAF_SCRIPT: u8 = 0x15;
/// Type: Taproot Key BIP 32 Derivation Path PSBT_IN_TAP_BIP32_DERIVATION = 0x16
-const PSBT_IN_TAP_BIP32_DERIVATION : u8 = 0x16;
+const PSBT_IN_TAP_BIP32_DERIVATION: u8 = 0x16;
/// Type: Taproot Internal Key PSBT_IN_TAP_INTERNAL_KEY = 0x17
-const PSBT_IN_TAP_INTERNAL_KEY : u8 = 0x17;
+const PSBT_IN_TAP_INTERNAL_KEY: u8 = 0x17;
/// Type: Taproot Merkle Root PSBT_IN_TAP_MERKLE_ROOT = 0x18
-const PSBT_IN_TAP_MERKLE_ROOT : u8 = 0x18;
+const PSBT_IN_TAP_MERKLE_ROOT: u8 = 0x18;

/// Type: Proprietary Use Type PSBT_IN_PROPRIETARY = 0xFC
const PSBT_IN_PROPRIETARY: u8 = 0xFC;
@@ -133,9 +133,9 @@ pub struct Input {
    #[cfg_attr(feature = "serde", serde(with = "::serde_utils::btreemap_as_seq"))]
    pub tap_key_origins: BTreeMap<XOnlyPublicKey, (Vec<TapLeafHash>, KeySource)>,
    /// Taproot Internal key.
-    pub tap_internal_key : Option<XOnlyPublicKey>,
+    pub tap_internal_key: Option<XOnlyPublicKey>,
    /// Taproot Merkle root.
-    pub tap_merkle_root : Option<TapBranchHash>,
+    pub tap_merkle_root: Option<TapBranchHash>,
    /// Proprietary key-value pairs for this input.
    #[cfg_attr(feature = "serde", serde(with = "::serde_utils::btreemap_as_seq_byte_values"))]
    pub proprietary: BTreeMap<raw::ProprietaryKey, Vec<u8>>,
@@ -157,13 +157,13 @@ pub struct PsbtSigHashType {
impl From<EcdsaSigHashType> for PsbtSigHashType {
    fn from(ecdsa_hash_ty: EcdsaSigHashType) -> Self {
-        PsbtSigHashType {inner: ecdsa_hash_ty as u32}
+        PsbtSigHashType { inner: ecdsa_hash_ty as u32 }
    }
}

impl From<SchnorrSigHashType> for PsbtSigHashType {
    fn from(schnorr_hash_ty: SchnorrSigHashType) -> Self {
-        PsbtSigHashType {inner: schnorr_hash_ty as u32}
+        PsbtSigHashType { inner: schnorr_hash_ty as u32 }
    }
}
@@ -289,7 +289,7 @@ impl Input {
                    self.tap_script_sigs <= <raw_key: (XOnlyPublicKey, TapLeafHash)>|<raw_value: SchnorrSig>
                }
            }
-            PSBT_IN_TAP_LEAF_SCRIPT=> {
+            PSBT_IN_TAP_LEAF_SCRIPT => {
                impl_psbt_insert_pair! {
                    self.tap_scripts <= <raw_key: ControlBlock>|< raw_value: (Script, LeafVersion)>
                }

@@ -214,7 +214,7 @@ mod display_from_str {
    #[cfg(feature = "std")]
    #[cfg_attr(docsrs, doc(cfg(feature = "std")))]
-    impl ::std::error::Error for PsbtParseError { }
+    impl ::std::error::Error for PsbtParseError {}

    #[cfg_attr(docsrs, doc(cfg(feature = "base64")))]
    impl Display for PartiallySignedTransaction {
@@ -893,7 +893,6 @@ mod tests {
    use super::*;
    use super::serialize;

    #[test]
    fn invalid_vectors() {
        let err = hex_psbt!("70736274ff010071020000000127744ababf3027fe0d6cf23a96eee2efb188ef52301954585883e69b6624b2420000000000ffffffff02787c01000000000016001483a7e34bd99ff03a4962ef8a1a101bb295461ece606b042a010000001600147ac369df1b20e033d6116623957b0ac49f3c52e8000000000001012b00f2052a010000002251205a2c2cf5b52cf31f83ad2e8da63ff03183ecd8f609c7510ae8a48e03910a075701172102fe349064c98d6e2a853fa3c9b12bd8b304a19c195c60efa7ee2393046d3fa232000000").unwrap_err();
@@ -971,7 +970,7 @@ mod tests {
    }

    #[test]
-    fn serialize_and_deserialize_preimage_psbt(){
+    fn serialize_and_deserialize_preimage_psbt() {
        // create a sha preimage map
        let mut sha256_preimages = BTreeMap::new();
        sha256_preimages.insert(sha256::Hash::hash(&[1u8, 2u8]), vec![1u8, 2u8]);
@@ -1071,7 +1070,7 @@ mod tests {
        unserialized.inputs[0].hash160_preimages = hash160_preimages;
        unserialized.inputs[0].sha256_preimages = sha256_preimages;

-        let rtt : PartiallySignedTransaction = hex_psbt!(&serialize_hex(&unserialized)).unwrap();
+        let rtt: PartiallySignedTransaction = hex_psbt!(&serialize_hex(&unserialized)).unwrap();
        assert_eq!(rtt, unserialized);

        // Now add an ripemd160 with incorrect preimage
@@ -1080,7 +1079,7 @@ mod tests {
        unserialized.inputs[0].ripemd160_preimages = ripemd160_preimages;

        // Now the roundtrip should fail as the preimage is incorrect.
-        let rtt : Result<PartiallySignedTransaction, _> = hex_psbt!(&serialize_hex(&unserialized));
+        let rtt: Result<PartiallySignedTransaction, _> = hex_psbt!(&serialize_hex(&unserialized));
        assert!(rtt.is_err());
    }
@@ -1093,7 +1092,7 @@ mod tests {
            key: b"test".to_vec(),
        }, b"test".to_vec());
        assert!(!psbt.proprietary.is_empty());
-        let rtt : PartiallySignedTransaction = hex_psbt!(&serialize_hex(&psbt)).unwrap();
+        let rtt: PartiallySignedTransaction = hex_psbt!(&serialize_hex(&psbt)).unwrap();
        assert!(!rtt.proprietary.is_empty());
    }

@@ -446,7 +446,7 @@ impl TaprootBuilder {
        Ok(TaprootSpendInfo::from_node_info(secp, internal_key, node))
    }

-    pub(crate) fn branch(&self) -> &[Option<NodeInfo>]{
+    pub(crate) fn branch(&self) -> &[Option<NodeInfo>] {
        &self.branch
    }

@@ -169,7 +169,9 @@ macro_rules! construct_uint {
                let &mut $name(ref mut arr) = self;
                for i in 0..$n_words {
                    arr[i] = arr[i].wrapping_add(1);
-                    if arr[i] != 0 { break; }
+                    if arr[i] != 0 {
+                        break;
+                    }
                }
            }
        }
@@ -188,8 +190,12 @@ macro_rules! construct_uint {
                // and the auto derive is a lexicographic ordering(i.e. memcmp)
                // which with numbers is equivalent to big-endian
                for i in 0..$n_words {
-                    if self[$n_words - 1 - i] < other[$n_words - 1 - i] { return ::core::cmp::Ordering::Less; }
-                    if self[$n_words - 1 - i] > other[$n_words - 1 - i] { return ::core::cmp::Ordering::Greater; }
+                    if self[$n_words - 1 - i] < other[$n_words - 1 - i] {
+                        return ::core::cmp::Ordering::Less;
+                    }
+                    if self[$n_words - 1 - i] > other[$n_words - 1 - i] {
+                        return ::core::cmp::Ordering::Greater;
+                    }
                }
                ::core::cmp::Ordering::Equal
            }