Use fewer vertical lines
In this library we deliberately do not use rustfmt and tend to favour terse statements that do not spread over extra lines unnecessarily. To help new devs understand the style, this commit modifies code that seems to use an unnecessary number of lines. None of these changes should reduce the readability of the code.
This commit is contained in:
parent a5c06e0a96
commit 71cf00a314
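To illustrate the intent before the hunks themselves, here is a minimal sketch (hypothetical Point type, not code from this commit): a field-init-shorthand constructor that formatting tools would spread over one line per field is collapsed so the struct literal sits on a single line, the same transformation the hunks below apply to OutPoint::new, Key, and ProprietaryKey.

    // Hypothetical example type, used only for illustration.
    struct Point { x: u32, y: u32 }

    impl Point {
        // Before: one line per field, five lines for a trivial constructor.
        // pub fn new(x: u32, y: u32) -> Point {
        //     Point {
        //         x,
        //         y,
        //     }
        // }

        // After: the struct literal on a single line; readability is unchanged.
        pub fn new(x: u32, y: u32) -> Point {
            Point { x, y }
        }
    }

The hunks below apply the same idea to constructors, assert_eq! calls, write! arms, and function signatures.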
@@ -1355,38 +1355,19 @@ mod test {
         let slop_v_nonmin: Result<Vec<Instruction>, Error> = nonminimal.instructions().collect();
         let slop_v_nonmin_alt: Result<Vec<Instruction>, Error> = nonminimal_alt.instructions().collect();

-        assert_eq!(
-            v_zero.unwrap(),
-            vec![
-                Instruction::PushBytes(&[]),
-            ]
-        );
-        assert_eq!(
-            v_zeropush.unwrap(),
-            vec![
-                Instruction::PushBytes(&[0]),
-            ]
-        );
+        assert_eq!(v_zero.unwrap(), vec![Instruction::PushBytes(&[])]);
+        assert_eq!(v_zeropush.unwrap(), vec![Instruction::PushBytes(&[0])]);

         assert_eq!(
             v_min.clone().unwrap(),
-            vec![
-                Instruction::PushBytes(&[105]),
-                Instruction::Op(opcodes::OP_NOP3),
-            ]
+            vec![Instruction::PushBytes(&[105]), Instruction::Op(opcodes::OP_NOP3)]
         );

-        assert_eq!(
-            v_nonmin.err().unwrap(),
-            Error::NonMinimalPush
-        );
+        assert_eq!(v_nonmin.err().unwrap(), Error::NonMinimalPush);

         assert_eq!(
             v_nonmin_alt.clone().unwrap(),
-            vec![
-                Instruction::PushBytes(&[105, 0]),
-                Instruction::Op(opcodes::OP_NOP3),
-            ]
+            vec![Instruction::PushBytes(&[105, 0]), Instruction::Op(opcodes::OP_NOP3)]
         );

         assert_eq!(v_min.clone().unwrap(), slop_v_min.unwrap());
@@ -59,10 +59,7 @@ impl OutPoint {
     /// Creates a new [`OutPoint`].
     #[inline]
     pub fn new(txid: Txid, vout: u32) -> OutPoint {
-        OutPoint {
-            txid,
-            vout,
-        }
+        OutPoint { txid, vout }
     }

     /// Creates a "null" `OutPoint`.
@@ -639,9 +636,7 @@ impl Decodable for Transaction {
                     }
                 }
                 // We don't support anything else
-                x => {
-                    Err(encode::Error::UnsupportedSegwitFlag(x))
-                }
+                x => Err(encode::Error::UnsupportedSegwitFlag(x)),
             }
         // non-segwit
         } else {
@@ -202,10 +202,8 @@ impl Witness {
         self.last = self.content.len();
         let element_len_varint = VarInt(new_element.len() as u64);
         let current_content_len = self.content.len();
-        self.content.resize(
-            current_content_len + element_len_varint.len() + new_element.len(),
-            0,
-        );
+        self.content
+            .resize(current_content_len + element_len_varint.len() + new_element.len(), 0);
         let end_varint = current_content_len + element_len_varint.len();
         element_len_varint
             .consensus_encode(&mut self.content[current_content_len..end_varint])
@@ -359,14 +357,9 @@ mod test {
         for (i, wit_el) in tx.input[0].witness.iter().enumerate() {
             assert_eq!(expected_wit[i], wit_el.to_hex());
         }
-        assert_eq!(
-            expected_wit[1],
-            tx.input[0].witness.last().unwrap().to_hex()
-        );
-        assert_eq!(
-            expected_wit[0],
-            tx.input[0].witness.second_to_last().unwrap().to_hex()
-        );
+        assert_eq!(expected_wit[1], tx.input[0].witness.last().unwrap().to_hex());
+        assert_eq!(expected_wit[0], tx.input[0].witness.second_to_last().unwrap().to_hex());
         let tx_bytes_back = serialize(&tx);
         assert_eq!(tx_bytes_back, tx_bytes);
     }
@@ -164,9 +164,7 @@ pub fn deserialize<T: Decodable>(data: &[u8]) -> Result<T, Error> {

 /// Deserialize an object from a vector, but will not report an error if said deserialization
 /// doesn't consume the entire vector.
-pub fn deserialize_partial<T: Decodable>(
-    data: &[u8],
-) -> Result<(T, usize), Error> {
+pub fn deserialize_partial<T: Decodable>(data: &[u8]) -> Result<(T, usize), Error> {
     let mut decoder = Cursor::new(data);
     let rv = Decodable::consensus_decode(&mut decoder)?;
     let consumed = decoder.position() as usize;
@@ -58,10 +58,7 @@ impl Address {
         if addr[0..3] == ONION {
             return Err(io::Error::from(io::ErrorKind::AddrNotAvailable));
         }
-        let ipv6 = Ipv6Addr::new(
-            addr[0],addr[1],addr[2],addr[3],
-            addr[4],addr[5],addr[6],addr[7]
-        );
+        let ipv6 = Ipv6Addr::new(addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7]);
         if let Some(ipv4) = ipv6.to_ipv4() {
             Ok(SocketAddr::V4(SocketAddrV4::new(ipv4, self.port)))
         } else {
@@ -190,10 +187,7 @@ impl Decodable for AddrV2 {
                 if addr[0..6] == [0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0xFFFF] {
                     return Err(encode::Error::ParseFailed("IPV4 wrapped address sent with IPv6 network id"));
                 }
-                AddrV2::Ipv6(Ipv6Addr::new(
-                    addr[0],addr[1],addr[2],addr[3],
-                    addr[4],addr[5],addr[6],addr[7]
-                ))
+                AddrV2::Ipv6(Ipv6Addr::new(addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7]))
             },
             3 => {
                 if len != 10 {
@@ -226,10 +220,7 @@ impl Decodable for AddrV2 {
                     return Err(encode::Error::ParseFailed("Invalid CJDNS address"));
                 }
                 let addr = addr_to_be(addr);
-                AddrV2::Cjdns(Ipv6Addr::new(
-                    addr[0],addr[1],addr[2],addr[3],
-                    addr[4],addr[5],addr[6],addr[7]
-                ))
+                AddrV2::Cjdns(Ipv6Addr::new(addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7]))
             },
             _ => {
                 // len already checked above to be <= 512
@@ -293,39 +293,16 @@ mod tests {

     #[test]
     fn serialize_test() {
-        assert_eq!(
-            serialize(&Network::Bitcoin.magic()),
-            &[0xf9, 0xbe, 0xb4, 0xd9]
-        );
-        assert_eq!(
-            serialize(&Network::Testnet.magic()),
-            &[0x0b, 0x11, 0x09, 0x07]
-        );
-        assert_eq!(
-            serialize(&Network::Signet.magic()),
-            &[0x0a, 0x03, 0xcf, 0x40]
-        );
-        assert_eq!(
-            serialize(&Network::Regtest.magic()),
-            &[0xfa, 0xbf, 0xb5, 0xda]
-        );
+        assert_eq!(serialize(&Network::Bitcoin.magic()), &[0xf9, 0xbe, 0xb4, 0xd9]);
+        assert_eq!(serialize(&Network::Testnet.magic()), &[0x0b, 0x11, 0x09, 0x07]);
+        assert_eq!(serialize(&Network::Signet.magic()), &[0x0a, 0x03, 0xcf, 0x40]);
+        assert_eq!(serialize(&Network::Regtest.magic()), &[0xfa, 0xbf, 0xb5, 0xda]);

-        assert_eq!(
-            deserialize(&[0xf9, 0xbe, 0xb4, 0xd9]).ok(),
-            Some(Network::Bitcoin.magic())
-        );
-        assert_eq!(
-            deserialize(&[0x0b, 0x11, 0x09, 0x07]).ok(),
-            Some(Network::Testnet.magic())
-        );
-        assert_eq!(
-            deserialize(&[0x0a, 0x03, 0xcf, 0x40]).ok(),
-            Some(Network::Signet.magic())
-        );
-        assert_eq!(
-            deserialize(&[0xfa, 0xbf, 0xb5, 0xda]).ok(),
-            Some(Network::Regtest.magic())
-        );
+        assert_eq!(deserialize(&[0xf9, 0xbe, 0xb4, 0xd9]).ok(), Some(Network::Bitcoin.magic()));
+        assert_eq!(deserialize(&[0x0b, 0x11, 0x09, 0x07]).ok(), Some(Network::Testnet.magic()));
+        assert_eq!(deserialize(&[0x0a, 0x03, 0xcf, 0x40]).ok(), Some(Network::Signet.magic()));
+        assert_eq!(deserialize(&[0xfa, 0xbf, 0xb5, 0xda]).ok(), Some(Network::Regtest.magic()));
     }

     #[test]
@@ -57,8 +57,7 @@ impl Encodable for Inventory {
     fn consensus_encode<S: io::Write>(&self, mut s: S) -> Result<usize, io::Error> {
         macro_rules! encode_inv {
             ($code:expr, $item:expr) => {
-                u32::consensus_encode(&$code, &mut s)? +
-                $item.consensus_encode(&mut s)?
+                u32::consensus_encode(&$code, &mut s)? + $item.consensus_encode(&mut s)?
             }
         }
         Ok(match *self {
@@ -94,17 +94,10 @@ impl fmt::Display for Error {
             Error::InvalidWitnessVersion(v) => write!(f, "invalid witness script version: {}", v),
             Error::UnparsableWitnessVersion(_) => write!(f, "incorrect format of a witness version byte"),
             Error::MalformedWitnessVersion => f.write_str("bitcoin script opcode does not match any known witness version, the script is malformed"),
-            Error::InvalidWitnessProgramLength(l) => write!(f,
-                "the witness program must be between 2 and 40 bytes in length: length={}", l,
-            ),
-            Error::InvalidSegwitV0ProgramLength(l) => write!(f,
-                "a v0 witness program must be either of length 20 or 32 bytes: length={}", l,
-            ),
-            Error::UncompressedPubkey => write!(f,
-                "an uncompressed pubkey was used where it is not allowed",
-            ),
-            Error::ExcessiveScriptSize => write!(f,
-                "Script size exceed 520 bytes")
+            Error::InvalidWitnessProgramLength(l) => write!(f, "the witness program must be between 2 and 40 bytes in length: length={}", l),
+            Error::InvalidSegwitV0ProgramLength(l) => write!(f, "a v0 witness program must be either of length 20 or 32 bytes: length={}", l),
+            Error::UncompressedPubkey => write!(f, "an uncompressed pubkey was used where it is not allowed"),
+            Error::ExcessiveScriptSize => write!(f, "Script size exceed 520 bytes"),
         }
     }
 }
@@ -395,14 +388,11 @@ impl Payload {
     /// Generates a script pubkey spending to this [Payload].
     pub fn script_pubkey(&self) -> script::Script {
         match *self {
-            Payload::PubkeyHash(ref hash) =>
-                script::Script::new_p2pkh(hash),
-            Payload::ScriptHash(ref hash) =>
-                script::Script::new_p2sh(hash),
-            Payload::WitnessProgram {
-                version,
-                program: ref prog,
-            } => script::Script::new_witness_program(version, prog)
+            Payload::PubkeyHash(ref hash) => script::Script::new_p2pkh(hash),
+            Payload::ScriptHash(ref hash) => script::Script::new_p2sh(hash),
+            Payload::WitnessProgram { version, program: ref prog } => {
+                script::Script::new_witness_program(version, prog)
+            }
         }
     }

@@ -622,10 +612,7 @@ impl Address {
     /// Creates a pay to taproot address from a pre-tweaked output key.
     ///
     /// This method is not recommended for use, [`Address::p2tr()`] should be used where possible.
-    pub fn p2tr_tweaked(
-        output_key: TweakedPublicKey,
-        network: Network
-    ) -> Address {
+    pub fn p2tr_tweaked(output_key: TweakedPublicKey, network: Network) -> Address {
         Address {
             network,
             payload: Payload::p2tr_tweaked(output_key),
@@ -961,10 +961,7 @@ impl<T> CheckedSum<SignedAmount> for T where T: Iterator<Item = SignedAmount> {
     fn checked_sum(mut self) -> Option<SignedAmount> {
         let first = Some(self.next().unwrap_or_default());

-        self.fold(
-            first,
-            |acc, item| acc.and_then(|acc| acc.checked_add(item))
-        )
+        self.fold(first, |acc, item| acc.and_then(|acc| acc.checked_add(item)))
     }
 }

@@ -1534,10 +1531,7 @@ mod tests {
                 samt: SignedAmount::from_sat(-123456789),
             },
             &[
-                serde_test::Token::Struct {
-                    name: "T",
-                    len: 2,
-                },
+                serde_test::Token::Struct { name: "T", len: 2 },
                 serde_test::Token::Str("amt"),
                 serde_test::Token::U64(123456789),
                 serde_test::Token::Str("samt"),
@@ -913,23 +913,14 @@ mod tests {
         let mut pk = ExtendedPubKey::from_priv(secp, &sk);

         // Check derivation convenience method for ExtendedPrivKey
-        assert_eq!(
-            &sk.derive_priv(secp, &path).unwrap().to_string()[..],
-            expected_sk
-        );
+        assert_eq!(&sk.derive_priv(secp, &path).unwrap().to_string()[..], expected_sk);

         // Check derivation convenience method for ExtendedPubKey, should error
         // appropriately if any ChildNumber is hardened
         if path.0.iter().any(|cnum| cnum.is_hardened()) {
-            assert_eq!(
-                pk.derive_pub(secp, &path),
-                Err(Error::CannotDeriveFromHardenedKey)
-            );
+            assert_eq!(pk.derive_pub(secp, &path), Err(Error::CannotDeriveFromHardenedKey));
         } else {
-            assert_eq!(
-                &pk.derive_pub(secp, &path).unwrap().to_string()[..],
-                expected_pk
-            );
+            assert_eq!(&pk.derive_pub(secp, &path).unwrap().to_string()[..], expected_pk);
         }

         // Derive keys, checking hardened and non-hardened derivation one-by-one
@@ -156,11 +156,7 @@ impl PublicKey {
         let mut bytes = [0; 65];

         reader.read_exact(&mut bytes[0..1])?;
-        let bytes = if bytes[0] < 4 {
-            &mut bytes[..33]
-        } else {
-            &mut bytes[..65]
-        };
+        let bytes = if bytes[0] < 4 { &mut bytes[..33] } else { &mut bytes[..65] };

         reader.read_exact(&mut bytes[1..])?;
         Self::from_slice(bytes).map_err(|e| {
@@ -285,10 +281,7 @@ impl PrivateKey {

     /// Deserialize a private key from a slice
     pub fn from_slice(data: &[u8], network: Network) -> Result<PrivateKey, Error> {
-        Ok(PrivateKey::new(
-            secp256k1::SecretKey::from_slice(data)?,
-            network,
-        ))
+        Ok(PrivateKey::new(secp256k1::SecretKey::from_slice(data)?, network))
     }

     /// Format the private key to WIF format.
@@ -189,9 +189,7 @@ impl PartialMerkleTree {
         }
         // there can never be more hashes provided than one for every txid
         if self.hashes.len() as u32 > self.num_transactions {
-            return Err(BadFormat(
-                "Proof contains more hashes than transactions".to_owned(),
-            ));
+            return Err(BadFormat("Proof contains more hashes than transactions".to_owned()));
         };
         // there must be at least one bit per node in the partial tree, and at least one node per hash
         if self.bits.len() < self.hashes.len() {
@@ -246,13 +244,7 @@ impl PartialMerkleTree {
     }

     /// Recursive function that traverses tree nodes, storing the data as bits and hashes
-    fn traverse_and_build(
-        &mut self,
-        height: u32,
-        pos: u32,
-        txids: &[Txid],
-        matches: &[bool],
-    ) {
+    fn traverse_and_build(&mut self, height: u32, pos: u32, txids: &[Txid], matches: &[bool]) {
         // Determine whether this node is the parent of at least one matched txid
         let mut parent_of_match = false;
         let mut p = pos << height;
@@ -342,10 +342,7 @@ mod tests {
             inputs: vec![],
             outputs: vec![],
         };
-        assert_eq!(
-            serialize_hex(&psbt),
-            "70736274ff01000a0200000000000000000000"
-        );
+        assert_eq!(serialize_hex(&psbt), "70736274ff01000a0200000000000000000000");
     }

     #[test]
@@ -387,12 +384,8 @@ mod tests {
         hd_keypaths.insert(pk.public_key, (fprint, dpath.into()));

         let expected: Output = Output {
-            redeem_script: Some(hex_script!(
-                "76a914d0c59903c5bac2868760e90fd521a4665aa7652088ac"
-            )),
-            witness_script: Some(hex_script!(
-                "a9143545e6e33b832c47050f24d3eeb93c9c03948bc787"
-            )),
+            redeem_script: Some(hex_script!("76a914d0c59903c5bac2868760e90fd521a4665aa7652088ac")),
+            witness_script: Some(hex_script!("a9143545e6e33b832c47050f24d3eeb93c9c03948bc787")),
             bip32_derivation: hd_keypaths,
             ..Default::default()
         };
@@ -438,13 +431,8 @@ mod tests {
             version: 0,
             proprietary: Default::default(),
             unknown: Default::default(),
-            inputs: vec![
-                Input::default(),
-            ],
-            outputs: vec![
-                Output::default(),
-                Output::default()
-            ]
+            inputs: vec![Input::default()],
+            outputs: vec![Output::default(), Output::default()],
         };

         let actual: PartiallySignedTransaction = deserialize(&serialize(&expected)).unwrap();
@@ -801,8 +789,7 @@ mod tests {
         let psbt_non_witness_utxo = (&psbt.inputs[0].non_witness_utxo).as_ref().unwrap();

         assert_eq!(tx_input.previous_output.txid, psbt_non_witness_utxo.txid());
-        assert!(
-            psbt_non_witness_utxo.output[tx_input.previous_output.vout as usize]
+        assert!(psbt_non_witness_utxo.output[tx_input.previous_output.vout as usize]
             .script_pubkey
             .is_p2pkh()
         );
@@ -868,9 +855,7 @@ mod tests {
         let tx = &psbt.unsigned_tx;
         assert_eq!(
             tx.txid(),
-            Txid::from_hex(
-                "75c5c9665a570569ad77dd1279e6fd4628a093c4dcbf8d41532614044c14c115"
-            ).unwrap()
+            Txid::from_hex("75c5c9665a570569ad77dd1279e6fd4628a093c4dcbf8d41532614044c14c115").unwrap(),
         );

         let mut unknown: BTreeMap<raw::Key, Vec<u8>> = BTreeMap::new();
@@ -99,10 +99,7 @@ impl Decodable for Key {
             key.push(Decodable::consensus_decode(&mut d)?);
         }

-        Ok(Key {
-            type_value,
-            key,
-        })
+        Ok(Key { type_value, key })
     }
 }

@@ -153,11 +150,7 @@ impl<Subtype> Decodable for ProprietaryKey<Subtype> where Subtype: Copy + From<u
         let subtype = Subtype::from(d.read_u8()?);
         let key = read_to_end(d)?;

-        Ok(ProprietaryKey {
-            prefix,
-            subtype,
-            key
-        })
+        Ok(ProprietaryKey { prefix, subtype, key })
     }
 }

@@ -598,9 +598,7 @@ impl TaprootMerkleBranch {
         if sl.len() % TAPROOT_CONTROL_NODE_SIZE != 0 {
             Err(TaprootError::InvalidMerkleBranchSize(sl.len()))
         } else if sl.len() > TAPROOT_CONTROL_NODE_SIZE * TAPROOT_CONTROL_MAX_NODE_COUNT {
-            Err(TaprootError::InvalidMerkleTreeDepth(
-                sl.len() / TAPROOT_CONTROL_NODE_SIZE,
-            ))
+            Err(TaprootError::InvalidMerkleTreeDepth(sl.len() / TAPROOT_CONTROL_NODE_SIZE))
         } else {
             let inner = sl
                 // TODO: Use chunks_exact after MSRV changes to 1.31
@@ -717,8 +715,7 @@ impl ControlBlock {
     /// applied when encoding this element as a witness.
     pub fn serialize(&self) -> Vec<u8> {
         let mut buf = Vec::with_capacity(self.size());
-        self.encode(&mut buf)
-            .expect("writers don't error");
+        self.encode(&mut buf).expect("writers don't error");
         buf
     }

@@ -939,11 +936,9 @@ impl fmt::Display for TaprootBuilderError {
                 "Attempted to create a tree with two nodes at depth 0. There must\
                 only be a exactly one node at depth 0",
             ),
-            TaprootBuilderError::InvalidMerkleTreeDepth(d) => write!(
-                f,
-                "Merkle Tree depth({}) must be less than {}",
-                d, TAPROOT_CONTROL_MAX_NODE_COUNT
-            ),
+            TaprootBuilderError::InvalidMerkleTreeDepth(d) => {
+                write!(f, "Merkle Tree depth({}) must be less than {}", d, TAPROOT_CONTROL_MAX_NODE_COUNT)
+            }
             TaprootBuilderError::InvalidInternalKey(e) => {
                 write!(f, "Invalid Internal XOnly key : {}", e)
             }