Merge rust-bitcoin/rust-bitcoin#1088: Add BIP152 (Compact Blocks) structures

ee29910911 BIP152: Test net msg ser/der and diff encoding (0xb10c)
cd1aaaf344 BIP152: Add Compact Block unit test based on Elements (Steven Roose)
d4d92a838e BIP152: Add Compact Blocks network messages (Steven Roose)
f2fcdc86e6 BIP152: Add basic Compact Block structures (Steven Roose)
a9a39c4b08 blockdata: Derive PartialOrd, Ord and Hash for BlockHeader (Steven Roose)

Pull request description:

  > Adds the basic structures for BIP152 and a method to create a compact block from a full block.

  This is a rebase of #249 by stevenroose (see https://github.com/rust-bitcoin/rust-bitcoin/pull/249#issuecomment-1170562141) with a milestone for 29.0. I've added deserialization and serialization tests for the network messages and fixed an off-by-one bug in the deserialization of the differentially encoded varints in the `getblocktxn` message (see https://github.com/rust-bitcoin/rust-bitcoin/pull/249#discussion_r914989521).
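
  To make the differential encoding concrete: each index in a `getblocktxn` request is transmitted as the difference to the previous index, minus one. A minimal standalone round-trip sketch of that rule (illustration only, not the crate's implementation; `diff_encode` and `diff_decode` are hypothetical helpers):

      fn diff_encode(indexes: &[u64]) -> Vec<u64> {
          // assumes `indexes` is sorted ascending with no duplicates
          let mut out = Vec::with_capacity(indexes.len());
          let mut last = 0u64;
          for &idx in indexes {
              out.push(idx - last);
              last = idx + 1;
          }
          out
      }

      fn diff_decode(diffs: &[u64]) -> Vec<u64> {
          let mut out = Vec::with_capacity(diffs.len());
          let mut last = 0u64;
          for &d in diffs {
              let idx = last + d;
              out.push(idx);
              last = idx + 1; // the "+ 1" mirrors the "minus one" in the encoding rule
          }
          out
      }

      // diff_encode(&[0, 6, 8, 19]) == [0, 5, 1, 10]
      // diff_decode(&[0, 5, 1, 10]) == [0, 6, 8, 19]  (one of the test vectors added in this PR)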

  Closes #249.

ACKs for top commit:
  tcharding:
    ACK ee29910911
  apoelstra:
    ACK ee29910911

Tree-SHA512: 462a91576281f5a2ffdc2610769ea93970b60dac75a150c827966c48daec7cf93f526f9f202e7ba1dbb1410b49148579880260a3c3df298b98330c0d891a4cca
Commit 5b707069e5 by Andrew Poelstra, 2022-07-25 17:34:21 +00:00
GPG key ID: C588D63CE41B97C1
8 changed files with 574 additions and 1 deletion


@ -37,7 +37,7 @@ use crate::internal_macros::impl_consensus_encoding;
/// ### Bitcoin Core References
///
/// * [CBlockHeader definition](https://github.com/bitcoin/bitcoin/blob/345457b542b6a980ccfbc868af0970a6f91d1b82/src/primitives/block.h#L20)
#[derive(Copy, PartialEq, Eq, Clone, Debug)]
#[derive(Copy, PartialEq, Eq, Clone, Debug, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(crate = "actual_serde"))]
pub struct BlockHeader {


@ -27,6 +27,7 @@ use crate::io::{self, Cursor, Read};
use crate::util::endian;
use crate::util::psbt;
use crate::util::bip152::{ShortId, PrefilledTransaction};
use crate::util::taproot::TapLeafHash;
use crate::hashes::hex::ToHex;
@ -558,6 +559,7 @@ macro_rules! impl_array {
impl_array!(2);
impl_array!(4);
impl_array!(6);
impl_array!(8);
impl_array!(10);
impl_array!(12);
@ -629,6 +631,9 @@ impl_vec!(TxIn);
impl_vec!(Vec<u8>);
impl_vec!(u64);
impl_vec!(TapLeafHash);
impl_vec!(VarInt);
impl_vec!(ShortId);
impl_vec!(PrefilledTransaction);
#[cfg(feature = "std")] impl_vec!(Inventory);
#[cfg(feature = "std")] impl_vec!((u32, Address));


@ -19,6 +19,7 @@ use crate::network::address::{Address, AddrV2Message};
use crate::network::{message_network, message_bloom};
use crate::network::message_blockdata;
use crate::network::message_filter;
use crate::network::message_compact_blocks;
use crate::consensus::encode::{CheckedData, Decodable, Encodable, VarInt};
use crate::consensus::{encode, serialize};
use crate::util::merkleblock::MerkleBlock;
@ -181,6 +182,14 @@ pub enum NetworkMessage {
GetCFCheckpt(message_filter::GetCFCheckpt),
/// BIP157 cfcheckpt
CFCheckpt(message_filter::CFCheckpt),
/// BIP152 sendcmpct
SendCmpct(message_compact_blocks::SendCmpct),
/// BIP152 cmpctblock
CmpctBlock(message_compact_blocks::CmpctBlock),
/// BIP152 getblocktxn
GetBlockTxn(message_compact_blocks::GetBlockTxn),
/// BIP152 blocktxn
BlockTxn(message_compact_blocks::BlockTxn),
/// `alert`
Alert(Vec<u8>),
/// `reject`
@ -237,6 +246,10 @@ impl NetworkMessage {
NetworkMessage::CFHeaders(_) => "cfheaders",
NetworkMessage::GetCFCheckpt(_) => "getcfcheckpt",
NetworkMessage::CFCheckpt(_) => "cfcheckpt",
NetworkMessage::SendCmpct(_) => "sendcmpct",
NetworkMessage::CmpctBlock(_) => "cmpctblock",
NetworkMessage::GetBlockTxn(_) => "getblocktxn",
NetworkMessage::BlockTxn(_) => "blocktxn",
NetworkMessage::Alert(_) => "alert",
NetworkMessage::Reject(_) => "reject",
NetworkMessage::FeeFilter(_) => "feefilter",
@ -314,6 +327,10 @@ impl Encodable for RawNetworkMessage {
NetworkMessage::CFHeaders(ref dat) => serialize(dat),
NetworkMessage::GetCFCheckpt(ref dat) => serialize(dat),
NetworkMessage::CFCheckpt(ref dat) => serialize(dat),
NetworkMessage::SendCmpct(ref dat) => serialize(dat),
NetworkMessage::CmpctBlock(ref dat) => serialize(dat),
NetworkMessage::GetBlockTxn(ref dat) => serialize(dat),
NetworkMessage::BlockTxn(ref dat) => serialize(dat),
NetworkMessage::Alert(ref dat) => serialize(dat),
NetworkMessage::Reject(ref dat) => serialize(dat),
NetworkMessage::FeeFilter(ref data) => serialize(data),
@ -394,6 +411,10 @@ impl Decodable for RawNetworkMessage {
"reject" => NetworkMessage::Reject(Decodable::consensus_decode_from_finite_reader(&mut mem_d)?),
"alert" => NetworkMessage::Alert(Decodable::consensus_decode_from_finite_reader(&mut mem_d)?),
"feefilter" => NetworkMessage::FeeFilter(Decodable::consensus_decode_from_finite_reader(&mut mem_d)?),
"sendcmpct" => NetworkMessage::SendCmpct(Decodable::consensus_decode_from_finite_reader(&mut mem_d)?),
"cmpctblock" => NetworkMessage::CmpctBlock(Decodable::consensus_decode_from_finite_reader(&mut mem_d)?),
"getblocktxn" => NetworkMessage::GetBlockTxn(Decodable::consensus_decode_from_finite_reader(&mut mem_d)?),
"blocktxn" => NetworkMessage::BlockTxn(Decodable::consensus_decode_from_finite_reader(&mut mem_d)?),
"wtxidrelay" => NetworkMessage::WtxidRelay,
"addrv2" => NetworkMessage::AddrV2(Decodable::consensus_decode_from_finite_reader(&mut mem_d)?),
"sendaddrv2" => NetworkMessage::SendAddrV2,
@ -432,6 +453,8 @@ mod test {
use crate::blockdata::script::Script;
use crate::network::message_bloom::{FilterAdd, FilterLoad, BloomFlags};
use crate::MerkleBlock;
use crate::network::message_compact_blocks::{GetBlockTxn, SendCmpct};
use crate::util::bip152::BlockTransactionsRequest;
fn hash(slice: [u8;32]) -> Hash {
Hash::from_slice(&slice).unwrap()
@ -446,6 +469,9 @@ mod test {
let header: BlockHeader = deserialize(&Vec::from_hex("010000004ddccd549d28f385ab457e98d1b11ce80bfea2c5ab93015ade4973e400000000bf4473e53794beae34e64fccc471dace6ae544180816f89591894e0f417a914cd74d6e49ffff001d323b3a7b").unwrap()).unwrap();
let script: Script = deserialize(&Vec::from_hex("1976a91431a420903c05a0a7de2de40c9f02ebedbacdc17288ac").unwrap()).unwrap();
let merkle_block: MerkleBlock = deserialize(&Vec::from_hex("0100000079cda856b143d9db2c1caff01d1aecc8630d30625d10e8b4b8b0000000000000b50cc069d6a3e33e3ff84a5c41d9d3febe7c770fdcc96b2c3ff60abe184f196367291b4d4c86041b8fa45d630100000001b50cc069d6a3e33e3ff84a5c41d9d3febe7c770fdcc96b2c3ff60abe184f19630101").unwrap()).unwrap();
let cmptblock = deserialize(&Vec::from_hex("00000030d923ad36ff2d955abab07f8a0a6e813bc6e066b973e780c5e36674cad5d1cd1f6e265f2a17a0d35cbe701fe9d06e2c6324cfe135f6233e8b767bfa3fb4479b71115dc562ffff7f2006000000000000000000000000010002000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0302ee00ffffffff0100f9029500000000015100000000").unwrap()).unwrap();
let blocktxn = deserialize(&Vec::from_hex("2e93c0cff39ff605020072d96bc3a8d20b8447e294d08092351c8583e08d9b5a01020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0402dc0000ffffffff0200f90295000000001976a9142b4569203694fc997e13f2c0a1383b9e16c77a0d88ac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000").unwrap()).unwrap();
let msgs = vec![
NetworkMessage::Version(version_msg),
@ -481,6 +507,10 @@ mod test {
NetworkMessage::WtxidRelay,
NetworkMessage::AddrV2(vec![AddrV2Message{ addr: AddrV2::Ipv4(Ipv4Addr::new(127, 0, 0, 1)), port: 0, services: ServiceFlags::NONE, time: 0 }]),
NetworkMessage::SendAddrV2,
NetworkMessage::CmpctBlock(cmptblock),
NetworkMessage::GetBlockTxn(GetBlockTxn { txs_request: BlockTransactionsRequest { block_hash: hash([11u8; 32]).into(), indexes: vec![0, 1, 2, 3, 10, 3002] } }),
NetworkMessage::BlockTxn(blocktxn),
NetworkMessage::SendCmpct(SendCmpct{send_compact: true, version: 8333}),
];
for msg in msgs {


@ -27,6 +27,8 @@ pub enum Inventory {
Transaction(Txid),
/// Block
Block(BlockHash),
/// Compact Block
CompactBlock(BlockHash),
/// Witness Transaction by Wtxid
WTx(Wtxid),
/// Witness Transaction
@ -54,6 +56,7 @@ impl Encodable for Inventory {
Inventory::Error => encode_inv!(0, sha256d::Hash::all_zeros()),
Inventory::Transaction(ref t) => encode_inv!(1, t),
Inventory::Block(ref b) => encode_inv!(2, b),
Inventory::CompactBlock(ref b) => encode_inv!(4, b),
Inventory::WTx(w) => encode_inv!(5, w),
Inventory::WitnessTransaction(ref t) => encode_inv!(0x40000001, t),
Inventory::WitnessBlock(ref b) => encode_inv!(0x40000002, b),
@ -70,6 +73,7 @@ impl Decodable for Inventory {
0 => Inventory::Error,
1 => Inventory::Transaction(Decodable::consensus_decode(r)?),
2 => Inventory::Block(Decodable::consensus_decode(r)?),
4 => Inventory::CompactBlock(Decodable::consensus_decode(r)?),
5 => Inventory::WTx(Decodable::consensus_decode(r)?),
0x40000001 => Inventory::WitnessTransaction(Decodable::consensus_decode(r)?),
0x40000002 => Inventory::WitnessBlock(Decodable::consensus_decode(r)?),


@ -0,0 +1,45 @@
//!
//! BIP152 Compact Blocks network messages
//!
use crate::internal_macros::impl_consensus_encoding;
use crate::util::bip152;
/// sendcmpct message
#[derive(PartialEq, Eq, Clone, Debug, Copy, PartialOrd, Ord, Hash)]
pub struct SendCmpct {
/// Request to be sent compact blocks.
pub send_compact: bool,
/// Compact Blocks protocol version number.
pub version: u64,
}
impl_consensus_encoding!(SendCmpct, send_compact, version);
/// cmpctblock message
///
/// Note that the rules for validation before relaying compact blocks are
/// different from those for headers and regular block messages. Thus, you
/// shouldn't use compact blocks when relying on an upstream full node to have
/// validated the data being forwarded to you.
#[derive(PartialEq, Eq, Clone, Debug, PartialOrd, Ord, Hash)]
pub struct CmpctBlock {
/// The Compact Block.
pub compact_block: bip152::HeaderAndShortIds,
}
impl_consensus_encoding!(CmpctBlock, compact_block);
/// getblocktxn message
#[derive(PartialEq, Eq, Clone, Debug, PartialOrd, Ord, Hash)]
pub struct GetBlockTxn {
/// The block transactions request.
pub txs_request: bip152::BlockTransactionsRequest,
}
impl_consensus_encoding!(GetBlockTxn, txs_request);
/// blocktxn message
#[derive(PartialEq, Eq, Clone, Debug, PartialOrd, Ord, Hash)]
pub struct BlockTxn {
/// The requested block transactions.
pub transactions: bip152::BlockTransactions,
}
impl_consensus_encoding!(BlockTxn, transactions);
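
For context, a minimal sketch of how these messages fit together on the announcing side (assuming the crate and module paths added in this PR; the nonce and the `request_compact_blocks`/`announce_block` helpers are illustrative, not part of the change):

use bitcoin::network::message::NetworkMessage;
use bitcoin::network::message_compact_blocks::{CmpctBlock, SendCmpct};
use bitcoin::util::bip152::{self, HeaderAndShortIds};
use bitcoin::Block;

// A node that wants wtxid-based (version 2) compact block announcements
// signals this to its peer with a `sendcmpct` message.
fn request_compact_blocks() -> NetworkMessage {
    NetworkMessage::SendCmpct(SendCmpct { send_compact: true, version: 2 })
}

// The peer can then relay a new block as a `cmpctblock`: header, nonce and
// short IDs, with only the coinbase transaction prefilled.
fn announce_block(block: &Block, nonce: u64) -> Result<NetworkMessage, bip152::Error> {
    let compact = HeaderAndShortIds::from_block(block, nonce, 2, &[])?;
    Ok(NetworkMessage::CmpctBlock(CmpctBlock { compact_block: compact }))
}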


@ -26,6 +26,9 @@ pub mod message_blockdata;
pub mod message_bloom;
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub mod message_compact_blocks;
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub mod message_network;
#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]

src/util/bip152.rs (new file, 485 lines)

@ -0,0 +1,485 @@
// SPDX-License-Identifier: CC0-1.0
//! BIP152 Compact Blocks
//!
//! Implementation of the compact block data structures and algorithms.
//!
use crate::prelude::*;
use crate::io;
use core::{convert, convert::TryFrom, fmt, mem};
#[cfg(feature = "std")]
use std::error;
use crate::consensus::encode::{self, Decodable, Encodable, VarInt};
use crate::hashes::{sha256, siphash24, Hash};
use crate::internal_macros::{impl_array_newtype, impl_bytes_newtype, impl_consensus_encoding};
use crate::util::endian;
use crate::{Block, BlockHash, BlockHeader, Transaction};
/// A BIP-152 error
#[derive(Clone, PartialEq, Eq, Debug, Copy, PartialOrd, Ord, Hash)]
pub enum Error {
/// An unknown version number was used.
UnknownVersion,
/// The prefill slice provided was invalid.
InvalidPrefill,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::UnknownVersion => write!(f, "an unknown version number was used"),
Error::InvalidPrefill => write!(f, "the prefill slice provided was invalid"),
}
}
}
#[cfg(feature = "std")]
impl error::Error for Error {}
/// A [PrefilledTransaction] structure is used in [HeaderAndShortIds] to
/// provide a list of a few transactions explicitly.
#[derive(PartialEq, Eq, Clone, Debug, PartialOrd, Ord, Hash)]
pub struct PrefilledTransaction {
/// The index of the transaction in the block.
///
/// This field is differentially encoded relative to the previous
/// prefilled transaction as described as follows:
///
/// > Several uses of CompactSize below are "differentially encoded". For
/// > these, instead of using raw indexes, the number encoded is the
/// > difference between the current index and the previous index, minus one.
/// > For example, a first index of 0 implies a real index of 0, a second
/// > index of 0 thereafter refers to a real index of 1, etc.
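///
/// For example, prefilling the transactions at block indexes 0, 2 and 5 is
/// encoded as the `idx` sequence 0, 1, 2 (i.e. 0, 2 - 0 - 1, 5 - 2 - 1).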
pub idx: u16,
/// The actual transaction.
pub tx: Transaction,
}
impl convert::AsRef<Transaction> for PrefilledTransaction {
fn as_ref(&self) -> &Transaction {
&self.tx
}
}
impl Encodable for PrefilledTransaction {
#[inline]
fn consensus_encode<S: io::Write + ?Sized>(&self, mut s: &mut S) -> Result<usize, io::Error> {
Ok(VarInt(self.idx as u64).consensus_encode(&mut s)? + self.tx.consensus_encode(&mut s)?)
}
}
impl Decodable for PrefilledTransaction {
#[inline]
fn consensus_decode<D: io::Read + ?Sized>(
mut d: &mut D,
) -> Result<PrefilledTransaction, encode::Error> {
let idx = VarInt::consensus_decode(&mut d)?.0;
let idx = u16::try_from(idx)
.map_err(|_| encode::Error::ParseFailed("BIP152 prefilled tx index out of bounds"))?;
let tx = Transaction::consensus_decode(&mut d)?;
Ok(PrefilledTransaction { idx, tx })
}
}
/// Short transaction IDs are used to represent a transaction without sending a full 256-bit hash.
#[derive(PartialEq, Eq, Clone, Copy, Hash, Default, PartialOrd, Ord)]
pub struct ShortId([u8; 6]);
impl_array_newtype!(ShortId, u8, 6);
impl_bytes_newtype!(ShortId, 6);
impl ShortId {
/// Calculate the SipHash24 keys used to calculate short IDs.
pub fn calculate_siphash_keys(header: &BlockHeader, nonce: u64) -> (u64, u64) {
// 1. single-SHA256 hashing the block header with the nonce appended (in little-endian)
let h = {
let mut engine = sha256::Hash::engine();
header.consensus_encode(&mut engine).expect("engines don't error");
nonce.consensus_encode(&mut engine).expect("engines don't error");
sha256::Hash::from_engine(engine)
};
// 2. Running SipHash-2-4 with the input being the transaction ID and the keys (k0/k1)
// set to the first two little-endian 64-bit integers from the above hash, respectively.
(endian::slice_to_u64_le(&h[0..8]), endian::slice_to_u64_le(&h[8..16]))
}
/// Calculate the short ID for the given (w)txid using the provided SipHash keys.
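///
/// For example, the receiver of a version-2 `cmpctblock` can check whether a
/// mempool transaction matches one of the announced short IDs by comparing
/// `ShortId::with_siphash_keys(&tx.wtxid().as_hash(), keys)` against the
/// received ID, where `keys` comes from [ShortId::calculate_siphash_keys]
/// called with the announcement's header and nonce.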
pub fn with_siphash_keys<T: AsRef<[u8]>>(txid: &T, siphash_keys: (u64, u64)) -> ShortId {
// 2. Running SipHash-2-4 with the input being the transaction ID and the keys (k0/k1)
// set to the first two little-endian 64-bit integers from the above hash, respectively.
let hash = siphash24::Hash::hash_with_keys(siphash_keys.0, siphash_keys.1, txid.as_ref());
// 3. Dropping the 2 most significant bytes from the SipHash output to make it 6 bytes.
let mut id = ShortId([0; 6]);
id.0.copy_from_slice(&hash[0..6]);
id
}
}
impl Encodable for ShortId {
#[inline]
fn consensus_encode<S: io::Write + ?Sized>(&self, s: &mut S) -> Result<usize, io::Error> {
self.0.consensus_encode(s)
}
}
impl Decodable for ShortId {
#[inline]
fn consensus_decode<D: io::Read + ?Sized>(d: &mut D) -> Result<ShortId, encode::Error> {
Ok(ShortId(Decodable::consensus_decode(d)?))
}
}
/// A [HeaderAndShortIds] structure is used to relay a block header, the short
/// transaction IDs used for matching already-available transactions, and a
/// select few transactions which we expect a peer may be missing.
#[derive(PartialEq, Eq, Clone, Debug, PartialOrd, Ord, Hash)]
pub struct HeaderAndShortIds {
/// The header of the block being provided.
pub header: BlockHeader,
/// A nonce for use in short transaction ID calculations.
pub nonce: u64,
/// The short transaction IDs calculated from the transactions
/// which were not provided explicitly in prefilled_txs.
pub short_ids: Vec<ShortId>,
/// Used to provide the coinbase transaction and a select few
/// which we expect a peer may be missing.
pub prefilled_txs: Vec<PrefilledTransaction>,
}
impl_consensus_encoding!(HeaderAndShortIds, header, nonce, short_ids, prefilled_txs);
impl HeaderAndShortIds {
/// Create a new [HeaderAndShortIds] from a full block.
///
/// The version number must be either 1 or 2.
///
/// The `prefill` slice indicates which transactions should be prefilled in
/// the block. It should contain the indexes in the block of the txs to
/// prefill. It must be sorted in ascending order. 0 should not be
/// included as the coinbase tx is always prefilled.
///
/// > Nodes SHOULD NOT use the same nonce across multiple different blocks.
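///
/// For example, calling this with `prefill = &[2]` on a three-transaction
/// block prefills the coinbase (index 0) and the transaction at index 2
/// (encoded as differential indexes 0 and 1) and produces a short ID only
/// for the transaction at index 1.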
pub fn from_block(
block: &Block,
nonce: u64,
version: u32,
mut prefill: &[usize],
) -> Result<HeaderAndShortIds, Error> {
if version != 1 && version != 2 {
return Err(Error::UnknownVersion);
}
let siphash_keys = ShortId::calculate_siphash_keys(&block.header, nonce);
let mut prefilled = Vec::with_capacity(prefill.len() + 1); // +1 for coinbase tx
let mut short_ids = Vec::with_capacity(block.txdata.len() - prefill.len());
let mut last_prefill = 0;
for (idx, tx) in block.txdata.iter().enumerate() {
// Check if we should prefill this tx.
let prefill_tx = if prefill.get(0) == Some(&idx) {
prefill = &prefill[1..];
true
} else {
idx == 0 // Always prefill coinbase.
};
if prefill_tx {
let diff_idx = idx - last_prefill;
last_prefill = idx + 1;
prefilled.push(PrefilledTransaction {
idx: diff_idx as u16,
tx: match version {
// > As encoded in "tx" messages sent in response to getdata MSG_TX
1 => {
// strip witness for version 1
let mut no_witness = tx.clone();
no_witness.input.iter_mut().for_each(|i| i.witness.clear());
no_witness
}
// > Transactions inside cmpctblock messages (both those used as direct
// > announcement and those in response to getdata) and in blocktxn should
// > include witness data, using the same format as responses to getdata
// > MSG_WITNESS_TX, specified in BIP144.
2 => tx.clone(),
_ => unreachable!(),
},
});
} else {
short_ids.push(ShortId::with_siphash_keys(
&match version {
1 => tx.txid().as_hash(),
2 => tx.wtxid().as_hash(),
_ => unreachable!(),
},
siphash_keys,
));
}
}
if !prefill.is_empty() {
return Err(Error::InvalidPrefill);
}
Ok(HeaderAndShortIds {
header: block.header,
nonce,
// Provide coinbase prefilled.
prefilled_txs: prefilled,
short_ids,
})
}
}
/// A [BlockTransactionsRequest] structure is used to list transaction indexes
/// in a block being requested.
#[derive(PartialEq, Eq, Clone, Debug, PartialOrd, Ord, Hash)]
pub struct BlockTransactionsRequest {
/// The blockhash of the block which the transactions being requested are in.
pub block_hash: BlockHash,
/// The indexes of the transactions being requested in the block.
///
/// Warning: Encoding panics with [`u64::MAX`] values. See [`BlockTransactionsRequest::consensus_encode()`]
pub indexes: Vec<u64>,
}
impl Encodable for BlockTransactionsRequest {
/// # Panics
///
/// Panics if the index overflows [`u64::MAX`]. This happens when [`BlockTransactionsRequest::indexes`]
/// contains an entry with the value [`u64::MAX`] as `u64` overflows during differential encoding.
fn consensus_encode<S: io::Write + ?Sized>(&self, mut s: &mut S) -> Result<usize, io::Error> {
let mut len = self.block_hash.consensus_encode(&mut s)?;
// Manually encode indexes because they are differentially encoded VarInts.
len += VarInt(self.indexes.len() as u64).consensus_encode(&mut s)?;
let mut last_idx = 0;
for idx in &self.indexes {
len += VarInt(*idx - last_idx).consensus_encode(&mut s)?;
last_idx = *idx + 1; // can panic here
}
Ok(len)
}
}
impl Decodable for BlockTransactionsRequest {
fn consensus_decode<D: io::Read + ?Sized>(
mut d: &mut D,
) -> Result<BlockTransactionsRequest, encode::Error> {
Ok(BlockTransactionsRequest {
block_hash: BlockHash::consensus_decode(&mut d)?,
indexes: {
// Manually decode indexes because they are differentially encoded VarInts.
let nb_indexes = VarInt::consensus_decode(&mut d)?.0 as usize;
// Since the indices ultimately represent transactions,
// we can limit the number of indices to the maximum number of
// transactions that would be allowed in a vector.
let byte_size = (nb_indexes as usize)
.checked_mul(mem::size_of::<Transaction>())
.ok_or(encode::Error::ParseFailed("Invalid length"))?;
if byte_size > encode::MAX_VEC_SIZE {
return Err(encode::Error::OversizedVectorAllocation {
requested: byte_size,
max: encode::MAX_VEC_SIZE,
});
}
let mut indexes = Vec::with_capacity(nb_indexes);
let mut last_index: u64 = 0;
for _ in 0..nb_indexes {
let differential: VarInt = Decodable::consensus_decode(&mut d)?;
last_index = match last_index.checked_add(differential.0) {
Some(r) => r,
None => return Err(encode::Error::ParseFailed("block index overflow")),
};
indexes.push(last_index);
last_index = match last_index.checked_add(1) {
Some(r) => r,
None => return Err(encode::Error::ParseFailed("block index overflow")),
};
}
indexes
},
})
}
}
/// A transaction index is requested that is out of range from the
/// corresponding block.
#[derive(Clone, PartialEq, Eq, Debug, Copy, PartialOrd, Ord, Hash)]
pub struct TxIndexOutOfRangeError(u64);
impl fmt::Display for TxIndexOutOfRangeError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"a transaction index is requested that is \
out of range from the corresponding block: {}",
self.0,
)
}
}
#[cfg(feature = "std")]
impl error::Error for TxIndexOutOfRangeError {}
/// A [BlockTransactions] structure is used to provide some of the transactions
/// in a block, as requested.
#[derive(PartialEq, Eq, Clone, Debug, PartialOrd, Ord, Hash)]
pub struct BlockTransactions {
/// The blockhash of the block which the transactions being provided are in.
pub block_hash: BlockHash,
/// The transactions provided.
pub transactions: Vec<Transaction>,
}
impl_consensus_encoding!(BlockTransactions, block_hash, transactions);
impl BlockTransactions {
/// Construct a [BlockTransactions] from a [BlockTransactionsRequest] and
/// the corresponding full [Block] by providing all requested transactions.
pub fn from_request(
request: &BlockTransactionsRequest,
block: &Block,
) -> Result<BlockTransactions, TxIndexOutOfRangeError> {
Ok(BlockTransactions {
block_hash: request.block_hash,
transactions: {
let mut txs = Vec::with_capacity(request.indexes.len());
for idx in &request.indexes {
if *idx >= block.txdata.len() as u64 {
return Err(TxIndexOutOfRangeError(*idx));
}
txs.push(block.txdata[*idx as usize].clone());
}
txs
},
})
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::consensus::encode::{deserialize, serialize};
use crate::hashes::hex::FromHex;
use crate::{
Block, BlockHash, BlockHeader, OutPoint, Script, Sequence, Transaction, TxIn, TxMerkleNode,
TxOut, Txid, Witness,
};
fn dummy_tx(nonce: &[u8]) -> Transaction {
Transaction {
version: 1,
lock_time: 2,
input: vec![TxIn {
previous_output: OutPoint::new(Txid::hash(nonce), 0),
script_sig: Script::new(),
sequence: Sequence(1),
witness: Witness::new(),
}],
output: vec![TxOut { value: 1, script_pubkey: Script::new() }],
}
}
fn dummy_block() -> Block {
Block {
header: BlockHeader {
version: 1,
prev_blockhash: BlockHash::hash(&[0]),
merkle_root: TxMerkleNode::hash(&[1]),
time: 2,
bits: 3,
nonce: 4,
},
txdata: vec![dummy_tx(&[2]), dummy_tx(&[3]), dummy_tx(&[4])],
}
}
#[test]
fn test_header_and_short_ids_from_block() {
let block = dummy_block();
let compact = HeaderAndShortIds::from_block(&block, 42, 2, &[]).unwrap();
assert_eq!(compact.nonce, 42);
assert_eq!(compact.short_ids.len(), 2);
assert_eq!(compact.prefilled_txs.len(), 1);
assert_eq!(compact.prefilled_txs[0].idx, 0);
assert_eq!(&compact.prefilled_txs[0].tx, &block.txdata[0]);
let compact = HeaderAndShortIds::from_block(&block, 42, 2, &[0, 1, 2]).unwrap();
let idxs = compact.prefilled_txs.iter().map(|t| t.idx).collect::<Vec<_>>();
assert_eq!(idxs, vec![0, 0, 0]);
let compact = HeaderAndShortIds::from_block(&block, 42, 2, &[2]).unwrap();
let idxs = compact.prefilled_txs.iter().map(|t| t.idx).collect::<Vec<_>>();
assert_eq!(idxs, vec![0, 1]);
}
#[test]
fn test_compact_block_vector() {
// Tested with Elements implementation of compact blocks.
let raw_block = Vec::<u8>::from_hex("000000206c750a364035aefd5f81508a08769975116d9195312ee4520dceac39e1fdc62c4dc67473b8e354358c1e610afeaff7410858bd45df43e2940f8a62bd3d5e3ac943c2975cffff7f200000000002020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04016b0101ffffffff020006062a0100000001510000000000000000266a24aa21a9ed4a3d9f3343dafcc0d6f6d4310f2ee5ce273ed34edca6c75db3a73e7f368734200120000000000000000000000000000000000000000000000000000000000000000000000000020000000001021fc20ba2bd745507b8e00679e3b362558f9457db374ca28ffa5243f4c23a4d5f00000000171600147c9dea14ffbcaec4b575e03f05ceb7a81cd3fcbffdffffff915d689be87b43337f42e26033df59807b768223368f189a023d0242d837768900000000171600147c9dea14ffbcaec4b575e03f05ceb7a81cd3fcbffdffffff0200cdf5050000000017a9146803c72d9154a6a20f404bed6d3dcee07986235a8700e1f5050000000017a9144e6a4c7cb5b5562904843bdf816342f4db9f5797870247304402205e9bf6e70eb0e4b495bf483fd8e6e02da64900f290ef8aaa64bb32600d973c450220670896f5d0e5f33473e5f399ab680cc1d25c2d2afd15abd722f04978f28be887012103e4e4d9312b2261af508b367d8ba9be4f01b61d6d6e78bec499845b4f410bcf2702473044022045ac80596a6ac9c8c572f94708709adaf106677221122e08daf8b9741a04f66a022003ccd52a3b78f8fd08058fc04fc0cffa5f4c196c84eae9e37e2a85babe731b57012103e4e4d9312b2261af508b367d8ba9be4f01b61d6d6e78bec499845b4f410bcf276a000000").unwrap();
let raw_compact = Vec::<u8>::from_hex("000000206c750a364035aefd5f81508a08769975116d9195312ee4520dceac39e1fdc62c4dc67473b8e354358c1e610afeaff7410858bd45df43e2940f8a62bd3d5e3ac943c2975cffff7f2000000000a4df3c3744da89fa010a6979e971450100020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04016b0101ffffffff020006062a0100000001510000000000000000266a24aa21a9ed4a3d9f3343dafcc0d6f6d4310f2ee5ce273ed34edca6c75db3a73e7f368734200120000000000000000000000000000000000000000000000000000000000000000000000000").unwrap();
let block: Block = deserialize(&raw_block).unwrap();
let nonce = 18053200567810711460;
let compact = HeaderAndShortIds::from_block(&block, nonce, 2, &[]).unwrap();
let compact_expected = deserialize(&raw_compact).unwrap();
assert_eq!(compact, compact_expected);
}
#[test]
fn test_getblocktx_differential_encoding_de_and_serialization() {
let testcases = vec![
// differentially encoded VarInts, indices
(vec![4, 0, 5, 1, 10], vec![0, 6, 8, 19]),
(vec![1, 0], vec![0]),
(vec![5, 0, 0, 0, 0, 0], vec![0, 1, 2, 3, 4]),
(vec![3, 1, 1, 1], vec![1, 3, 5]),
(vec![3, 0, 0, 253, 0, 1], vec![0, 1, 258]), // .., 253, 0, 1] == VarInt(256)
];
let deser_errorcases = vec![
vec![2, 255, 254, 255, 255, 255, 255, 255, 255, 255, 0], // .., 255, 254, .., 255] == VarInt(u64::MAX-1)
vec![1, 255, 255, 255, 255, 255, 255, 255, 255, 255], // .., 255, 255, .., 255] == VarInt(u64::MAX)
];
for testcase in testcases {
{
// test deserialization
let mut raw: Vec<u8> = [0u8; 32].to_vec();
raw.extend(testcase.0.clone());
let btr: BlockTransactionsRequest = deserialize(&raw.to_vec()).unwrap();
assert_eq!(testcase.1, btr.indexes);
}
{
// test serialization
let raw: Vec<u8> = serialize(&BlockTransactionsRequest {
block_hash: Hash::all_zeros(),
indexes: testcase.1,
});
let mut expected_raw: Vec<u8> = [0u8; 32].to_vec();
expected_raw.extend(testcase.0);
assert_eq!(expected_raw, raw);
}
}
for errorcase in deser_errorcases {
{
// test that we return Err() if deserialization fails (and don't panic)
let mut raw: Vec<u8> = [0u8; 32].to_vec();
raw.extend(errorcase);
assert!(deserialize::<BlockTransactionsRequest>(&raw.to_vec()).is_err());
}
}
}
#[test]
#[should_panic] // 'attempt to add with overflow' in consensus_encode()
fn test_getblocktx_panic_when_encoding_u64_max() {
serialize(&BlockTransactionsRequest {
block_hash: Hash::all_zeros(),
indexes: vec![core::u64::MAX],
});
}
}
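
For completeness, a sketch of the responding side of the protocol (same assumptions as the earlier sketch; `answer_getblocktxn` is a hypothetical helper): when a peer cannot reconstruct the block from its mempool and sends `getblocktxn`, the announcer replies with the requested transactions in a `blocktxn` message.

use bitcoin::network::message::NetworkMessage;
use bitcoin::network::message_compact_blocks::{BlockTxn, GetBlockTxn};
use bitcoin::util::bip152::{BlockTransactions, TxIndexOutOfRangeError};
use bitcoin::Block;

// Look up the requested transaction indexes in our copy of the block and
// answer with a `blocktxn` message.
fn answer_getblocktxn(
    req: &GetBlockTxn,
    block: &Block,
) -> Result<NetworkMessage, TxIndexOutOfRangeError> {
    let txs = BlockTransactions::from_request(&req.txs_request, block)?;
    Ok(NetworkMessage::BlockTxn(BlockTxn { transactions: txs }))
}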


@ -14,6 +14,7 @@ pub mod amount;
pub mod base58;
pub mod bip32;
pub mod bip143;
pub mod bip152;
pub mod hash;
pub mod merkleblock;
pub mod misc;