// SPDX-License-Identifier: CC0-1.0

//! BIP152 Compact Blocks
//!
//! Implementation of compact blocks data structure and algorithms.
//!
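//! # Example
//!
//! A minimal sketch (not part of the original documentation) of announcing a
//! block as a compact block; it assumes a fully populated `block: Block` is in
//! scope and that this module is exposed as `bitcoin::bip152`:
//!
//! ```ignore
//! use bitcoin::bip152::HeaderAndShortIds;
//!
//! // Version 2 short IDs are computed over wtxids (witness data included);
//! // BIP152 says nodes SHOULD NOT reuse a nonce across different blocks.
//! let nonce: u64 = 0x1234_5678_9abc_def0; // illustrative value only
//! let compact = HeaderAndShortIds::from_block(&block, nonce, 2, &[]).unwrap();
//! // `compact` can now be consensus-encoded into a `cmpctblock` message.
//! ```
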
use core::convert::{TryFrom, TryInto};
use core::{convert, fmt, mem};
#[cfg(feature = "std")]
use std::error;

use hashes::{sha256, siphash24, Hash};
use internals::impl_array_newtype;

use crate::consensus::encode::{self, Decodable, Encodable, VarInt};
use crate::internal_macros::{impl_bytes_newtype, impl_consensus_encoding};
use crate::prelude::*;
use crate::{block, io, Block, BlockHash, Transaction};

/// A BIP-152 error
#[derive(Debug, Clone, PartialEq, Eq)]
#[non_exhaustive]
pub enum Error {
    /// An unknown version number was used.
    UnknownVersion,
    /// The prefill slice provided was invalid.
    InvalidPrefill,
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Error::UnknownVersion => write!(f, "an unknown version number was used"),
            Error::InvalidPrefill => write!(f, "the prefill slice provided was invalid"),
        }
    }
}

#[cfg(feature = "std")]
impl std::error::Error for Error {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        use self::Error::*;

        match *self {
            UnknownVersion | InvalidPrefill => None,
        }
    }
}

/// A [PrefilledTransaction] structure is used in [HeaderAndShortIds] to
/// provide a list of a few transactions explicitly.
#[derive(PartialEq, Eq, Clone, Debug, PartialOrd, Ord, Hash)]
pub struct PrefilledTransaction {
    /// The index of the transaction in the block.
    ///
    /// This field is differentially encoded relative to the previous
    /// prefilled transaction, as follows:
    ///
    /// > Several uses of CompactSize below are "differentially encoded". For
    /// > these, instead of using raw indexes, the number encoded is the
    /// > difference between the current index and the previous index, minus one.
    /// > For example, a first index of 0 implies a real index of 0, a second
    /// > index of 0 thereafter refers to a real index of 1, etc.
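    ///
    /// For instance (mirroring the unit tests at the bottom of this file),
    /// prefilling the transactions at real block indexes `[0, 2]` stores the
    /// differentially encoded values `[0, 1]`.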
    pub idx: u16,
    /// The actual transaction.
    pub tx: Transaction,
}

impl convert::AsRef<Transaction> for PrefilledTransaction {
    fn as_ref(&self) -> &Transaction { &self.tx }
}

impl Encodable for PrefilledTransaction {
    #[inline]
    fn consensus_encode<W: io::Write + ?Sized>(&self, w: &mut W) -> Result<usize, io::Error> {
        Ok(VarInt::from(self.idx).consensus_encode(w)? + self.tx.consensus_encode(w)?)
    }
}

impl Decodable for PrefilledTransaction {
    #[inline]
    fn consensus_decode<R: io::Read + ?Sized>(r: &mut R) -> Result<Self, encode::Error> {
        let idx = VarInt::consensus_decode(r)?.0;
        let idx = u16::try_from(idx)
            .map_err(|_| encode::Error::ParseFailed("BIP152 prefilled tx index out of bounds"))?;
        let tx = Transaction::consensus_decode(r)?;
        Ok(PrefilledTransaction { idx, tx })
    }
}

/// Short transaction IDs are used to represent a transaction without sending a full 256-bit hash.
#[derive(PartialEq, Eq, Clone, Copy, Hash, Default, PartialOrd, Ord)]
pub struct ShortId([u8; 6]);
impl_array_newtype!(ShortId, u8, 6);
impl_bytes_newtype!(ShortId, 6);

impl ShortId {
    /// Calculate the SipHash24 keys used to calculate short IDs.
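    ///
    /// # Example
    ///
    /// A sketch of the intended flow (not from the original documentation),
    /// assuming a `header: block::Header`, a per-block `nonce: u64` and a
    /// transaction `tx` are already in scope:
    ///
    /// ```ignore
    /// let keys = ShortId::calculate_siphash_keys(&header, nonce);
    /// // Version 2 short IDs are computed over the wtxid.
    /// let short_id = ShortId::with_siphash_keys(&tx.wtxid().to_raw_hash(), keys);
    /// ```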
    pub fn calculate_siphash_keys(header: &block::Header, nonce: u64) -> (u64, u64) {
        // 1. single-SHA256 hashing the block header with the nonce appended (in little-endian)
        let h = {
            let mut engine = sha256::Hash::engine();
            header.consensus_encode(&mut engine).expect("engines don't error");
            nonce.consensus_encode(&mut engine).expect("engines don't error");
            sha256::Hash::from_engine(engine)
        };

        // 2. Running SipHash-2-4 with the input being the transaction ID and the keys (k0/k1)
        // set to the first two little-endian 64-bit integers from the above hash, respectively.
        (
            u64::from_le_bytes(h[0..8].try_into().expect("8 byte slice")),
            u64::from_le_bytes(h[8..16].try_into().expect("8 byte slice")),
        )
    }

    /// Calculate the short ID with the given (w)txid and using the provided SipHash keys.
    pub fn with_siphash_keys<T: AsRef<[u8]>>(txid: &T, siphash_keys: (u64, u64)) -> ShortId {
        // 2. Running SipHash-2-4 with the input being the transaction ID and the keys (k0/k1)
        // set to the first two little-endian 64-bit integers from the above hash, respectively.
        let hash = siphash24::Hash::hash_with_keys(siphash_keys.0, siphash_keys.1, txid.as_ref());

        // 3. Dropping the 2 most significant bytes from the SipHash output to make it 6 bytes.
        let mut id = ShortId([0; 6]);
        id.0.copy_from_slice(&hash[0..6]);
        id
    }
}

impl Encodable for ShortId {
    #[inline]
    fn consensus_encode<W: io::Write + ?Sized>(&self, w: &mut W) -> Result<usize, io::Error> {
        self.0.consensus_encode(w)
    }
}

impl Decodable for ShortId {
    #[inline]
    fn consensus_decode<R: io::Read + ?Sized>(r: &mut R) -> Result<ShortId, encode::Error> {
        Ok(ShortId(Decodable::consensus_decode(r)?))
    }
}

/// A [HeaderAndShortIds] structure is used to relay a block header, the short
/// transaction IDs used for matching already-available transactions, and a
/// select few transactions which we expect a peer may be missing.
#[derive(PartialEq, Eq, Clone, Debug, PartialOrd, Ord, Hash)]
pub struct HeaderAndShortIds {
    /// The header of the block being provided.
    pub header: block::Header,
    /// A nonce for use in short transaction ID calculations.
    pub nonce: u64,
    /// The short transaction IDs calculated from the transactions
    /// which were not provided explicitly in prefilled_txs.
    pub short_ids: Vec<ShortId>,
    /// Used to provide the coinbase transaction and a select few
    /// which we expect a peer may be missing.
    pub prefilled_txs: Vec<PrefilledTransaction>,
}
impl_consensus_encoding!(HeaderAndShortIds, header, nonce, short_ids, prefilled_txs);

impl HeaderAndShortIds {
    /// Create a new [HeaderAndShortIds] from a full block.
    ///
    /// The version number must be either 1 or 2.
    ///
    /// The `prefill` slice indicates which transactions should be prefilled in
    /// the block. It should contain the indexes in the block of the txs to
    /// prefill. It must be ordered. 0 should not be included as the
    /// coinbase tx is always prefilled.
    ///
    /// > Nodes SHOULD NOT use the same nonce across multiple different blocks.
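    ///
    /// # Example
    ///
    /// A sketch (not from the original documentation), assuming a `block: Block`
    /// and a freshly generated random `nonce: u64` are in scope:
    ///
    /// ```ignore
    /// // Prefill the transaction at index 2 in addition to the coinbase,
    /// // using version 2 (wtxid-based) short IDs.
    /// let compact = HeaderAndShortIds::from_block(&block, nonce, 2, &[2]).unwrap();
    /// ```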
    pub fn from_block(
        block: &Block,
        nonce: u64,
        version: u32,
        mut prefill: &[usize],
    ) -> Result<HeaderAndShortIds, Error> {
        if version != 1 && version != 2 {
            return Err(Error::UnknownVersion);
        }

        let siphash_keys = ShortId::calculate_siphash_keys(&block.header, nonce);

        let mut prefilled = Vec::with_capacity(prefill.len() + 1); // +1 for coinbase tx
        let mut short_ids = Vec::with_capacity(block.txdata.len() - prefill.len());
        let mut last_prefill = 0;
        for (idx, tx) in block.txdata.iter().enumerate() {
            // Check if we should prefill this tx.
            let prefill_tx = if prefill.first() == Some(&idx) {
                prefill = &prefill[1..];
                true
            } else {
                idx == 0 // Always prefill coinbase.
            };

            if prefill_tx {
                let diff_idx = idx - last_prefill;
                last_prefill = idx + 1;
                prefilled.push(PrefilledTransaction {
                    idx: diff_idx as u16,
                    tx: match version {
                        // > As encoded in "tx" messages sent in response to getdata MSG_TX
                        1 => {
                            // Strip witness data for version 1.
                            let mut no_witness = tx.clone();
                            no_witness.input.iter_mut().for_each(|i| i.witness.clear());
                            no_witness
                        }
                        // > Transactions inside cmpctblock messages (both those used as direct
                        // > announcement and those in response to getdata) and in blocktxn should
                        // > include witness data, using the same format as responses to getdata
                        // > MSG_WITNESS_TX, specified in BIP144.
                        2 => tx.clone(),
                        _ => unreachable!(),
                    },
                });
            } else {
                short_ids.push(ShortId::with_siphash_keys(
                    &match version {
                        1 => tx.txid().to_raw_hash(),
                        2 => tx.wtxid().to_raw_hash(),
                        _ => unreachable!(),
                    },
                    siphash_keys,
                ));
            }
        }

        if !prefill.is_empty() {
            return Err(Error::InvalidPrefill);
        }

        Ok(HeaderAndShortIds {
            header: block.header,
            nonce,
            // Provide coinbase prefilled.
            prefilled_txs: prefilled,
            short_ids,
        })
    }
}

/// A [BlockTransactionsRequest] structure is used to list transaction indexes
/// in a block being requested.
#[derive(PartialEq, Eq, Clone, Debug, PartialOrd, Ord, Hash)]
pub struct BlockTransactionsRequest {
    /// The blockhash of the block which the transactions being requested are in.
    pub block_hash: BlockHash,
    /// The indexes of the transactions being requested in the block.
    ///
    /// Warning: Encoding panics with [`u64::MAX`] values. See [`BlockTransactionsRequest::consensus_encode()`].
    pub indexes: Vec<u64>,
}

impl Encodable for BlockTransactionsRequest {
    /// # Panics
    ///
    /// Panics if the index overflows [`u64::MAX`]. This happens when [`BlockTransactionsRequest::indexes`]
    /// contains an entry with the value [`u64::MAX`], because `u64` overflows during differential encoding.
    fn consensus_encode<W: io::Write + ?Sized>(&self, w: &mut W) -> Result<usize, io::Error> {
        let mut len = self.block_hash.consensus_encode(w)?;
        // Manually encode indexes because they are differentially encoded VarInts.
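        // For example (matching the test vectors at the bottom of this file),
        // the real indexes [0, 6, 8, 19] are written as the VarInt count 4
        // followed by the differential values [0, 5, 1, 10].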
        len += VarInt(self.indexes.len() as u64).consensus_encode(w)?;
        let mut last_idx = 0;
        for idx in &self.indexes {
            len += VarInt(*idx - last_idx).consensus_encode(w)?;
            last_idx = *idx + 1; // can panic here
        }
        Ok(len)
    }
}

impl Decodable for BlockTransactionsRequest {
    fn consensus_decode<R: io::Read + ?Sized>(r: &mut R) -> Result<Self, encode::Error> {
        Ok(BlockTransactionsRequest {
            block_hash: BlockHash::consensus_decode(r)?,
            indexes: {
                // Manually decode indexes because they are differentially encoded VarInts.
                let nb_indexes = VarInt::consensus_decode(r)?.0 as usize;

                // Since the number of indices ultimately represents transactions,
                // we can limit the number of indices to the maximum number of
                // transactions that would be allowed in a vector.
                let byte_size = (nb_indexes)
                    .checked_mul(mem::size_of::<Transaction>())
                    .ok_or(encode::Error::ParseFailed("Invalid length"))?;
                if byte_size > encode::MAX_VEC_SIZE {
                    return Err(encode::Error::OversizedVectorAllocation {
                        requested: byte_size,
                        max: encode::MAX_VEC_SIZE,
                    });
                }

                let mut indexes = Vec::with_capacity(nb_indexes);
                let mut last_index: u64 = 0;
                for _ in 0..nb_indexes {
                    let differential: VarInt = Decodable::consensus_decode(r)?;
                    last_index = match last_index.checked_add(differential.0) {
                        Some(i) => i,
                        None => return Err(encode::Error::ParseFailed("block index overflow")),
                    };
                    indexes.push(last_index);
                    last_index = match last_index.checked_add(1) {
                        Some(i) => i,
                        None => return Err(encode::Error::ParseFailed("block index overflow")),
                    };
                }
                indexes
            },
        })
    }
}

/// A transaction index is requested that is out of range from the
/// corresponding block.
#[derive(Debug, Clone, PartialEq, Eq)]
#[non_exhaustive]
pub struct TxIndexOutOfRangeError(u64);

impl fmt::Display for TxIndexOutOfRangeError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "a transaction index is requested that is \
            out of range from the corresponding block: {}",
            self.0,
        )
    }
}

#[cfg(feature = "std")]
impl error::Error for TxIndexOutOfRangeError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { None }
}

/// A [BlockTransactions] structure is used to provide some of the transactions
/// in a block, as requested.
#[derive(PartialEq, Eq, Clone, Debug, PartialOrd, Ord, Hash)]
pub struct BlockTransactions {
    /// The blockhash of the block which the transactions being provided are in.
    pub block_hash: BlockHash,
    /// The transactions provided.
    pub transactions: Vec<Transaction>,
}
impl_consensus_encoding!(BlockTransactions, block_hash, transactions);

impl BlockTransactions {
    /// Construct a [BlockTransactions] from a [BlockTransactionsRequest] and
    /// the corresponding full [Block] by providing all requested transactions.
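    ///
    /// # Example
    ///
    /// A sketch (not from the original documentation), assuming a previously
    /// decoded `request: BlockTransactionsRequest` and the matching
    /// `block: Block` are in scope:
    ///
    /// ```ignore
    /// let block_txs = BlockTransactions::from_request(&request, &block).unwrap();
    /// // `block_txs` can be consensus-encoded as the `blocktxn` response payload.
    /// ```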
    pub fn from_request(
        request: &BlockTransactionsRequest,
        block: &Block,
    ) -> Result<BlockTransactions, TxIndexOutOfRangeError> {
        Ok(BlockTransactions {
            block_hash: request.block_hash,
            transactions: {
                let mut txs = Vec::with_capacity(request.indexes.len());
                for idx in &request.indexes {
                    if *idx >= block.txdata.len() as u64 {
                        return Err(TxIndexOutOfRangeError(*idx));
                    }
                    txs.push(block.txdata[*idx as usize].clone());
                }
                txs
            },
        })
    }
}

#[cfg(test)]
mod test {
    use hex::FromHex;

    use super::*;
    use crate::blockdata::block::TxMerkleNode;
    use crate::blockdata::locktime::absolute;
    use crate::blockdata::transaction;
    use crate::consensus::encode::{deserialize, serialize};
    use crate::{
        Amount, CompactTarget, OutPoint, ScriptBuf, Sequence, Transaction, TxIn, TxOut, Txid,
        Witness,
    };

    fn dummy_tx(nonce: &[u8]) -> Transaction {
        Transaction {
            version: transaction::Version::ONE,
            lock_time: absolute::LockTime::from_consensus(2),
            input: vec![TxIn {
                previous_output: OutPoint::new(Txid::hash(nonce), 0),
                script_sig: ScriptBuf::new(),
                sequence: Sequence(1),
                witness: Witness::new(),
            }],
            output: vec![TxOut { value: Amount::ONE_SAT, script_pubkey: ScriptBuf::new() }],
        }
    }

    fn dummy_block() -> Block {
        Block {
            header: block::Header {
                version: block::Version::ONE,
                prev_blockhash: BlockHash::hash(&[0]),
                merkle_root: TxMerkleNode::hash(&[1]),
                time: 2,
                bits: CompactTarget::from_consensus(3),
                nonce: 4,
            },
            txdata: vec![dummy_tx(&[2]), dummy_tx(&[3]), dummy_tx(&[4])],
        }
    }

    #[test]
    fn test_header_and_short_ids_from_block() {
        let block = dummy_block();

        let compact = HeaderAndShortIds::from_block(&block, 42, 2, &[]).unwrap();
        assert_eq!(compact.nonce, 42);
        assert_eq!(compact.short_ids.len(), 2);
        assert_eq!(compact.prefilled_txs.len(), 1);
        assert_eq!(compact.prefilled_txs[0].idx, 0);
        assert_eq!(&compact.prefilled_txs[0].tx, &block.txdata[0]);

        let compact = HeaderAndShortIds::from_block(&block, 42, 2, &[0, 1, 2]).unwrap();
        let idxs = compact.prefilled_txs.iter().map(|t| t.idx).collect::<Vec<_>>();
        assert_eq!(idxs, vec![0, 0, 0]);

        let compact = HeaderAndShortIds::from_block(&block, 42, 2, &[2]).unwrap();
        let idxs = compact.prefilled_txs.iter().map(|t| t.idx).collect::<Vec<_>>();
        assert_eq!(idxs, vec![0, 1]);
    }

    #[test]
    fn test_compact_block_vector() {
        // Tested with the Elements implementation of compact blocks.
let raw_block = Vec::<u8>::from_hex("000000206c750a364035aefd5f81508a08769975116d9195312ee4520dceac39e1fdc62c4dc67473b8e354358c1e610afeaff7410858bd45df43e2940f8a62bd3d5e3ac943c2975cffff7f200000000002020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04016b0101ffffffff020006062a0100000001510000000000000000266a24aa21a9ed4a3d9f3343dafcc0d6f6d4310f2ee5ce273ed34edca6c75db3a73e7f368734200120000000000000000000000000000000000000000000000000000000000000000000000000020000000001021fc20ba2bd745507b8e00679e3b362558f9457db374ca28ffa5243f4c23a4d5f00000000171600147c9dea14ffbcaec4b575e03f05ceb7a81cd3fcbffdffffff915d689be87b43337f42e26033df59807b768223368f189a023d0242d837768900000000171600147c9dea14ffbcaec4b575e03f05ceb7a81cd3fcbffdffffff0200cdf5050000000017a9146803c72d9154a6a20f404bed6d3dcee07986235a8700e1f5050000000017a9144e6a4c7cb5b5562904843bdf816342f4db9f5797870247304402205e9bf6e70eb0e4b495bf483fd8e6e02da64900f290ef8aaa64bb32600d973c450220670896f5d0e5f33473e5f399ab680cc1d25c2d2afd15abd722f04978f28be887012103e4e4d9312b2261af508b367d8ba9be4f01b61d6d6e78bec499845b4f410bcf2702473044022045ac80596a6ac9c8c572f94708709adaf106677221122e08daf8b9741a04f66a022003ccd52a3b78f8fd08058fc04fc0cffa5f4c196c84eae9e37e2a85babe731b57012103e4e4d9312b2261af508b367d8ba9be4f01b61d6d6e78bec499845b4f410bcf276a000000").unwrap();
let raw_compact = Vec::<u8>::from_hex("000000206c750a364035aefd5f81508a08769975116d9195312ee4520dceac39e1fdc62c4dc67473b8e354358c1e610afeaff7410858bd45df43e2940f8a62bd3d5e3ac943c2975cffff7f2000000000a4df3c3744da89fa010a6979e971450100020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04016b0101ffffffff020006062a0100000001510000000000000000266a24aa21a9ed4a3d9f3343dafcc0d6f6d4310f2ee5ce273ed34edca6c75db3a73e7f368734200120000000000000000000000000000000000000000000000000000000000000000000000000").unwrap();

        let block: Block = deserialize(&raw_block).unwrap();
        let nonce = 18053200567810711460;
        let compact = HeaderAndShortIds::from_block(&block, nonce, 2, &[]).unwrap();
        let compact_expected = deserialize(&raw_compact).unwrap();

        assert_eq!(compact, compact_expected);
    }

    #[test]
    fn test_getblocktx_differential_encoding_de_and_serialization() {
        let testcases = vec![
            // (differentially encoded VarInts, real indices)
            (vec![4, 0, 5, 1, 10], vec![0, 6, 8, 19]),
            (vec![1, 0], vec![0]),
            (vec![5, 0, 0, 0, 0, 0], vec![0, 1, 2, 3, 4]),
            (vec![3, 1, 1, 1], vec![1, 3, 5]),
            (vec![3, 0, 0, 253, 0, 1], vec![0, 1, 258]), // .., 253, 0, 1] == VarInt(256)
        ];
        let deser_errorcases = vec![
            vec![2, 255, 254, 255, 255, 255, 255, 255, 255, 255, 0], // .., 255, 254, .., 255] == VarInt(u64::MAX-1)
            vec![1, 255, 255, 255, 255, 255, 255, 255, 255, 255], // .., 255, 255, .., 255] == VarInt(u64::MAX)
        ];
        for testcase in testcases {
            {
                // test deserialization
                let mut raw: Vec<u8> = [0u8; 32].to_vec();
                raw.extend(testcase.0.clone());
                let btr: BlockTransactionsRequest = deserialize(&raw.to_vec()).unwrap();
                assert_eq!(testcase.1, btr.indexes);
            }
            {
                // test serialization
                let raw: Vec<u8> = serialize(&BlockTransactionsRequest {
                    block_hash: Hash::all_zeros(),
                    indexes: testcase.1,
                });
                let mut expected_raw: Vec<u8> = [0u8; 32].to_vec();
                expected_raw.extend(testcase.0);
                assert_eq!(expected_raw, raw);
            }
        }
        for errorcase in deser_errorcases {
            {
                // test that we return Err() if deserialization fails (and don't panic)
                let mut raw: Vec<u8> = [0u8; 32].to_vec();
                raw.extend(errorcase);
                assert!(deserialize::<BlockTransactionsRequest>(&raw.to_vec()).is_err());
            }
        }
    }

    #[test]
    #[should_panic] // 'attempt to add with overflow' in consensus_encode()
    fn test_getblocktx_panic_when_encoding_u64_max() {
        serialize(&BlockTransactionsRequest {
            block_hash: Hash::all_zeros(),
            indexes: vec![core::u64::MAX],
        });
    }
}