2022-06-29 04:05:31 +00:00
|
|
|
// SPDX-License-Identifier: CC0-1.0
|
2019-07-26 07:36:25 +00:00
|
|
|
|
|
|
|
// This module was largely copied from https://github.com/rust-bitcoin/murmel/blob/master/src/blockfilter.rs
|
|
|
|
// on 11. June 2019 which is licensed under Apache, that file specifically
|
|
|
|
// was written entirely by Tamas Blummer, who is re-licensing its contents here as CC0.
|
|
|
|
|
2022-08-09 04:50:41 +00:00
|
|
|
//! BIP 158 Compact Block Filters for Light Clients.
|
2019-07-26 07:36:25 +00:00
|
|
|
//!
|
2021-11-05 21:58:18 +00:00
|
|
|
//! This module implements a structure for compact filters on block data, for
|
|
|
|
//! use in the BIP 157 light client protocol. The filter construction proposed
|
|
|
|
//! is an alternative to Bloom filters, as used in BIP 37, that minimizes filter
|
|
|
|
//! size by using Golomb-Rice coding for compression.
|
2019-07-26 07:36:25 +00:00
|
|
|
//!
|
2022-08-09 04:50:41 +00:00
|
|
|
//! ### Relevant BIPS
|
|
|
|
//!
|
|
|
|
//! * [BIP 157 - Client Side Block Filtering](https://github.com/bitcoin/bips/blob/master/bip-0157.mediawiki)
|
|
|
|
//! * [BIP 158 - Compact Block Filters for Light Clients](https://github.com/bitcoin/bips/blob/master/bip-0158.mediawiki)
|
|
|
|
//!
|
|
|
|
//! # Examples
|
2021-02-17 20:35:13 +00:00
|
|
|
//!
|
|
|
|
//! ```ignore
|
2022-07-30 12:22:18 +00:00
|
|
|
//! fn get_script_for_coin(coin: &OutPoint) -> Result<ScriptBuf, BlockFilterError> {
|
2021-02-17 20:35:13 +00:00
|
|
|
//! // get utxo ...
|
|
|
|
//! }
|
2022-01-24 00:24:32 +00:00
|
|
|
//!
|
2021-02-17 20:35:13 +00:00
|
|
|
//! // create a block filter for a block (server side)
|
|
|
|
//! let filter = BlockFilter::new_script_filter(&block, get_script_for_coin)?;
|
|
|
|
//!
|
|
|
|
//! // or create a filter from known raw data
|
|
|
|
//! let filter = BlockFilter::new(content);
|
2022-01-24 00:24:32 +00:00
|
|
|
//!
|
2021-02-17 20:35:13 +00:00
|
|
|
//! // read and evaluate a filter
|
2022-01-24 00:24:32 +00:00
|
|
|
//!
|
2022-07-30 12:22:18 +00:00
|
|
|
//! let query: Iterator<Item=ScriptBuf> = // .. some scripts you care about
|
2021-02-17 20:35:13 +00:00
|
|
|
//! if filter.match_any(&block_hash, &mut query.map(|s| s.as_bytes())) {
|
|
|
|
//! // get this block
|
|
|
|
//! }
|
|
|
|
//! ```
|
2022-01-24 00:24:32 +00:00
|
|
|
//!
|
2019-07-26 07:36:25 +00:00
|
|
|
|
2021-06-09 10:40:41 +00:00
|
|
|
use core::cmp::{self, Ordering};
|
2022-08-31 04:45:14 +00:00
|
|
|
use core::convert::TryInto;
|
2022-08-24 06:34:11 +00:00
|
|
|
use core::fmt::{self, Display, Formatter};
|
2019-07-26 07:36:25 +00:00
|
|
|
|
2023-03-22 03:09:58 +00:00
|
|
|
use hashes::{siphash24, Hash};
|
2023-03-28 01:16:47 +00:00
|
|
|
use internals::write_err;
|
2022-09-05 02:19:28 +00:00
|
|
|
|
2022-05-02 22:13:57 +00:00
|
|
|
use crate::blockdata::block::Block;
|
|
|
|
use crate::blockdata::script::Script;
|
|
|
|
use crate::blockdata::transaction::OutPoint;
|
|
|
|
use crate::consensus::encode::VarInt;
|
2022-08-24 06:34:11 +00:00
|
|
|
use crate::consensus::{Decodable, Encodable};
|
|
|
|
use crate::hash_types::{BlockHash, FilterHash, FilterHeader};
|
|
|
|
use crate::io;
|
|
|
|
use crate::prelude::*;
|
2019-07-26 07:36:25 +00:00
|
|
|
|
|
|
|
/// Golomb encoding parameter as in BIP-158, see also <https://gist.github.com/sipa/576d5f09c3b86c3b1b75598d799fc845>
// Rice parameter: the low P bits of each delta are written verbatim.
const P: u8 = 19;
// False-positive-rate parameter: hashed elements are mapped into [0, N * M).
const M: u64 = 784931;
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Errors for blockfilter.
///
/// Covers the two failure modes of this module: an unresolved coin while
/// building a filter, and I/O failures while (de)serializing one.
#[derive(Debug)]
#[non_exhaustive]
pub enum Error {
    /// Missing UTXO, cannot calculate script filter.
    UtxoMissing(OutPoint),
    /// IO error reading or writing binary serialization of the filter.
    Io(io::Error),
}
|
|
|
|
|
|
|
|
impl Display for Error {
|
|
|
|
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
|
Make error types uniform
On our way to v1.0.0 we are defining a standard for our error types,
this includes:
- Uses the following derives (unless not possible, usually because of `io::Error`)
`#[derive(Debug, Clone, PartialEq, Eq)]`
- Has `non_exhaustive` unless we really know we can commit to not adding
anything.
Furthermore, we are trying to make the codebase easy to read. Error code
is write-once-read-many (well it should be) so if we make all the error
code super uniform the users can flick to an error and quickly see what
it includes. In an effort to achieve this I have made up a style and
over recent times have change much of the error code to that new style,
this PR audits _all_ error types in the code base and enforces the
style, specifically:
- Is layed out: definition, [impl block], Display impl, error::Error impl, From impls
- `error::Error` impl matches on enum even if it returns `None` for all variants
- Display/Error impls import enum variants locally
- match uses *self and `ref e`
- error::Error variants that return `Some` come first, `None` after
Re: non_exhaustive
To make dev and review easier I have added `non_exhaustive` to _every_
error type. We can then remove it error by error as we see fit. This is
because it takes a bit of thinking to do and review where as this patch
should not take much brain power to review.
2023-10-04 02:55:45 +00:00
|
|
|
use Error::*;
|
|
|
|
|
2019-07-26 07:36:25 +00:00
|
|
|
match *self {
|
Make error types uniform
On our way to v1.0.0 we are defining a standard for our error types,
this includes:
- Uses the following derives (unless not possible, usually because of `io::Error`)
`#[derive(Debug, Clone, PartialEq, Eq)]`
- Has `non_exhaustive` unless we really know we can commit to not adding
anything.
Furthermore, we are trying to make the codebase easy to read. Error code
is write-once-read-many (well it should be) so if we make all the error
code super uniform the users can flick to an error and quickly see what
it includes. In an effort to achieve this I have made up a style and
over recent times have change much of the error code to that new style,
this PR audits _all_ error types in the code base and enforces the
style, specifically:
- Is layed out: definition, [impl block], Display impl, error::Error impl, From impls
- `error::Error` impl matches on enum even if it returns `None` for all variants
- Display/Error impls import enum variants locally
- match uses *self and `ref e`
- error::Error variants that return `Some` come first, `None` after
Re: non_exhaustive
To make dev and review easier I have added `non_exhaustive` to _every_
error type. We can then remove it error by error as we see fit. This is
because it takes a bit of thinking to do and review where as this patch
should not take much brain power to review.
2023-10-04 02:55:45 +00:00
|
|
|
UtxoMissing(ref coin) => write!(f, "unresolved UTXO {}", coin),
|
|
|
|
Io(ref e) => write_err!(f, "IO error"; e),
|
2019-07-26 07:36:25 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-05-04 05:08:33 +00:00
|
|
|
#[cfg(feature = "std")]
impl std::error::Error for Error {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        // Only the I/O variant wraps an underlying error.
        match *self {
            Error::Io(ref e) => Some(e),
            Error::UtxoMissing(_) => None,
        }
    }
}
|
|
|
|
|
2019-07-26 07:36:25 +00:00
|
|
|
impl From<io::Error> for Error {
|
2022-08-24 06:34:11 +00:00
|
|
|
fn from(io: io::Error) -> Self { Error::Io(io) }
|
2019-07-26 07:36:25 +00:00
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// A block filter, as described by BIP 158.
///
/// Wraps the raw serialized filter bytes; construction and matching are
/// provided by the `impl BlockFilter` methods.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct BlockFilter {
    /// Golomb encoded filter
    pub content: Vec<u8>,
}
|
|
|
|
|
2020-08-07 12:23:35 +00:00
|
|
|
impl FilterHash {
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Computes the filter header from a filter hash and previous filter header.
|
2020-08-07 12:23:35 +00:00
|
|
|
pub fn filter_header(&self, previous_filter_header: &FilterHeader) -> FilterHeader {
|
|
|
|
let mut header_data = [0u8; 64];
|
|
|
|
header_data[0..32].copy_from_slice(&self[..]);
|
|
|
|
header_data[32..64].copy_from_slice(&previous_filter_header[..]);
|
|
|
|
FilterHeader::hash(&header_data)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-07-26 07:36:25 +00:00
|
|
|
impl BlockFilter {
|
2022-08-09 04:26:38 +00:00
|
|
|
/// Creates a new filter from pre-computed data.
|
2022-08-24 06:34:11 +00:00
|
|
|
pub fn new(content: &[u8]) -> BlockFilter { BlockFilter { content: content.to_vec() } }
|
2022-08-09 04:26:38 +00:00
|
|
|
|
2022-08-09 05:04:27 +00:00
|
|
|
/// Computes a SCRIPT_FILTER that contains spent and output scripts.
|
2022-07-30 12:22:18 +00:00
|
|
|
pub fn new_script_filter<M, S>(block: &Block, script_for_coin: M) -> Result<BlockFilter, Error>
|
2022-08-09 05:04:27 +00:00
|
|
|
where
|
2022-07-30 12:22:18 +00:00
|
|
|
M: Fn(&OutPoint) -> Result<S, Error>,
|
|
|
|
S: Borrow<Script>,
|
2022-08-09 05:04:27 +00:00
|
|
|
{
|
|
|
|
let mut out = Vec::new();
|
|
|
|
let mut writer = BlockFilterWriter::new(&mut out, block);
|
|
|
|
|
|
|
|
writer.add_output_scripts();
|
|
|
|
writer.add_input_scripts(script_for_coin)?;
|
|
|
|
writer.finish()?;
|
|
|
|
|
|
|
|
Ok(BlockFilter { content: out })
|
|
|
|
}
|
|
|
|
|
2022-08-09 05:08:29 +00:00
|
|
|
/// Computes this filter's ID in a chain of filters (see [BIP 157]).
|
|
|
|
///
|
|
|
|
/// [BIP 157]: <https://github.com/bitcoin/bips/blob/master/bip-0157.mediawiki#Filter_Headers>
|
2020-08-07 12:23:35 +00:00
|
|
|
pub fn filter_header(&self, previous_filter_header: &FilterHeader) -> FilterHeader {
|
2019-12-06 09:01:15 +00:00
|
|
|
let filter_hash = FilterHash::hash(self.content.as_slice());
|
2020-08-07 12:23:35 +00:00
|
|
|
filter_hash.filter_header(previous_filter_header)
|
2019-07-26 07:36:25 +00:00
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Returns true if any query matches against this [`BlockFilter`].
|
2022-08-11 20:48:21 +00:00
|
|
|
pub fn match_any<I>(&self, block_hash: &BlockHash, query: I) -> Result<bool, Error>
|
2022-08-08 06:24:25 +00:00
|
|
|
where
|
2022-08-11 20:48:21 +00:00
|
|
|
I: Iterator,
|
|
|
|
I::Item: Borrow<[u8]>,
|
2022-08-08 06:24:25 +00:00
|
|
|
{
|
2019-08-09 14:58:02 +00:00
|
|
|
let filter_reader = BlockFilterReader::new(block_hash);
|
2022-08-08 05:30:26 +00:00
|
|
|
filter_reader.match_any(&mut self.content.as_slice(), query)
|
2019-07-26 07:36:25 +00:00
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Returns true if all queries match against this [`BlockFilter`].
|
2022-08-11 20:48:21 +00:00
|
|
|
pub fn match_all<I>(&self, block_hash: &BlockHash, query: I) -> Result<bool, Error>
|
2022-08-08 06:24:25 +00:00
|
|
|
where
|
2022-08-11 20:48:21 +00:00
|
|
|
I: Iterator,
|
|
|
|
I::Item: Borrow<[u8]>,
|
2022-08-08 06:24:25 +00:00
|
|
|
{
|
2019-08-09 14:58:02 +00:00
|
|
|
let filter_reader = BlockFilterReader::new(block_hash);
|
2022-08-08 05:30:26 +00:00
|
|
|
filter_reader.match_all(&mut self.content.as_slice(), query)
|
2019-07-26 07:36:25 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Compiles and writes a block filter.
pub struct BlockFilterWriter<'a, W> {
    // The block whose scripts are being added to the filter.
    block: &'a Block,
    // Underlying GCS writer, seeded from the block hash in `new`.
    writer: GcsFilterWriter<'a, W>,
}
|
|
|
|
|
2022-08-08 06:24:25 +00:00
|
|
|
impl<'a, W: io::Write> BlockFilterWriter<'a, W> {
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Creates a new [`BlockFilterWriter`] from `block`.
|
2022-08-08 06:24:25 +00:00
|
|
|
pub fn new(writer: &'a mut W, block: &'a Block) -> BlockFilterWriter<'a, W> {
|
2023-01-28 22:47:24 +00:00
|
|
|
let block_hash_as_int = block.block_hash().to_byte_array();
|
2022-08-31 04:45:14 +00:00
|
|
|
let k0 = u64::from_le_bytes(block_hash_as_int[0..8].try_into().expect("8 byte slice"));
|
|
|
|
let k1 = u64::from_le_bytes(block_hash_as_int[8..16].try_into().expect("8 byte slice"));
|
2022-08-09 05:09:04 +00:00
|
|
|
let writer = GcsFilterWriter::new(writer, k0, k1, M, P);
|
2019-07-26 07:36:25 +00:00
|
|
|
BlockFilterWriter { block, writer }
|
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Adds output scripts of the block to filter (excluding OP_RETURN scripts).
|
2019-07-26 07:36:25 +00:00
|
|
|
pub fn add_output_scripts(&mut self) {
|
|
|
|
for transaction in &self.block.txdata {
|
|
|
|
for output in &transaction.output {
|
|
|
|
if !output.script_pubkey.is_op_return() {
|
|
|
|
self.add_element(output.script_pubkey.as_bytes());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Adds consumed output scripts of a block to filter.
|
2022-07-30 12:22:18 +00:00
|
|
|
pub fn add_input_scripts<M, S>(&mut self, script_for_coin: M) -> Result<(), Error>
|
2022-08-24 06:34:11 +00:00
|
|
|
where
|
2022-07-30 12:22:18 +00:00
|
|
|
M: Fn(&OutPoint) -> Result<S, Error>,
|
|
|
|
S: Borrow<Script>,
|
2022-08-24 06:34:11 +00:00
|
|
|
{
|
|
|
|
for script in self
|
|
|
|
.block
|
|
|
|
.txdata
|
|
|
|
.iter()
|
2019-07-26 07:36:25 +00:00
|
|
|
.skip(1) // skip coinbase
|
|
|
|
.flat_map(|t| t.input.iter().map(|i| &i.previous_output))
|
2022-08-24 06:34:11 +00:00
|
|
|
.map(script_for_coin)
|
|
|
|
{
|
2019-07-26 07:36:25 +00:00
|
|
|
match script {
|
2022-07-30 12:22:18 +00:00
|
|
|
Ok(script) => self.add_element(script.borrow().as_bytes()),
|
2022-08-24 06:34:11 +00:00
|
|
|
Err(e) => return Err(e),
|
2019-07-26 07:36:25 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Adds an arbitrary element to filter.
|
2022-08-24 06:34:11 +00:00
|
|
|
pub fn add_element(&mut self, data: &[u8]) { self.writer.add_element(data); }
|
2019-07-26 07:36:25 +00:00
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Writes the block filter.
|
2022-08-24 06:34:11 +00:00
|
|
|
pub fn finish(&mut self) -> Result<usize, io::Error> { self.writer.finish() }
|
2019-07-26 07:36:25 +00:00
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Reads and interprets a block filter.
pub struct BlockFilterReader {
    // GCS reader whose siphash keys are derived from the block hash in `new`.
    reader: GcsFilterReader,
}
|
|
|
|
|
|
|
|
impl BlockFilterReader {
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Creates a new [`BlockFilterReader`] from `block_hash`.
|
2019-11-30 16:26:52 +00:00
|
|
|
pub fn new(block_hash: &BlockHash) -> BlockFilterReader {
|
2023-01-28 22:47:24 +00:00
|
|
|
let block_hash_as_int = block_hash.to_byte_array();
|
2022-08-31 04:45:14 +00:00
|
|
|
let k0 = u64::from_le_bytes(block_hash_as_int[0..8].try_into().expect("8 byte slice"));
|
|
|
|
let k1 = u64::from_le_bytes(block_hash_as_int[8..16].try_into().expect("8 byte slice"));
|
2022-08-09 05:09:04 +00:00
|
|
|
BlockFilterReader { reader: GcsFilterReader::new(k0, k1, M, P) }
|
2019-07-26 07:36:25 +00:00
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Returns true if any query matches against this [`BlockFilterReader`].
|
2022-08-11 20:48:21 +00:00
|
|
|
pub fn match_any<I, R>(&self, reader: &mut R, query: I) -> Result<bool, Error>
|
2022-08-08 06:24:25 +00:00
|
|
|
where
|
2022-08-11 20:48:21 +00:00
|
|
|
I: Iterator,
|
|
|
|
I::Item: Borrow<[u8]>,
|
2022-08-08 06:24:25 +00:00
|
|
|
R: io::Read + ?Sized,
|
|
|
|
{
|
2019-07-26 07:36:25 +00:00
|
|
|
self.reader.match_any(reader, query)
|
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Returns true if all queries match against this [`BlockFilterReader`].
|
2022-08-11 20:48:21 +00:00
|
|
|
pub fn match_all<I, R>(&self, reader: &mut R, query: I) -> Result<bool, Error>
|
2022-08-08 06:24:25 +00:00
|
|
|
where
|
2022-08-11 20:48:21 +00:00
|
|
|
I: Iterator,
|
|
|
|
I::Item: Borrow<[u8]>,
|
2022-08-08 06:24:25 +00:00
|
|
|
R: io::Read + ?Sized,
|
|
|
|
{
|
2019-07-26 07:36:25 +00:00
|
|
|
self.reader.match_all(reader, query)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Golomb-Rice encoded filter reader.
pub struct GcsFilterReader {
    // Hashing and Rice-coding parameters (siphash keys and P).
    filter: GcsFilter,
    // Multiplier used to map hashes into [0, N * m).
    m: u64,
}
|
|
|
|
|
2022-08-09 05:09:04 +00:00
|
|
|
impl GcsFilterReader {
    /// Creates a new [`GcsFilterReader`] with specific seed to siphash.
    pub fn new(k0: u64, k1: u64, m: u64, p: u8) -> GcsFilterReader {
        GcsFilterReader { filter: GcsFilter::new(k0, k1, p), m }
    }

    /// Returns true if any query matches against this [`GcsFilterReader`].
    pub fn match_any<I, R>(&self, reader: &mut R, query: I) -> Result<bool, Error>
    where
        I: Iterator,
        I::Item: Borrow<[u8]>,
        R: io::Read + ?Sized,
    {
        let mut decoder = reader;
        // Number of encoded elements; a decode failure is treated as an empty filter.
        let n_elements: VarInt = Decodable::consensus_decode(&mut decoder).unwrap_or(VarInt(0));
        let reader = &mut decoder;
        // map hashes to [0, n_elements << grp]
        let nm = n_elements.0 * self.m;
        let mut mapped =
            query.map(|e| map_to_range(self.filter.hash(e.borrow()), nm)).collect::<Vec<_>>();
        // Sort the query values so the filter can be scanned in a single pass.
        mapped.sort_unstable();
        // NOTE: an empty query trivially matches; this is checked before the
        // empty-filter case, so empty query + empty filter returns true.
        if mapped.is_empty() {
            return Ok(true);
        }
        if n_elements.0 == 0 {
            return Ok(false);
        }

        // find first match in two sorted arrays in one read pass
        let mut reader = BitStreamReader::new(reader);
        // Filter values are delta-encoded: `data` accumulates the running value.
        let mut data = self.filter.golomb_rice_decode(&mut reader)?;
        let mut remaining = n_elements.0 - 1;
        for p in mapped {
            loop {
                match data.cmp(&p) {
                    Ordering::Equal => return Ok(true),
                    Ordering::Less =>
                        // Filter value behind the query: advance the filter,
                        // or give up if it is exhausted.
                        if remaining > 0 {
                            data += self.filter.golomb_rice_decode(&mut reader)?;
                            remaining -= 1;
                        } else {
                            return Ok(false);
                        },
                    // Filter overshot this query value; try the next query.
                    Ordering::Greater => break,
                }
            }
        }
        Ok(false)
    }

    /// Returns true if all queries match against this [`GcsFilterReader`].
    pub fn match_all<I, R>(&self, reader: &mut R, query: I) -> Result<bool, Error>
    where
        I: Iterator,
        I::Item: Borrow<[u8]>,
        R: io::Read + ?Sized,
    {
        let mut decoder = reader;
        // Number of encoded elements; a decode failure is treated as an empty filter.
        let n_elements: VarInt = Decodable::consensus_decode(&mut decoder).unwrap_or(VarInt(0));
        let reader = &mut decoder;
        // map hashes to [0, n_elements << grp]
        let nm = n_elements.0 * self.m;
        let mut mapped =
            query.map(|e| map_to_range(self.filter.hash(e.borrow()), nm)).collect::<Vec<_>>();
        // Sort and dedup so each distinct value has to be found only once.
        mapped.sort_unstable();
        mapped.dedup();
        if mapped.is_empty() {
            return Ok(true);
        }
        if n_elements.0 == 0 {
            return Ok(false);
        }

        // figure if all mapped are there in one read pass
        let mut reader = BitStreamReader::new(reader);
        let mut data = self.filter.golomb_rice_decode(&mut reader)?;
        let mut remaining = n_elements.0 - 1;
        for p in mapped {
            loop {
                match data.cmp(&p) {
                    // Found this query value; move on to the next one.
                    Ordering::Equal => break,
                    Ordering::Less =>
                        if remaining > 0 {
                            data += self.filter.golomb_rice_decode(&mut reader)?;
                            remaining -= 1;
                        } else {
                            return Ok(false);
                        },
                    // Filter passed the query value without matching it.
                    Ordering::Greater => return Ok(false),
                }
            }
        }
        Ok(true)
    }
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Fast reduction of hash to [0, nm) range.
///
/// Multiply-then-shift maps a uniform 64-bit hash onto `[0, nm)` without
/// a (slow) modulo; the widening multiply cannot overflow in `u128`.
fn map_to_range(hash: u64, nm: u64) -> u64 {
    let wide = u128::from(hash) * u128::from(nm);
    (wide >> 64) as u64
}
|
2019-07-26 07:36:25 +00:00
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Golomb-Rice encoded filter writer.
pub struct GcsFilterWriter<'a, W> {
    // Hashing and Rice-coding parameters (siphash keys and P).
    filter: GcsFilter,
    // Sink that receives the serialized filter on `finish`.
    writer: &'a mut W,
    // Elements collected before `finish`; BTreeSet deduplicates them.
    elements: BTreeSet<Vec<u8>>,
    // Multiplier used to map hashes into [0, N * m).
    m: u64,
}
|
|
|
|
|
2022-08-09 05:09:04 +00:00
|
|
|
impl<'a, W: io::Write> GcsFilterWriter<'a, W> {
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Creates a new [`GcsFilterWriter`] wrapping a generic writer, with specific seed to siphash.
|
2022-08-09 05:09:04 +00:00
|
|
|
pub fn new(writer: &'a mut W, k0: u64, k1: u64, m: u64, p: u8) -> GcsFilterWriter<'a, W> {
|
2023-01-22 06:29:18 +00:00
|
|
|
GcsFilterWriter { filter: GcsFilter::new(k0, k1, p), writer, elements: BTreeSet::new(), m }
|
2019-07-26 07:36:25 +00:00
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Adds data to the filter.
|
2019-07-26 07:36:25 +00:00
|
|
|
pub fn add_element(&mut self, element: &[u8]) {
|
|
|
|
if !element.is_empty() {
|
|
|
|
self.elements.insert(element.to_vec());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Writes the filter to the wrapped writer.
|
2019-07-26 07:36:25 +00:00
|
|
|
pub fn finish(&mut self) -> Result<usize, io::Error> {
|
|
|
|
let nm = self.elements.len() as u64 * self.m;
|
|
|
|
|
|
|
|
// map hashes to [0, n_elements * M)
|
2022-08-24 06:34:11 +00:00
|
|
|
let mut mapped: Vec<_> = self
|
|
|
|
.elements
|
|
|
|
.iter()
|
|
|
|
.map(|e| map_to_range(self.filter.hash(e.as_slice()), nm))
|
|
|
|
.collect();
|
2021-11-03 09:20:34 +00:00
|
|
|
mapped.sort_unstable();
|
2019-07-26 07:36:25 +00:00
|
|
|
|
|
|
|
// write number of elements as varint
|
2023-08-24 00:37:53 +00:00
|
|
|
let mut wrote = VarInt::from(mapped.len()).consensus_encode(&mut self.writer)?;
|
2019-07-26 07:36:25 +00:00
|
|
|
|
|
|
|
// write out deltas of sorted values into a Golonb-Rice coded bit stream
|
|
|
|
let mut writer = BitStreamWriter::new(self.writer);
|
|
|
|
let mut last = 0;
|
|
|
|
for data in mapped {
|
|
|
|
wrote += self.filter.golomb_rice_encode(&mut writer, data - last)?;
|
|
|
|
last = data;
|
|
|
|
}
|
|
|
|
wrote += writer.flush()?;
|
|
|
|
Ok(wrote)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Golomb Coded Set Filter.
///
/// Holds the hashing and coding parameters shared by reader and writer.
struct GcsFilter {
    k0: u64, // sip hash key
    k1: u64, // sip hash key
    p: u8,   // Rice parameter: remainders are coded in p bits
}
|
|
|
|
|
2022-08-09 05:09:04 +00:00
|
|
|
impl GcsFilter {
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Creates a new [`GcsFilter`].
|
2022-08-24 06:34:11 +00:00
|
|
|
fn new(k0: u64, k1: u64, p: u8) -> GcsFilter { GcsFilter { k0, k1, p } }
|
2019-07-26 07:36:25 +00:00
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Golomb-Rice encodes a number `n` to a bit stream (parameter 2^k).
|
2023-01-26 21:34:12 +00:00
|
|
|
fn golomb_rice_encode<W>(
|
2022-08-24 06:34:11 +00:00
|
|
|
&self,
|
2023-01-30 21:07:05 +00:00
|
|
|
writer: &mut BitStreamWriter<'_, W>,
|
2022-08-24 06:34:11 +00:00
|
|
|
n: u64,
|
|
|
|
) -> Result<usize, io::Error>
|
2022-08-08 06:24:25 +00:00
|
|
|
where
|
|
|
|
W: io::Write,
|
|
|
|
{
|
2019-07-26 07:36:25 +00:00
|
|
|
let mut wrote = 0;
|
|
|
|
let mut q = n >> self.p;
|
|
|
|
while q > 0 {
|
|
|
|
let nbits = cmp::min(q, 64);
|
|
|
|
wrote += writer.write(!0u64, nbits as u8)?;
|
|
|
|
q -= nbits;
|
|
|
|
}
|
|
|
|
wrote += writer.write(0, 1)?;
|
|
|
|
wrote += writer.write(n, self.p)?;
|
|
|
|
Ok(wrote)
|
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Golomb-Rice decodes a number from a bit stream (parameter 2^k).
|
2022-08-08 06:24:25 +00:00
|
|
|
fn golomb_rice_decode<R>(&self, reader: &mut BitStreamReader<R>) -> Result<u64, io::Error>
|
|
|
|
where
|
2022-08-24 06:34:11 +00:00
|
|
|
R: io::Read,
|
2022-08-08 06:24:25 +00:00
|
|
|
{
|
2019-07-26 07:36:25 +00:00
|
|
|
let mut q = 0u64;
|
|
|
|
while reader.read(1)? == 1 {
|
|
|
|
q += 1;
|
|
|
|
}
|
|
|
|
let r = reader.read(self.p)?;
|
2019-08-05 19:41:07 +00:00
|
|
|
Ok((q << self.p) + r)
|
2019-07-26 07:36:25 +00:00
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Hashes an arbitrary slice with siphash using parameters of this filter.
|
2019-07-26 07:36:25 +00:00
|
|
|
fn hash(&self, element: &[u8]) -> u64 {
|
|
|
|
siphash24::Hash::hash_to_u64_with_keys(self.k0, self.k1, element)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Bitwise stream reader.
pub struct BitStreamReader<'a, R> {
    // One-byte read buffer; bits are consumed most-significant first.
    buffer: [u8; 1],
    // Number of bits of `buffer` already consumed (8 means the buffer is empty).
    offset: u8,
    // Underlying byte source.
    reader: &'a mut R,
}
|
|
|
|
|
2022-08-08 06:24:25 +00:00
|
|
|
impl<'a, R: io::Read> BitStreamReader<'a, R> {
    /// Creates a new [`BitStreamReader`] that reads bitwise from a given `reader`.
    pub fn new(reader: &'a mut R) -> BitStreamReader<'a, R> {
        // offset == 8 marks the buffer as empty, so the first read fetches a byte.
        BitStreamReader { buffer: [0u8], reader, offset: 8 }
    }

    /// Reads nbit bits, returning the bits in a `u64` starting with the rightmost bit.
    ///
    /// # Examples
    /// ```
    /// # use bitcoin::bip158::BitStreamReader;
    /// # let data = vec![0xff];
    /// # let mut input = data.as_slice();
    /// let mut reader = BitStreamReader::new(&mut input); // input contains all 1's
    /// let res = reader.read(1).expect("read failed");
    /// assert_eq!(res, 1_u64);
    /// ```
    pub fn read(&mut self, mut nbits: u8) -> Result<u64, io::Error> {
        if nbits > 64 {
            return Err(io::Error::new(
                io::ErrorKind::Other,
                "can not read more than 64 bits at once",
            ));
        }
        let mut data = 0u64;
        while nbits > 0 {
            // Refill the one-byte buffer once all of its bits are consumed.
            if self.offset == 8 {
                self.reader.read_exact(&mut self.buffer)?;
                self.offset = 0;
            }
            // Take as many bits as possible from the current byte.
            let bits = cmp::min(8 - self.offset, nbits);
            data <<= bits;
            // Drop the already-consumed high bits, then right-align the taken bits.
            data |= ((self.buffer[0] << self.offset) >> (8 - bits)) as u64;
            self.offset += bits;
            nbits -= bits;
        }
        Ok(data)
    }
}
|
|
|
|
|
2022-08-09 06:33:58 +00:00
|
|
|
/// Bitwise stream writer.
pub struct BitStreamWriter<'a, W> {
    // One-byte output buffer being assembled, most-significant bit first.
    buffer: [u8; 1],
    // Number of bits of `buffer` already filled.
    offset: u8,
    // Underlying byte sink.
    writer: &'a mut W,
}
|
|
|
|
|
2022-08-08 06:24:25 +00:00
|
|
|
impl<'a, W: io::Write> BitStreamWriter<'a, W> {
    /// Creates a new [`BitStreamWriter`] that writes bitwise to a given `writer`.
    pub fn new(writer: &'a mut W) -> BitStreamWriter<'a, W> {
        BitStreamWriter { buffer: [0u8], writer, offset: 0 }
    }

    /// Writes nbits bits from data.
    pub fn write(&mut self, data: u64, mut nbits: u8) -> Result<usize, io::Error> {
        if nbits > 64 {
            return Err(io::Error::new(
                io::ErrorKind::Other,
                "can not write more than 64 bits at once",
            ));
        }
        let mut wrote = 0;
        while nbits > 0 {
            // Fill the current byte with as many bits as still fit.
            let bits = cmp::min(8 - self.offset, nbits);
            // Select the top `bits` of the remaining payload and align them
            // into the unused part of the buffer byte.
            self.buffer[0] |= ((data << (64 - nbits)) >> (64 - 8 + self.offset)) as u8;
            self.offset += bits;
            nbits -= bits;
            // Emit the byte as soon as it is full.
            if self.offset == 8 {
                wrote += self.flush()?;
            }
        }
        Ok(wrote)
    }

    /// flush bits not yet written.
    pub fn flush(&mut self) -> Result<usize, io::Error> {
        if self.offset > 0 {
            // Remaining low bits are zero-padded; write the byte and reset.
            self.writer.write_all(&self.buffer)?;
            self.buffer[0] = 0u8;
            self.offset = 0;
            Ok(1)
        } else {
            Ok(0)
        }
    }
}
|
|
|
|
|
|
|
|
#[cfg(test)]
mod test {
    use std::collections::HashMap;

    use hex::test_hex_unwrap as hex;
    use serde_json::Value;

    use super::*;
    use crate::consensus::encode::deserialize;
    use crate::hash_types::BlockHash;
    use crate::ScriptBuf;

    #[test]
    fn test_blockfilters() {
        // test vectors from: https://github.com/jimpo/bitcoin/blob/c7efb652f3543b001b4dd22186a354605b14f47e/src/test/data/blockfilters.json
        let data = include_str!("../tests/data/blockfilters.json");

        let testdata = serde_json::from_str::<Value>(data).unwrap().as_array().unwrap().clone();
        // The first row of the JSON array is a header; every later row is one test case.
        for t in testdata.iter().skip(1) {
            // Row layout: 1 = block hash, 2 = raw block hex, 3 = spent scripts,
            // 4 = previous filter header, 5 = filter bytes, 6 = expected filter header.
            let block_hash = t.get(1).unwrap().as_str().unwrap().parse::<BlockHash>().unwrap();
            let block: Block = deserialize(&hex!(t.get(2).unwrap().as_str().unwrap())).unwrap();
            assert_eq!(block.block_hash(), block_hash);
            let scripts = t.get(3).unwrap().as_array().unwrap();
            let previous_filter_header =
                t.get(4).unwrap().as_str().unwrap().parse::<FilterHeader>().unwrap();
            let filter_content = hex!(t.get(5).unwrap().as_str().unwrap());
            let filter_header =
                t.get(6).unwrap().as_str().unwrap().parse::<FilterHeader>().unwrap();

            // Map each spent outpoint to its script_pubkey, as the filter
            // constructor requires. The coinbase is skipped: it spends no UTXOs.
            let mut txmap = HashMap::new();
            let mut si = scripts.iter();
            for tx in block.txdata.iter().skip(1) {
                for input in tx.input.iter() {
                    txmap.insert(
                        input.previous_output,
                        ScriptBuf::from(hex!(si.next().unwrap().as_str().unwrap())),
                    );
                }
            }

            // Build the filter server-side from the block plus the UTXO lookup.
            let filter = BlockFilter::new_script_filter(&block, |o| {
                if let Some(s) = txmap.get(o) {
                    Ok(s.clone())
                } else {
                    Err(Error::UtxoMissing(*o))
                }
            })
            .unwrap();

            // The constructed filter must be byte-identical to the test vector.
            let test_filter = BlockFilter::new(filter_content.as_slice());

            assert_eq!(test_filter.content, filter.content);

            let block_hash = &block.block_hash();
            // All non-empty spent scripts together must match the filter.
            assert!(filter
                .match_all(
                    block_hash,
                    &mut txmap.iter().filter_map(|(_, s)| if !s.is_empty() {
                        Some(s.as_bytes())
                    } else {
                        None
                    })
                )
                .unwrap());

            // Each individual non-empty script must also match on its own.
            for script in txmap.values() {
                let query = [script];
                if !script.is_empty() {
                    assert!(filter
                        .match_any(block_hash, &mut query.iter().map(|s| s.as_bytes()))
                        .unwrap());
                }
            }

            // Chaining from the previous header must reproduce the expected header.
            assert_eq!(filter_header, filter.filter_header(&previous_filter_header));
        }
    }

    #[test]
    fn test_filter() {
        // Build a GCS filter over 16 distinct three-byte patterns, then check
        // positive and negative membership queries against it.
        let mut patterns = BTreeSet::new();

        patterns.insert(hex!("000000"));
        patterns.insert(hex!("111111"));
        patterns.insert(hex!("222222"));
        patterns.insert(hex!("333333"));
        patterns.insert(hex!("444444"));
        patterns.insert(hex!("555555"));
        patterns.insert(hex!("666666"));
        patterns.insert(hex!("777777"));
        patterns.insert(hex!("888888"));
        patterns.insert(hex!("999999"));
        patterns.insert(hex!("aaaaaa"));
        patterns.insert(hex!("bbbbbb"));
        patterns.insert(hex!("cccccc"));
        patterns.insert(hex!("dddddd"));
        patterns.insert(hex!("eeeeee"));
        patterns.insert(hex!("ffffff"));

        let mut out = Vec::new();
        {
            let mut writer = GcsFilterWriter::new(&mut out, 0, 0, M, P);
            for p in &patterns {
                writer.add_element(p.as_slice());
            }
            writer.finish().unwrap();
        }

        let bytes = out;

        {
            // match_any: one present element ("eeeeee") is enough to match.
            let query = vec![hex!("abcdef"), hex!("eeeeee")];
            let reader = GcsFilterReader::new(0, 0, M, P);
            assert!(reader
                .match_any(&mut bytes.as_slice(), &mut query.iter().map(|v| v.as_slice()))
                .unwrap());
        }
        {
            // match_any: no element of the query is in the filter.
            let query = vec![hex!("abcdef"), hex!("123456")];
            let reader = GcsFilterReader::new(0, 0, M, P);
            assert!(!reader
                .match_any(&mut bytes.as_slice(), &mut query.iter().map(|v| v.as_slice()))
                .unwrap());
        }
        {
            // match_all: every inserted pattern must be found.
            let reader = GcsFilterReader::new(0, 0, M, P);
            let mut query = Vec::new();
            for p in &patterns {
                query.push(p.clone());
            }
            assert!(reader
                .match_all(&mut bytes.as_slice(), &mut query.iter().map(|v| v.as_slice()))
                .unwrap());
        }
        {
            // match_all: one absent element ("abcdef") makes the match fail.
            let reader = GcsFilterReader::new(0, 0, M, P);
            let mut query = Vec::new();
            for p in &patterns {
                query.push(p.clone());
            }
            query.push(hex!("abcdef"));
            assert!(!reader
                .match_all(&mut bytes.as_slice(), &mut query.iter().map(|v| v.as_slice()))
                .unwrap());
        }
    }

    #[test]
    fn test_bit_stream() {
        // Round-trip values of widths 1..=7 through the bit stream writer and
        // reader, and verify the exact bit layout of the serialized bytes.
        let mut out = Vec::new();
        {
            let mut writer = BitStreamWriter::new(&mut out);
            writer.write(0, 1).unwrap(); // 0
            writer.write(2, 2).unwrap(); // 10
            writer.write(6, 3).unwrap(); // 110
            writer.write(11, 4).unwrap(); // 1011
            writer.write(1, 5).unwrap(); // 00001
            writer.write(32, 6).unwrap(); // 100000
            writer.write(7, 7).unwrap(); // 0000111
            writer.flush().unwrap();
        }
        let bytes = out;
        // 28 bits of payload plus 4 zero padding bits from the final flush.
        assert_eq!(
            "01011010110000110000000001110000",
            format!("{:08b}{:08b}{:08b}{:08b}", bytes[0], bytes[1], bytes[2], bytes[3])
        );
        {
            let mut input = bytes.as_slice();
            let mut reader = BitStreamReader::new(&mut input);
            assert_eq!(reader.read(1).unwrap(), 0);
            assert_eq!(reader.read(2).unwrap(), 2);
            assert_eq!(reader.read(3).unwrap(), 6);
            assert_eq!(reader.read(4).unwrap(), 11);
            assert_eq!(reader.read(5).unwrap(), 1);
            assert_eq!(reader.read(6).unwrap(), 32);
            assert_eq!(reader.read(7).unwrap(), 7);
            // 4 bits remained
            assert!(reader.read(5).is_err());
        }
    }
}
|