no_std support

Based on the original work by Justin Moon.

*MSRV unchanged from 1.29.0.*

When `std` is off, `no-std` must be on, and we use the [`alloc`](https://doc.rust-lang.org/alloc/) and core2 crates. The `alloc` crate requires the user to define a global allocator.
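
What that means for a downstream no_std build, as a minimal sketch (none of this is part of the commit itself; the bump allocator below only makes the `#[global_allocator]` requirement concrete, and any real allocator works; panic handler and program entry are omitted):

```rust
#![no_std]
extern crate alloc;

use core::alloc::{GlobalAlloc, Layout};
use core::cell::UnsafeCell;

const HEAP_SIZE: usize = 64 * 1024;

struct BumpAllocator {
    heap: UnsafeCell<[u8; HEAP_SIZE]>,
    next: UnsafeCell<usize>,
}

// Safety: this sketch assumes a single-threaded target; a real allocator
// needs proper synchronization here.
unsafe impl Sync for BumpAllocator {}

unsafe impl GlobalAlloc for BumpAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let next = &mut *self.next.get();
        // Round the cursor up to the requested alignment, then bump it.
        let start = (*next + layout.align() - 1) & !(layout.align() - 1);
        let end = match start.checked_add(layout.size()) {
            Some(end) if end <= HEAP_SIZE => end,
            _ => return core::ptr::null_mut(),
        };
        *next = end;
        (self.heap.get() as *mut u8).add(start)
    }

    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        // A bump allocator never frees; fine for a sketch, not for production.
    }
}

#[global_allocator]
static ALLOCATOR: BumpAllocator = BumpAllocator {
    heap: UnsafeCell::new([0; HEAP_SIZE]),
    next: UnsafeCell::new(0),
};
```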

* Import from `core` and `alloc` instead of `std`
* `alloc` only used if `no-std` is on
* Create `std` feature
* Create `no-std` feature which adds a core2 dependency to polyfill `std::io` features. This is an experimental feature and should be used with caution (see the `Cargo.toml` sketch after this list).
* CI runs the tests with `no-std`
* MSRV for `no-std` is 1.51 or so
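
For a consumer of the crate the feature split boils down to a manifest entry along these lines; a hedged sketch, with `X.Y` standing in for whichever release ships this change:

```toml
# Hypothetical downstream Cargo.toml; "X.Y" is a placeholder version.
# Feature names match the [features] section in the diff below.

# Default (std) build:
# bitcoin = "X.Y"

# no_std build: drop the default features (which include "std") and enable
# "no-std", which pulls in core2, hashbrown and bitcoin_hashes/alloc.
[dependencies]
bitcoin = { version = "X.Y", default-features = false, features = ["no-std"] }
```
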
Devrandom 2021-06-09 12:34:44 +02:00
parent abff973e83
commit 4826d0c6cc
42 changed files with 432 additions and 185 deletions

View File

@ -13,14 +13,17 @@ jobs:
env: env:
DO_COV: true DO_COV: true
AS_DEPENDENCY: true AS_DEPENDENCY: true
DO_NO_STD: true
- rust: beta - rust: beta
env: env:
AS_DEPENDENCY: true AS_DEPENDENCY: true
DO_NO_STD: true
- rust: nightly - rust: nightly
env: env:
DO_FUZZ: true DO_FUZZ: true
DO_BENCH: true DO_BENCH: true
AS_DEPENDENCY: true AS_DEPENDENCY: true
DO_NO_STD: true
- rust: 1.29.0 - rust: 1.29.0
env: env:
AS_DEPENDENCY: true AS_DEPENDENCY: true

View File

@ -12,7 +12,7 @@ readme = "README.md"
[features] [features]
default = [ "secp-recovery" ] default = [ "std", "secp-recovery" ]
base64 = [ "base64-compat" ] base64 = [ "base64-compat" ]
fuzztarget = [] fuzztarget = []
unstable = [] unstable = []
@ -21,14 +21,24 @@ use-serde = ["serde", "bitcoin_hashes/serde", "secp256k1/serde"]
secp-lowmemory = ["secp256k1/lowmemory"] secp-lowmemory = ["secp256k1/lowmemory"]
secp-recovery = ["secp256k1/recovery"] secp-recovery = ["secp256k1/recovery"]
# At least one of std, no-std must be enabled.
#
# The no-std feature doesn't disable std - you need to turn off the std feature for that by disabling default.
# Instead no-std enables additional features required for this crate to be usable without std.
# As a result, both can be enabled without conflict.
std = ["secp256k1/std", "bitcoin_hashes/std", "bech32/std"]
no-std = ["hashbrown", "core2/alloc", "bitcoin_hashes/alloc"]
[dependencies] [dependencies]
bech32 = "0.8.0" bech32 = { version = "0.8.1", default-features = false }
bitcoin_hashes = "0.9.6" bitcoin_hashes = { version = "0.10.0", default-features = false }
secp256k1 = "0.20.2" secp256k1 = { version = "0.20.2", default-features = false }
core2 = { version = "0.3.0", optional = true, default-features = false }
base64-compat = { version = "1.0.0", optional = true } base64-compat = { version = "1.0.0", optional = true }
bitcoinconsensus = { version = "0.19.0-3", optional = true } bitcoinconsensus = { version = "0.19.0-3", optional = true }
serde = { version = "1", features = [ "derive" ], optional = true } serde = { version = "1", features = [ "derive" ], optional = true }
hashbrown = { version = "0.8", optional = true }
[dev-dependencies] [dev-dependencies]
serde_json = "<1.0.45" serde_json = "<1.0.45"
@ -37,3 +47,10 @@ secp256k1 = { version = "0.20.0", features = [ "recovery", "rand-std" ] }
bincode = "1.3.1" bincode = "1.3.1"
# We need to pin ryu (transitive dep from serde_json) to stay compatible with Rust 1.22.0 # We need to pin ryu (transitive dep from serde_json) to stay compatible with Rust 1.22.0
ryu = "<1.0.5" ryu = "<1.0.5"
[[example]]
name = "bip32"
[[example]]
name = "handshake"
required-features = ["std"]

View File

@ -1,6 +1,12 @@
#!/bin/sh -ex #!/bin/sh -ex
FEATURES="base64 bitcoinconsensus use-serde rand" FEATURES="base64 bitcoinconsensus use-serde rand secp-recovery"
# Use toolchain if explicitly specified
if [ -n "$TOOLCHAIN" ]
then
alias cargo="cargo +$TOOLCHAIN"
fi
pin_common_verions() { pin_common_verions() {
cargo generate-lockfile --verbose cargo generate-lockfile --verbose
@ -10,7 +16,7 @@ pin_common_verions() {
} }
# Pin `cc` for Rust 1.29 # Pin `cc` for Rust 1.29
if [ -n "$PIN_VERSIONS" ]; then if [ "$PIN_VERSIONS" = true ]; then
pin_common_verions pin_common_verions
cargo update -p byteorder --precise "1.3.4" cargo update -p byteorder --precise "1.3.4"
fi fi
@ -21,20 +27,43 @@ then
fi fi
# Use toolchain if explicitly specified echo "********* Testing std *************"
if [ -n "$TOOLCHAIN" ] # Test without any features other than std first
then cargo test --verbose --no-default-features --features="std"
alias cargo="cargo +$TOOLCHAIN"
fi
# Test without any features first echo "********* Testing default *************"
cargo test --verbose --no-default-features
# Then test with the default features # Then test with the default features
cargo test --verbose cargo test --verbose
if [ "$DO_NO_STD" = true ]
then
echo "********* Testing no-std build *************"
# Build no_std, to make sure that cfg(test) doesn't hide any issues
cargo build --verbose --features="no-std" --no-default-features
# Build std + no_std, to make sure they are not incompatible
cargo build --verbose --features="no-std"
# Test no_std
cargo test --verbose --features="no-std" --no-default-features
# Build all features
cargo build --verbose --features="no-std $FEATURES" --no-default-features
# Build specific features
for feature in ${FEATURES}
do
cargo build --verbose --features="no-std $feature"
done
cargo run --example bip32 L1HKVVLHXiUhecWnwFYF6L3shkf1E12HUmuZTESvBXUdx3yqVP1D
cargo run --no-default-features --features no-std --example bip32 L1HKVVLHXiUhecWnwFYF6L3shkf1E12HUmuZTESvBXUdx3yqVP1D
fi
# Test each feature # Test each feature
for feature in ${FEATURES} for feature in ${FEATURES}
do do
echo "********* Testing "$feature" *************"
cargo test --verbose --features="$feature" cargo test --verbose --features="$feature"
done done
@ -55,7 +84,7 @@ then
fi fi
# Use as dependency if told to # Use as dependency if told to
if [ -n "$AS_DEPENDENCY" ] if [ "$AS_DEPENDENCY" = true ]
then then
cargo new dep_test cargo new dep_test
cd dep_test cd dep_test

View File

@ -10,6 +10,7 @@ use bitcoin::util::bip32::ExtendedPubKey;
use bitcoin::util::bip32::DerivationPath; use bitcoin::util::bip32::DerivationPath;
use bitcoin::util::bip32::ChildNumber; use bitcoin::util::bip32::ChildNumber;
use bitcoin::util::address::Address; use bitcoin::util::address::Address;
use bitcoin::secp256k1::ffi::types::AlignedType;
fn main() { fn main() {
// This example derives root xprv // This example derives root xprv
@ -36,7 +37,9 @@ fn main() {
let seed = wif.to_bytes(); let seed = wif.to_bytes();
// we need secp256k1 context for key derivation // we need secp256k1 context for key derivation
let secp = Secp256k1::new(); let mut buf: Vec<AlignedType> = Vec::new();
buf.resize(Secp256k1::preallocate_size(), AlignedType::zeroed());
let secp = Secp256k1::preallocated_new(buf.as_mut_slice()).unwrap();
// calculate root key from seed // calculate root key from seed
let root = ExtendedPrivKey::new_master(network, &seed).unwrap(); let root = ExtendedPrivKey::new_master(network, &seed).unwrap();
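
The constructor swap above is the no_std-relevant part of this example: `Secp256k1::new()` allocates its context internally and is not available with secp256k1's default features turned off, while `preallocated_new()` works on a caller-owned buffer. A minimal sketch of the pattern, using only the calls visible in the hunk:

```rust
use bitcoin::secp256k1::Secp256k1;
use bitcoin::secp256k1::ffi::types::AlignedType;

fn make_context() {
    // Ask the library how much scratch space a context needs, then hand it a
    // buffer we own instead of letting it allocate one internally.
    let mut buf = vec![AlignedType::zeroed(); Secp256k1::preallocate_size()];
    let secp = Secp256k1::preallocated_new(buf.as_mut_slice()).unwrap();
    // From here on `secp` is used exactly like a context from Secp256k1::new().
    drop(secp);
}
```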

View File

@ -20,6 +20,8 @@
//! these blocks and the blockchain. //! these blocks and the blockchain.
//! //!
use prelude::*;
use core::fmt; use core::fmt;
use util; use util;
@ -308,6 +310,7 @@ impl fmt::Display for Bip34Error {
} }
} }
#[cfg(feature = "std")]
impl ::std::error::Error for Bip34Error {} impl ::std::error::Error for Bip34Error {}
#[cfg(test)] #[cfg(test)]

View File

@ -19,9 +19,11 @@
//! single transaction //! single transaction
//! //!
use prelude::*;
use core::default::Default; use core::default::Default;
use hashes::hex::FromHex; use hashes::hex::{HexIterator, Error as HexError};
use hashes::sha256d; use hashes::sha256d;
use blockdata::opcodes; use blockdata::opcodes;
use blockdata::script; use blockdata::script;
@ -84,8 +86,11 @@ fn bitcoin_genesis_tx() -> Transaction {
}); });
// Outputs // Outputs
let script_bytes: Result<Vec<u8>, HexError> =
HexIterator::new("04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f").unwrap()
.collect();
let out_script = script::Builder::new() let out_script = script::Builder::new()
.push_slice(&Vec::from_hex("04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f").unwrap()) .push_slice(script_bytes.unwrap().as_slice())
.push_opcode(opcodes::all::OP_CHECKSIG) .push_opcode(opcodes::all::OP_CHECKSIG)
.into_script(); .into_script();
ret.output.push(TxOut { ret.output.push(TxOut {

View File

@ -22,6 +22,8 @@
#[cfg(feature = "serde")] use serde; #[cfg(feature = "serde")] use serde;
#[cfg(feature = "serde")] use prelude::*;
use core::{fmt, convert::From}; use core::{fmt, convert::From};
// Note: I am deliberately not implementing PartialOrd or Ord on the // Note: I am deliberately not implementing PartialOrd or Ord on the

View File

@ -24,8 +24,9 @@
//! This module provides the structures and functions needed to support scripts. //! This module provides the structures and functions needed to support scripts.
//! //!
use io; use prelude::*;
use io;
use core::{fmt, default::Default}; use core::{fmt, default::Default};
#[cfg(feature = "serde")] use serde; #[cfg(feature = "serde")] use serde;
@ -36,7 +37,7 @@ use consensus::{encode, Decodable, Encodable};
use hashes::{Hash, hex}; use hashes::{Hash, hex};
use policy::DUST_RELAY_TX_FEE; use policy::DUST_RELAY_TX_FEE;
#[cfg(feature="bitcoinconsensus")] use bitcoinconsensus; #[cfg(feature="bitcoinconsensus")] use bitcoinconsensus;
#[cfg(feature="bitcoinconsensus")] use std::convert; #[cfg(feature="bitcoinconsensus")] use core::convert::From;
#[cfg(feature="bitcoinconsensus")] use OutPoint; #[cfg(feature="bitcoinconsensus")] use OutPoint;
use util::ecdsa::PublicKey; use util::ecdsa::PublicKey;
@ -145,11 +146,12 @@ impl fmt::Display for Error {
} }
} }
#[cfg(feature = "std")]
impl ::std::error::Error for Error {} impl ::std::error::Error for Error {}
#[cfg(feature="bitcoinconsensus")] #[cfg(feature="bitcoinconsensus")]
#[doc(hidden)] #[doc(hidden)]
impl convert::From<bitcoinconsensus::Error> for Error { impl From<bitcoinconsensus::Error> for Error {
fn from(err: bitcoinconsensus::Error) -> Error { fn from(err: bitcoinconsensus::Error) -> Error {
match err { match err {
_ => Error::BitcoinConsensus(err) _ => Error::BitcoinConsensus(err)
@ -421,11 +423,11 @@ impl Script {
} else if self.is_witness_program() { } else if self.is_witness_program() {
32 + 4 + 1 + (107 / 4) + 4 + // The spend cost copied from Core 32 + 4 + 1 + (107 / 4) + 4 + // The spend cost copied from Core
8 + // The serialized size of the TxOut's amount field 8 + // The serialized size of the TxOut's amount field
self.consensus_encode(&mut ::std::io::sink()).unwrap() as u64 // The serialized size of this script_pubkey self.consensus_encode(&mut sink()).unwrap() as u64 // The serialized size of this script_pubkey
} else { } else {
32 + 4 + 1 + 107 + 4 + // The spend cost copied from Core 32 + 4 + 1 + 107 + 4 + // The spend cost copied from Core
8 + // The serialized size of the TxOut's amount field 8 + // The serialized size of the TxOut's amount field
self.consensus_encode(&mut ::std::io::sink()).unwrap() as u64 // The serialized size of this script_pubkey self.consensus_encode(&mut sink()).unwrap() as u64 // The serialized size of this script_pubkey
}; };
::Amount::from_sat(sats) ::Amount::from_sat(sats)
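
The `sink()` swap above keeps an existing trick working without std: `consensus_encode` returns the number of bytes written, so encoding into a writer that discards everything measures serialized size with no buffering. A small sketch of the same pattern from outside the crate (std sink shown; under `no-std` the internal polyfill added in this commit plays the same role):

```rust
use bitcoin::consensus::Encodable;
use bitcoin::Script;

// Encode into a writer that throws the bytes away; the Ok(usize) return value
// is the number of bytes written, i.e. the serialized length of the script.
fn serialized_len(script: &Script) -> usize {
    script
        .consensus_encode(&mut std::io::sink())
        .expect("writing to a sink cannot fail")
}
```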

View File

@ -23,9 +23,11 @@
//! This module provides the structures and functions needed to support transactions. //! This module provides the structures and functions needed to support transactions.
//! //!
use prelude::*;
use io; use io;
use core::{fmt, str, default::Default}; use core::{fmt, str, default::Default};
use std::error; #[cfg(feature = "std")] use std::error;
use hashes::{self, Hash, sha256d}; use hashes::{self, Hash, sha256d};
use hashes::hex::FromHex; use hashes::hex::FromHex;
@ -130,6 +132,7 @@ impl fmt::Display for ParseOutPointError {
} }
} }
#[cfg(feature = "std")]
impl error::Error for ParseOutPointError { impl error::Error for ParseOutPointError {
fn cause(&self) -> Option<&dyn error::Error> { fn cause(&self) -> Option<&dyn error::Error> {
match *self { match *self {
@ -630,6 +633,7 @@ impl fmt::Display for NonStandardSigHashType {
} }
} }
#[cfg(feature = "std")]
impl error::Error for NonStandardSigHashType {} impl error::Error for NonStandardSigHashType {}
/// Hashtype of an input's signature, encoded in the last byte of the signature /// Hashtype of an input's signature, encoded in the last byte of the signature
@ -1360,6 +1364,7 @@ mod tests {
use hashes::hex::FromHex; use hashes::hex::FromHex;
use std::collections::HashMap; use std::collections::HashMap;
use blockdata::script; use blockdata::script;
// a random recent segwit transaction from blockchain using both old and segwit inputs // a random recent segwit transaction from blockchain using both old and segwit inputs
let mut spending: Transaction = deserialize(Vec::from_hex("020000000001031cfbc8f54fbfa4a33a30068841371f80dbfe166211242213188428f437445c91000000006a47304402206fbcec8d2d2e740d824d3d36cc345b37d9f65d665a99f5bd5c9e8d42270a03a8022013959632492332200c2908459547bf8dbf97c65ab1a28dec377d6f1d41d3d63e012103d7279dfb90ce17fe139ba60a7c41ddf605b25e1c07a4ddcb9dfef4e7d6710f48feffffff476222484f5e35b3f0e43f65fc76e21d8be7818dd6a989c160b1e5039b7835fc00000000171600140914414d3c94af70ac7e25407b0689e0baa10c77feffffffa83d954a62568bbc99cc644c62eb7383d7c2a2563041a0aeb891a6a4055895570000000017160014795d04cc2d4f31480d9a3710993fbd80d04301dffeffffff06fef72f000000000017a91476fd7035cd26f1a32a5ab979e056713aac25796887a5000f00000000001976a914b8332d502a529571c6af4be66399cd33379071c588ac3fda0500000000001976a914fc1d692f8de10ae33295f090bea5fe49527d975c88ac522e1b00000000001976a914808406b54d1044c429ac54c0e189b0d8061667e088ac6eb68501000000001976a914dfab6085f3a8fb3e6710206a5a959313c5618f4d88acbba20000000000001976a914eb3026552d7e3f3073457d0bee5d4757de48160d88ac0002483045022100bee24b63212939d33d513e767bc79300051f7a0d433c3fcf1e0e3bf03b9eb1d70220588dc45a9ce3a939103b4459ce47500b64e23ab118dfc03c9caa7d6bfc32b9c601210354fd80328da0f9ae6eef2b3a81f74f9a6f66761fadf96f1d1d22b1fd6845876402483045022100e29c7e3a5efc10da6269e5fc20b6a1cb8beb92130cc52c67e46ef40aaa5cac5f0220644dd1b049727d991aece98a105563416e10a5ac4221abac7d16931842d5c322012103960b87412d6e169f30e12106bdf70122aabb9eb61f455518322a18b920a4dfa887d30700") let mut spending: Transaction = deserialize(Vec::from_hex("020000000001031cfbc8f54fbfa4a33a30068841371f80dbfe166211242213188428f437445c91000000006a47304402206fbcec8d2d2e740d824d3d36cc345b37d9f65d665a99f5bd5c9e8d42270a03a8022013959632492332200c2908459547bf8dbf97c65ab1a28dec377d6f1d41d3d63e012103d7279dfb90ce17fe139ba60a7c41ddf605b25e1c07a4ddcb9dfef4e7d6710f48feffffff476222484f5e35b3f0e43f65fc76e21d8be7818dd6a989c160b1e5039b7835fc00000000171600140914414d3c94af70ac7e25407b0689e0baa10c77feffffffa83d954a62568bbc99cc644c62eb7383d7c2a2563041a0aeb891a6a4055895570000000017160014795d04cc2d4f31480d9a3710993fbd80d04301dffeffffff06fef72f000000000017a91476fd7035cd26f1a32a5ab979e056713aac25796887a5000f00000000001976a914b8332d502a529571c6af4be66399cd33379071c588ac3fda0500000000001976a914fc1d692f8de10ae33295f090bea5fe49527d975c88ac522e1b00000000001976a914808406b54d1044c429ac54c0e189b0d8061667e088ac6eb68501000000001976a914dfab6085f3a8fb3e6710206a5a959313c5618f4d88acbba20000000000001976a914eb3026552d7e3f3073457d0bee5d4757de48160d88ac0002483045022100bee24b63212939d33d513e767bc79300051f7a0d433c3fcf1e0e3bf03b9eb1d70220588dc45a9ce3a939103b4459ce47500b64e23ab118dfc03c9caa7d6bfc32b9c601210354fd80328da0f9ae6eef2b3a81f74f9a6f66761fadf96f1d1d22b1fd6845876402483045022100e29c7e3a5efc10da6269e5fc20b6a1cb8beb92130cc52c67e46ef40aaa5cac5f0220644dd1b049727d991aece98a105563416e10a5ac4221abac7d16931842d5c322012103960b87412d6e169f30e12106bdf70122aabb9eb61f455518322a18b920a4dfa887d30700")
.unwrap().as_slice()).unwrap(); .unwrap().as_slice()).unwrap();

View File

@ -29,22 +29,23 @@
//! big-endian decimals, etc.) //! big-endian decimals, etc.)
//! //!
use prelude::*;
use core::{fmt, mem, u32, convert::From}; use core::{fmt, mem, u32, convert::From};
use std::borrow::Cow; #[cfg(feature = "std")] use std::error;
use std::error;
use hashes::hex::ToHex;
use hashes::{sha256d, Hash}; use hashes::{sha256d, Hash};
use hash_types::{BlockHash, FilterHash, TxMerkleNode, FilterHeader}; use hash_types::{BlockHash, FilterHash, TxMerkleNode, FilterHeader};
use io::{self, Cursor, Read, Write}; use io::{self, Cursor, Read};
use util::endian; use util::endian;
use util::psbt; use util::psbt;
use hashes::hex::ToHex;
use blockdata::transaction::{TxOut, Transaction, TxIn}; use blockdata::transaction::{TxOut, Transaction, TxIn};
use network::message_blockdata::Inventory; #[cfg(feature = "std")]
use network::address::{Address, AddrV2Message}; use network::{message_blockdata::Inventory, address::{Address, AddrV2Message}};
/// Encoding error /// Encoding error
#[derive(Debug)] #[derive(Debug)]
@ -104,7 +105,8 @@ impl fmt::Display for Error {
} }
} }
impl error::Error for Error { #[cfg(feature = "std")]
impl ::std::error::Error for Error {
fn cause(&self) -> Option<&dyn error::Error> { fn cause(&self) -> Option<&dyn error::Error> {
match *self { match *self {
Error::Io(ref e) => Some(e), Error::Io(ref e) => Some(e),
@ -240,7 +242,7 @@ macro_rules! decoder_fn {
($name:ident, $val_type:ty, $readfn:ident, $byte_len: expr) => { ($name:ident, $val_type:ty, $readfn:ident, $byte_len: expr) => {
#[inline] #[inline]
fn $name(&mut self) -> Result<$val_type, Error> { fn $name(&mut self) -> Result<$val_type, Error> {
debug_assert_eq!(::std::mem::size_of::<$val_type>(), $byte_len); // size_of isn't a constfn in 1.22 debug_assert_eq!(::core::mem::size_of::<$val_type>(), $byte_len); // size_of isn't a constfn in 1.22
let mut val = [0; $byte_len]; let mut val = [0; $byte_len];
self.read_exact(&mut val[..]).map_err(Error::Io)?; self.read_exact(&mut val[..]).map_err(Error::Io)?;
Ok(endian::$readfn(&val)) Ok(endian::$readfn(&val))
@ -248,7 +250,7 @@ macro_rules! decoder_fn {
} }
} }
impl<W: Write> WriteExt for W { impl<W: io::Write> WriteExt for W {
encoder_fn!(emit_u64, u64, u64_to_array_le); encoder_fn!(emit_u64, u64, u64_to_array_le);
encoder_fn!(emit_u32, u32, u32_to_array_le); encoder_fn!(emit_u32, u32, u32_to_array_le);
encoder_fn!(emit_u16, u16, u16_to_array_le); encoder_fn!(emit_u16, u16, u16_to_array_le);
@ -591,11 +593,12 @@ impl_vec!(TxMerkleNode);
impl_vec!(Transaction); impl_vec!(Transaction);
impl_vec!(TxOut); impl_vec!(TxOut);
impl_vec!(TxIn); impl_vec!(TxIn);
impl_vec!(Inventory);
impl_vec!(Vec<u8>); impl_vec!(Vec<u8>);
impl_vec!((u32, Address));
impl_vec!(u64); impl_vec!(u64);
impl_vec!(AddrV2Message);
#[cfg(feature = "std")] impl_vec!(Inventory);
#[cfg(feature = "std")] impl_vec!((u32, Address));
#[cfg(feature = "std")] impl_vec!(AddrV2Message);
fn consensus_encode_with_size<S: io::Write>(data: &[u8], mut s: S) -> Result<usize, io::Error> { fn consensus_encode_with_size<S: io::Write>(data: &[u8], mut s: S) -> Result<usize, io::Error> {
let vi_len = VarInt(data.len() as u64).consensus_encode(&mut s)?; let vi_len = VarInt(data.len() as u64).consensus_encode(&mut s)?;
@ -694,13 +697,13 @@ impl<'a, T: Encodable> Encodable for &'a mut T {
} }
} }
impl<T: Encodable> Encodable for ::std::rc::Rc<T> { impl<T: Encodable> Encodable for rc::Rc<T> {
fn consensus_encode<S: io::Write>(&self, s: S) -> Result<usize, io::Error> { fn consensus_encode<S: io::Write>(&self, s: S) -> Result<usize, io::Error> {
(&**self).consensus_encode(s) (&**self).consensus_encode(s)
} }
} }
impl<T: Encodable> Encodable for ::std::sync::Arc<T> { impl<T: Encodable> Encodable for sync::Arc<T> {
fn consensus_encode<S: io::Write>(&self, s: S) -> Result<usize, io::Error> { fn consensus_encode<S: io::Write>(&self, s: S) -> Result<usize, io::Error> {
(&**self).consensus_encode(s) (&**self).consensus_encode(s)
} }
@ -763,8 +766,8 @@ mod tests {
use consensus::{Encodable, deserialize_partial, Decodable}; use consensus::{Encodable, deserialize_partial, Decodable};
use util::endian::{u64_to_array_le, u32_to_array_le, u16_to_array_le}; use util::endian::{u64_to_array_le, u32_to_array_le, u16_to_array_le};
use secp256k1::rand::{thread_rng, Rng}; use secp256k1::rand::{thread_rng, Rng};
use network::message_blockdata::Inventory; #[cfg(feature = "std")]
use network::Address; use network::{Address, message_blockdata::Inventory};
#[test] #[test]
fn serialize_int_test() { fn serialize_int_test() {
@ -839,7 +842,7 @@ mod tests {
} }
fn test_varint_len(varint: VarInt, expected: usize) { fn test_varint_len(varint: VarInt, expected: usize) {
let mut encoder = io::Cursor::new(vec![]); let mut encoder = vec![];
assert_eq!(varint.consensus_encode(&mut encoder).unwrap(), expected); assert_eq!(varint.consensus_encode(&mut encoder).unwrap(), expected);
assert_eq!(varint.len(), expected); assert_eq!(varint.len(), expected);
} }
@ -970,10 +973,12 @@ mod tests {
test_len_is_max_vec::<Transaction>(); test_len_is_max_vec::<Transaction>();
test_len_is_max_vec::<TxOut>(); test_len_is_max_vec::<TxOut>();
test_len_is_max_vec::<TxIn>(); test_len_is_max_vec::<TxIn>();
test_len_is_max_vec::<Inventory>();
test_len_is_max_vec::<Vec<u8>>(); test_len_is_max_vec::<Vec<u8>>();
test_len_is_max_vec::<(u32, Address)>();
test_len_is_max_vec::<u64>(); test_len_is_max_vec::<u64>();
#[cfg(feature = "std")]
test_len_is_max_vec::<(u32, Address)>();
#[cfg(feature = "std")]
test_len_is_max_vec::<Inventory>();
} }
fn test_len_is_max_vec<T>() where Vec<T>: Decodable, T: fmt::Debug { fn test_len_is_max_vec<T>() where Vec<T>: Decodable, T: fmt::Debug {
@ -988,7 +993,7 @@ mod tests {
assert_eq!(deserialize(&[6u8, 0x41, 0x6e, 0x64, 0x72, 0x65, 0x77]).ok(), Some("Andrew".to_string())); assert_eq!(deserialize(&[6u8, 0x41, 0x6e, 0x64, 0x72, 0x65, 0x77]).ok(), Some("Andrew".to_string()));
assert_eq!( assert_eq!(
deserialize(&[6u8, 0x41, 0x6e, 0x64, 0x72, 0x65, 0x77]).ok(), deserialize(&[6u8, 0x41, 0x6e, 0x64, 0x72, 0x65, 0x77]).ok(),
Some(::std::borrow::Cow::Borrowed("Andrew")) Some(Cow::Borrowed("Andrew"))
); );
} }

View File

@ -162,7 +162,7 @@ macro_rules! display_from_debug {
macro_rules! hex_script (($s:expr) => (<$crate::Script as ::core::str::FromStr>::from_str($s).unwrap())); macro_rules! hex_script (($s:expr) => (<$crate::Script as ::core::str::FromStr>::from_str($s).unwrap()));
#[cfg(test)] #[cfg(test)]
macro_rules! hex_hash (($h:ident, $s:expr) => ($h::from_slice(&<Vec<u8> as $crate::hashes::hex::FromHex>::from_hex($s).unwrap()).unwrap())); macro_rules! hex_hash (($h:ident, $s:expr) => ($h::from_slice(&<$crate::prelude::Vec<u8> as $crate::hashes::hex::FromHex>::from_hex($s).unwrap()).unwrap()));
macro_rules! serde_string_impl { macro_rules! serde_string_impl {
($name:ident, $expecting:expr) => { ($name:ident, $expecting:expr) => {
@ -563,10 +563,14 @@ macro_rules! user_enum {
fn from_str(s: &str) -> Result<Self, Self::Err> { fn from_str(s: &str) -> Result<Self, Self::Err> {
match s { match s {
$($txt => Ok($name::$elem)),*, $($txt => Ok($name::$elem)),*,
_ => Err($crate::io::Error::new( _ => {
#[cfg(not(feature = "std"))] let message = "Unknown network";
#[cfg(feature = "std")] let message = format!("Unknown network (type {})", s);
Err($crate::io::Error::new(
$crate::io::ErrorKind::InvalidInput, $crate::io::ErrorKind::InvalidInput,
format!("Unknown network (type {})", s), message,
)), ))
}
} }
} }
} }
@ -607,7 +611,7 @@ macro_rules! user_enum {
self.visit_str(v) self.visit_str(v)
} }
fn visit_string<E>(self, v: String) -> Result<Self::Value, E> fn visit_string<E>(self, v: $crate::prelude::String) -> Result<Self::Value, E>
where where
E: $crate::serde::de::Error, E: $crate::serde::de::Error,
{ {

View File

@ -23,6 +23,8 @@
//! software. //! software.
//! //!
#![cfg_attr(all(not(feature = "std"), not(test)), no_std)]
// Experimental features we need // Experimental features we need
#![cfg_attr(all(test, feature = "unstable"), feature(test))] #![cfg_attr(all(test, feature = "unstable"), feature(test))]
@ -38,12 +40,26 @@
#![deny(unused_must_use)] #![deny(unused_must_use)]
#![deny(broken_intra_doc_links)] #![deny(broken_intra_doc_links)]
extern crate core; #[cfg(not(any(feature = "std", feature = "no-std")))]
compile_error!("at least one of the `std` or `no-std` features must be enabled");
#[cfg(feature = "no-std")]
#[macro_use]
extern crate alloc;
#[cfg(feature = "no-std")]
extern crate core2;
#[cfg(any(feature = "std", test))]
extern crate core; // for Rust 1.29 and no-std tests
// Re-exported dependencies. // Re-exported dependencies.
#[macro_use] pub extern crate bitcoin_hashes as hashes; #[macro_use] pub extern crate bitcoin_hashes as hashes;
pub extern crate secp256k1; pub extern crate secp256k1;
pub extern crate bech32; pub extern crate bech32;
#[cfg(feature = "no-std")]
extern crate hashbrown;
#[cfg(feature = "base64")] pub extern crate base64; #[cfg(feature = "base64")] pub extern crate base64;
#[cfg(feature="bitcoinconsensus")] extern crate bitcoinconsensus; #[cfg(feature="bitcoinconsensus")] extern crate bitcoinconsensus;
@ -98,14 +114,68 @@ pub use util::ecdsa::PrivateKey;
#[deprecated(since = "0.26.1", note = "Please use `ecdsa::PublicKey` instead")] #[deprecated(since = "0.26.1", note = "Please use `ecdsa::PublicKey` instead")]
pub use util::ecdsa::PublicKey; pub use util::ecdsa::PublicKey;
#[cfg(feature = "std")]
use std::io; use std::io;
#[cfg(not(feature = "std"))]
use core2::io;
#[cfg(not(feature = "std"))]
mod io_extras {
/// A writer which will move data into the void.
pub struct Sink {
_priv: (),
}
/// Creates an instance of a writer which will successfully consume all data.
pub const fn sink() -> Sink {
Sink { _priv: () }
}
impl core2::io::Write for Sink {
#[inline]
fn write(&mut self, buf: &[u8]) -> core2::io::Result<usize> {
Ok(buf.len())
}
#[inline]
fn flush(&mut self) -> core2::io::Result<()> {
Ok(())
}
}
}
mod prelude {
#[cfg(all(not(feature = "std"), not(test)))]
pub use alloc::{string::{String, ToString}, vec::Vec, boxed::Box, borrow::{Cow, ToOwned}, slice, rc, sync};
#[cfg(any(feature = "std", test))]
pub use std::{string::{String, ToString}, vec::Vec, boxed::Box, borrow::{Cow, ToOwned}, slice, rc, sync};
#[cfg(all(not(feature = "std"), not(test)))]
pub use alloc::collections::{BTreeMap, btree_map};
#[cfg(any(feature = "std", test))]
pub use std::collections::{BTreeMap, btree_map};
#[cfg(feature = "std")]
pub use std::io::sink;
#[cfg(not(feature = "std"))]
pub use io_extras::sink;
#[cfg(feature = "hashbrown")]
pub use hashbrown::HashSet;
#[cfg(not(feature = "hashbrown"))]
pub use std::collections::HashSet;
}
#[cfg(all(test, feature = "unstable"))] use tests::EmptyWrite; #[cfg(all(test, feature = "unstable"))] use tests::EmptyWrite;
#[cfg(all(test, feature = "unstable"))] #[cfg(all(test, feature = "unstable"))]
mod tests { mod tests {
use std::io::{IoSlice, Result, Write}; use core::fmt::Arguments;
use std::fmt::Arguments; use io::{IoSlice, Result, Write};
#[derive(Default, Clone, Debug, PartialEq, Eq)] #[derive(Default, Clone, Debug, PartialEq, Eq)]
pub struct EmptyWrite; pub struct EmptyWrite;
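
The `prelude` module and the `io` alias above give the rest of the crate a single import surface that resolves to `std` or to `alloc`/`core2` depending on features. A hedged sketch of the pattern the later hunks follow; the helper function is illustrative only, not something this commit adds:

```rust
// Inside any module of the crate after this change:
use prelude::*;   // Vec, String, Cow, BTreeMap, ... from std or alloc
use io;           // std::io or core2::io, selected by the cfgs above

use consensus::Encodable;

// Illustrative helper: Vec<u8> implements the selected io::Write in both
// configurations, so encoding into a Vec works with or without std.
pub(crate) fn serialize_to_vec<T: Encodable>(value: &T) -> Vec<u8> {
    let mut buf = Vec::new();
    value.consensus_encode(&mut buf).expect("writes to a Vec do not fail");
    buf
}
```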

View File

@ -17,6 +17,7 @@
//! This module defines the structures and functions needed to encode //! This module defines the structures and functions needed to encode
//! network addresses in Bitcoin messages. //! network addresses in Bitcoin messages.
//! //!
use prelude::*;
use core::{fmt, iter}; use core::{fmt, iter};
use std::net::{SocketAddr, Ipv6Addr, SocketAddrV4, SocketAddrV6, Ipv4Addr, ToSocketAddrs}; use std::net::{SocketAddr, Ipv6Addr, SocketAddrV4, SocketAddrV6, Ipv4Addr, ToSocketAddrs};

View File

@ -19,8 +19,9 @@
//! also defines (de)serialization routines for many primitives. //! also defines (de)serialization routines for many primitives.
//! //!
use prelude::*;
use core::{mem, fmt, iter}; use core::{mem, fmt, iter};
use std::borrow::Cow;
use io; use io;
use blockdata::block; use blockdata::block;

View File

@ -18,6 +18,8 @@
//! Bitcoin data (blocks and transactions) around. //! Bitcoin data (blocks and transactions) around.
//! //!
use prelude::*;
use io; use io;
use hashes::sha256d; use hashes::sha256d;
@ -149,7 +151,7 @@ impl_consensus_encoding!(GetHeadersMessage, version, locator_hashes, stop_hash);
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{GetHeadersMessage, GetBlocksMessage}; use super::{Vec, GetHeadersMessage, GetBlocksMessage};
use hashes::hex::FromHex; use hashes::hex::FromHex;

View File

@ -18,8 +18,9 @@
//! capabilities //! capabilities
//! //!
use prelude::*;
use io; use io;
use std::borrow::Cow;
use network::address::Address; use network::address::Address;
use network::constants::{self, ServiceFlags}; use network::constants::{self, ServiceFlags};

View File

@ -20,17 +20,17 @@
use io; use io;
use core::fmt; use core::fmt;
use std::error; #[cfg(feature = "std")] use std::error;
pub mod constants; pub mod constants;
pub mod address; #[cfg(feature = "std")] pub mod address;
pub use self::address::Address; #[cfg(feature = "std")] pub use self::address::Address;
pub mod message; #[cfg(feature = "std")] pub mod message;
pub mod message_blockdata; #[cfg(feature = "std")] pub mod message_blockdata;
pub mod message_network; #[cfg(feature = "std")] pub mod message_network;
pub mod message_filter; #[cfg(feature = "std")] pub mod message_filter;
pub mod stream_reader; #[cfg(feature = "std")] pub mod stream_reader;
/// Network error /// Network error
#[derive(Debug)] #[derive(Debug)]
@ -60,8 +60,8 @@ impl From<io::Error> for Error {
} }
} }
#[cfg(feature = "std")]
impl error::Error for Error { impl error::Error for Error {
fn cause(&self) -> Option<&dyn error::Error> { fn cause(&self) -> Option<&dyn error::Error> {
match *self { match *self {
Error::Io(ref e) => Some(e), Error::Io(ref e) => Some(e),

View File

@ -20,6 +20,8 @@
//! (like can happen with reading from TCP socket) //! (like can happen with reading from TCP socket)
//! //!
use prelude::*;
use core::fmt; use core::fmt;
use io::{self, Read}; use io::{self, Read};

View File

@ -7,14 +7,14 @@ pub mod btreemap_byte_values {
// NOTE: This module can be exactly copied to use with HashMap. // NOTE: This module can be exactly copied to use with HashMap.
use ::std::collections::BTreeMap; use prelude::*;
use hashes::hex::{FromHex, ToHex}; use hashes::hex::{FromHex, ToHex};
use serde; use serde;
pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S) pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S)
-> Result<S::Ok, S::Error> where -> Result<S::Ok, S::Error> where
S: serde::Serializer, S: serde::Serializer,
T: serde::Serialize + ::std::hash::Hash + Eq + Ord, T: serde::Serialize + ::core::hash::Hash + Eq + Ord,
{ {
use serde::ser::SerializeMap; use serde::ser::SerializeMap;
@ -33,17 +33,17 @@ pub mod btreemap_byte_values {
pub fn deserialize<'de, D, T>(d: D) pub fn deserialize<'de, D, T>(d: D)
-> Result<BTreeMap<T, Vec<u8>>, D::Error> where -> Result<BTreeMap<T, Vec<u8>>, D::Error> where
D: serde::Deserializer<'de>, D: serde::Deserializer<'de>,
T: serde::Deserialize<'de> + ::std::hash::Hash + Eq + Ord, T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
{ {
use ::std::marker::PhantomData; use ::core::marker::PhantomData;
struct Visitor<T>(PhantomData<T>); struct Visitor<T>(PhantomData<T>);
impl<'de, T> serde::de::Visitor<'de> for Visitor<T> where impl<'de, T> serde::de::Visitor<'de> for Visitor<T> where
T: serde::Deserialize<'de> + ::std::hash::Hash + Eq + Ord, T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
{ {
type Value = BTreeMap<T, Vec<u8>>; type Value = BTreeMap<T, Vec<u8>>;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { fn expecting(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
write!(f, "a map with hexadecimal values") write!(f, "a map with hexadecimal values")
} }
@ -74,13 +74,13 @@ pub mod btreemap_as_seq {
// NOTE: This module can be exactly copied to use with HashMap. // NOTE: This module can be exactly copied to use with HashMap.
use ::std::collections::BTreeMap; use prelude::*;
use serde; use serde;
pub fn serialize<S, T, U>(v: &BTreeMap<T, U>, s: S) pub fn serialize<S, T, U>(v: &BTreeMap<T, U>, s: S)
-> Result<S::Ok, S::Error> where -> Result<S::Ok, S::Error> where
S: serde::Serializer, S: serde::Serializer,
T: serde::Serialize + ::std::hash::Hash + Eq + Ord, T: serde::Serialize + ::core::hash::Hash + Eq + Ord,
U: serde::Serialize, U: serde::Serialize,
{ {
use serde::ser::SerializeSeq; use serde::ser::SerializeSeq;
@ -100,19 +100,19 @@ pub mod btreemap_as_seq {
pub fn deserialize<'de, D, T, U>(d: D) pub fn deserialize<'de, D, T, U>(d: D)
-> Result<BTreeMap<T, U>, D::Error> where -> Result<BTreeMap<T, U>, D::Error> where
D: serde::Deserializer<'de>, D: serde::Deserializer<'de>,
T: serde::Deserialize<'de> + ::std::hash::Hash + Eq + Ord, T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
U: serde::Deserialize<'de>, U: serde::Deserialize<'de>,
{ {
use ::std::marker::PhantomData; use ::core::marker::PhantomData;
struct Visitor<T, U>(PhantomData<(T, U)>); struct Visitor<T, U>(PhantomData<(T, U)>);
impl<'de, T, U> serde::de::Visitor<'de> for Visitor<T, U> where impl<'de, T, U> serde::de::Visitor<'de> for Visitor<T, U> where
T: serde::Deserialize<'de> + ::std::hash::Hash + Eq + Ord, T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
U: serde::Deserialize<'de>, U: serde::Deserialize<'de>,
{ {
type Value = BTreeMap<T, U>; type Value = BTreeMap<T, U>;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { fn expecting(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
write!(f, "a sequence of pairs") write!(f, "a sequence of pairs")
} }
@ -143,7 +143,7 @@ pub mod btreemap_as_seq_byte_values {
// NOTE: This module can be exactly copied to use with HashMap. // NOTE: This module can be exactly copied to use with HashMap.
use ::std::collections::BTreeMap; use prelude::*;
use serde; use serde;
/// A custom key-value pair type that serialized the bytes as hex. /// A custom key-value pair type that serialized the bytes as hex.
@ -165,7 +165,7 @@ pub mod btreemap_as_seq_byte_values {
pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S) pub fn serialize<S, T>(v: &BTreeMap<T, Vec<u8>>, s: S)
-> Result<S::Ok, S::Error> where -> Result<S::Ok, S::Error> where
S: serde::Serializer, S: serde::Serializer,
T: serde::Serialize + ::std::hash::Hash + Eq + Ord + 'static, T: serde::Serialize + ::core::hash::Hash + Eq + Ord + 'static,
{ {
use serde::ser::SerializeSeq; use serde::ser::SerializeSeq;
@ -184,17 +184,17 @@ pub mod btreemap_as_seq_byte_values {
pub fn deserialize<'de, D, T>(d: D) pub fn deserialize<'de, D, T>(d: D)
-> Result<BTreeMap<T, Vec<u8>>, D::Error> where -> Result<BTreeMap<T, Vec<u8>>, D::Error> where
D: serde::Deserializer<'de>, D: serde::Deserializer<'de>,
T: serde::Deserialize<'de> + ::std::hash::Hash + Eq + Ord, T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
{ {
use ::std::marker::PhantomData; use ::core::marker::PhantomData;
struct Visitor<T>(PhantomData<T>); struct Visitor<T>(PhantomData<T>);
impl<'de, T> serde::de::Visitor<'de> for Visitor<T> where impl<'de, T> serde::de::Visitor<'de> for Visitor<T> where
T: serde::Deserialize<'de> + ::std::hash::Hash + Eq + Ord, T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord,
{ {
type Value = BTreeMap<T, Vec<u8>>; type Value = BTreeMap<T, Vec<u8>>;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { fn expecting(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
write!(f, "a sequence of pairs") write!(f, "a sequence of pairs")
} }
@ -239,19 +239,19 @@ pub mod hex_bytes {
pub fn deserialize<'de, D, B>(d: D) -> Result<B, D::Error> pub fn deserialize<'de, D, B>(d: D) -> Result<B, D::Error>
where D: serde::Deserializer<'de>, B: serde::Deserialize<'de> + FromHex, where D: serde::Deserializer<'de>, B: serde::Deserialize<'de> + FromHex,
{ {
struct Visitor<B>(::std::marker::PhantomData<B>); struct Visitor<B>(::core::marker::PhantomData<B>);
impl<'de, B: FromHex> serde::de::Visitor<'de> for Visitor<B> { impl<'de, B: FromHex> serde::de::Visitor<'de> for Visitor<B> {
type Value = B; type Value = B;
fn expecting(&self, formatter: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { fn expecting(&self, formatter: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
formatter.write_str("an ASCII hex string") formatter.write_str("an ASCII hex string")
} }
fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
where E: serde::de::Error, where E: serde::de::Error,
{ {
if let Ok(hex) = ::std::str::from_utf8(v) { if let Ok(hex) = ::core::str::from_utf8(v) {
FromHex::from_hex(hex).map_err(E::custom) FromHex::from_hex(hex).map_err(E::custom)
} else { } else {
return Err(E::invalid_value(serde::de::Unexpected::Bytes(v), &self)); return Err(E::invalid_value(serde::de::Unexpected::Bytes(v), &self));
@ -269,7 +269,7 @@ pub mod hex_bytes {
if !d.is_human_readable() { if !d.is_human_readable() {
serde::Deserialize::deserialize(d) serde::Deserialize::deserialize(d)
} else { } else {
d.deserialize_str(Visitor(::std::marker::PhantomData)) d.deserialize_str(Visitor(::core::marker::PhantomData))
} }
} }
} }

View File

@ -33,9 +33,11 @@
//! let address = Address::p2pkh(&public_key, Network::Bitcoin); //! let address = Address::p2pkh(&public_key, Network::Bitcoin);
//! ``` //! ```
use prelude::*;
use core::fmt; use core::fmt;
use core::str::FromStr; use core::str::FromStr;
use std::error; #[cfg(feature = "std")] use std::error;
use bech32; use bech32;
use hashes::Hash; use hashes::Hash;
@ -84,7 +86,8 @@ impl fmt::Display for Error {
} }
} }
impl error::Error for Error { #[cfg(feature = "std")]
impl ::std::error::Error for Error {
fn cause(&self) -> Option<&dyn error::Error> { fn cause(&self) -> Option<&dyn error::Error> {
match *self { match *self {
Error::Base58(ref e) => Some(e), Error::Base58(ref e) => Some(e),
@ -524,8 +527,8 @@ impl FromStr for Address {
} }
} }
impl ::std::fmt::Debug for Address { impl fmt::Debug for Address {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.to_string()) write!(f, "{}", self.to_string())
} }
} }

View File

@ -14,6 +14,8 @@
//! We refer to the documentation on the types for more information. //! We refer to the documentation on the types for more information.
//! //!
use prelude::*;
use core::{ops, default, str::FromStr, cmp::Ordering}; use core::{ops, default, str::FromStr, cmp::Ordering};
use core::fmt::{self, Write}; use core::fmt::{self, Write};
@ -111,6 +113,7 @@ impl fmt::Display for ParseAmountError {
} }
} }
#[cfg(feature = "std")]
impl ::std::error::Error for ParseAmountError {} impl ::std::error::Error for ParseAmountError {}
fn is_too_precise(s: &str, precision: usize) -> bool { fn is_too_precise(s: &str, precision: usize) -> bool {
@ -972,8 +975,8 @@ pub mod serde {
use serde::{Deserializer, Serializer, de}; use serde::{Deserializer, Serializer, de};
use util::amount::serde::SerdeAmountForOpt; use util::amount::serde::SerdeAmountForOpt;
use std::fmt; use core::fmt;
use std::marker::PhantomData; use core::marker::PhantomData;
pub fn serialize<A: SerdeAmountForOpt, S: Serializer>( pub fn serialize<A: SerdeAmountForOpt, S: Serializer>(
a: &Option<A>, a: &Option<A>,
@ -1035,8 +1038,8 @@ pub mod serde {
use serde::{Deserializer, Serializer, de}; use serde::{Deserializer, Serializer, de};
use util::amount::serde::SerdeAmountForOpt; use util::amount::serde::SerdeAmountForOpt;
use std::fmt; use core::fmt;
use std::marker::PhantomData; use core::marker::PhantomData;
pub fn serialize<A: SerdeAmountForOpt, S: Serializer>( pub fn serialize<A: SerdeAmountForOpt, S: Serializer>(
a: &Option<A>, a: &Option<A>,
@ -1081,6 +1084,7 @@ pub mod serde {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
#[cfg(feature = "std")]
use std::panic; use std::panic;
use core::str::FromStr; use core::str::FromStr;
@ -1113,7 +1117,11 @@ mod tests {
assert_eq!(b, ssat(10)); assert_eq!(b, ssat(10));
b %= 3; b %= 3;
assert_eq!(b, ssat(1)); assert_eq!(b, ssat(1));
}
#[cfg(feature = "std")]
#[test]
fn test_overflows() {
// panic on overflow // panic on overflow
let result = panic::catch_unwind(|| Amount::max_value() + Amount::from_sat(1)); let result = panic::catch_unwind(|| Amount::max_value() + Amount::from_sat(1));
assert!(result.is_err()); assert!(result.is_err());

View File

@ -14,7 +14,8 @@
//! Base58 encoder and decoder //! Base58 encoder and decoder
use std::error; use prelude::*;
use core::{fmt, str, iter, slice}; use core::{fmt, str, iter, slice};
use hashes::{sha256d, Hash}; use hashes::{sha256d, Hash};
@ -57,7 +58,8 @@ impl fmt::Display for Error {
} }
} }
impl error::Error for Error {} #[cfg(feature = "std")]
impl ::std::error::Error for Error {}
/// Vector-like object that holds the first 100 elements on the stack. If more space is needed it /// Vector-like object that holds the first 100 elements on the stack. If more space is needed it
/// will be allocated on the heap. /// will be allocated on the heap.

View File

@ -25,6 +25,8 @@ use blockdata::script::Script;
use blockdata::transaction::{Transaction, TxIn, SigHashType}; use blockdata::transaction::{Transaction, TxIn, SigHashType};
use consensus::{encode, Encodable}; use consensus::{encode, Encodable};
use prelude::*;
use io; use io;
use core::ops::{Deref, DerefMut}; use core::ops::{Deref, DerefMut};

View File

@ -45,7 +45,8 @@
//! ``` //! ```
//! //!
use std::collections::HashSet; use prelude::*;
use io::{self as io, Cursor}; use io::{self as io, Cursor};
use core::fmt::{self, Display, Formatter}; use core::fmt::{self, Display, Formatter};
use core::cmp::{self, Ordering}; use core::cmp::{self, Ordering};
@ -73,6 +74,7 @@ pub enum Error {
Io(io::Error), Io(io::Error),
} }
#[cfg(feature = "std")]
impl ::std::error::Error for Error {} impl ::std::error::Error for Error {}
impl Display for Error { impl Display for Error {
@ -123,14 +125,14 @@ impl BlockFilter {
/// Compute a SCRIPT_FILTER that contains spent and output scripts /// Compute a SCRIPT_FILTER that contains spent and output scripts
pub fn new_script_filter<M>(block: &Block, script_for_coin: M) -> Result<BlockFilter, Error> pub fn new_script_filter<M>(block: &Block, script_for_coin: M) -> Result<BlockFilter, Error>
where M: Fn(&OutPoint) -> Result<Script, Error> { where M: Fn(&OutPoint) -> Result<Script, Error> {
let mut out = Cursor::new(Vec::new()); let mut out = Vec::new();
{ {
let mut writer = BlockFilterWriter::new(&mut out, block); let mut writer = BlockFilterWriter::new(&mut out, block);
writer.add_output_scripts(); writer.add_output_scripts();
writer.add_input_scripts(script_for_coin)?; writer.add_input_scripts(script_for_coin)?;
writer.finish()?; writer.finish()?;
} }
Ok(BlockFilter { content: out.into_inner() }) Ok(BlockFilter { content: out })
} }
/// match any query pattern /// match any query pattern
@ -361,9 +363,9 @@ impl<'a> GCSFilterWriter<'a> {
mapped.sort(); mapped.sort();
// write number of elements as varint // write number of elements as varint
let mut encoder = io::Cursor::new(Vec::new()); let mut encoder = Vec::new();
VarInt(mapped.len() as u64).consensus_encode(&mut encoder).unwrap(); VarInt(mapped.len() as u64).consensus_encode(&mut encoder).unwrap();
let mut wrote = self.writer.write(encoder.into_inner().as_slice())?; let mut wrote = self.writer.write(encoder.as_slice())?;
// write out deltas of sorted values into a Golonb-Rice coded bit stream // write out deltas of sorted values into a Golonb-Rice coded bit stream
let mut writer = BitStreamWriter::new(self.writer); let mut writer = BitStreamWriter::new(self.writer);
@ -508,8 +510,7 @@ impl<'a> BitStreamWriter<'a> {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::collections::{HashSet, HashMap}; use io::Cursor;
use std::io::Cursor;
use hash_types::BlockHash; use hash_types::BlockHash;
use hashes::hex::FromHex; use hashes::hex::FromHex;
@ -520,6 +521,7 @@ mod test {
use self::serde_json::{Value}; use self::serde_json::{Value};
use consensus::encode::deserialize; use consensus::encode::deserialize;
use std::collections::HashMap;
#[test] #[test]
fn test_blockfilters() { fn test_blockfilters() {
@ -607,7 +609,7 @@ mod test {
patterns.insert(Vec::from_hex("eeeeee").unwrap()); patterns.insert(Vec::from_hex("eeeeee").unwrap());
patterns.insert(Vec::from_hex("ffffff").unwrap()); patterns.insert(Vec::from_hex("ffffff").unwrap());
let mut out = Cursor::new(Vec::new()); let mut out = Vec::new();
{ {
let mut writer = GCSFilterWriter::new(&mut out, 0, 0, M, P); let mut writer = GCSFilterWriter::new(&mut out, 0, 0, M, P);
for p in &patterns { for p in &patterns {
@ -616,7 +618,7 @@ mod test {
writer.finish().unwrap(); writer.finish().unwrap();
} }
let bytes = out.into_inner(); let bytes = out;
{ {
let mut query = Vec::new(); let mut query = Vec::new();
@ -659,7 +661,7 @@ mod test {
#[test] #[test]
fn test_bit_stream() { fn test_bit_stream() {
let mut out = Cursor::new(Vec::new()); let mut out = Vec::new();
{ {
let mut writer = BitStreamWriter::new(&mut out); let mut writer = BitStreamWriter::new(&mut out);
writer.write(0, 1).unwrap(); // 0 writer.write(0, 1).unwrap(); // 0
@ -671,7 +673,7 @@ mod test {
writer.write(7, 7).unwrap(); // 0000111 writer.write(7, 7).unwrap(); // 0000111
writer.flush().unwrap(); writer.flush().unwrap();
} }
let bytes = out.into_inner(); let bytes = out;
assert_eq!("01011010110000110000000001110000", format!("{:08b}{:08b}{:08b}{:08b}", bytes[0], bytes[1], bytes[2], bytes[3])); assert_eq!("01011010110000110000000001110000", format!("{:08b}{:08b}{:08b}{:08b}", bytes[0], bytes[1], bytes[2], bytes[3]));
{ {
let mut input = Cursor::new(bytes); let mut input = Cursor::new(bytes);

View File

@ -16,8 +16,10 @@
//! Implementation of BIP32 hierarchical deterministic wallets, as defined //! Implementation of BIP32 hierarchical deterministic wallets, as defined
//! at <https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki> //! at <https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki>
use prelude::*;
use core::{fmt, str::FromStr, default::Default}; use core::{fmt, str::FromStr, default::Default};
use std::error; #[cfg(feature = "std")] use std::error;
#[cfg(feature = "serde")] use serde; #[cfg(feature = "serde")] use serde;
use hash_types::XpubIdentifier; use hash_types::XpubIdentifier;
@ -274,7 +276,7 @@ impl ::core::iter::FromIterator<ChildNumber> for DerivationPath {
impl<'a> ::core::iter::IntoIterator for &'a DerivationPath { impl<'a> ::core::iter::IntoIterator for &'a DerivationPath {
type Item = &'a ChildNumber; type Item = &'a ChildNumber;
type IntoIter = ::std::slice::Iter<'a, ChildNumber>; type IntoIter = slice::Iter<'a, ChildNumber>;
fn into_iter(self) -> Self::IntoIter { fn into_iter(self) -> Self::IntoIter {
self.0.iter() self.0.iter()
} }
@ -457,6 +459,7 @@ impl fmt::Display for Error {
} }
} }
#[cfg(feature = "std")]
impl error::Error for Error { impl error::Error for Error {
fn cause(&self) -> Option<&dyn error::Error> { fn cause(&self) -> Option<&dyn error::Error> {
if let Error::Ecdsa(ref e) = *self { if let Error::Ecdsa(ref e) = *self {

View File

@ -20,8 +20,10 @@
#![cfg_attr(not(test), deprecated)] #![cfg_attr(not(test), deprecated)]
use prelude::*;
use core::fmt; use core::fmt;
use std::error; #[cfg(feature = "std")] use std::error;
use secp256k1::{self, Secp256k1}; use secp256k1::{self, Secp256k1};
use PrivateKey; use PrivateKey;
@ -72,6 +74,7 @@ impl fmt::Display for Error {
} }
} }
#[cfg(feature = "std")]
impl ::std::error::Error for Error { impl ::std::error::Error for Error {
fn cause(&self) -> Option<&dyn error::Error> { fn cause(&self) -> Option<&dyn error::Error> {
match *self { match *self {

View File

@ -16,6 +16,8 @@
//! ECDSA keys used in Bitcoin that can be roundtrip (de)serialized. //! ECDSA keys used in Bitcoin that can be roundtrip (de)serialized.
//! //!
use prelude::*;
use core::{ops, str::FromStr}; use core::{ops, str::FromStr};
use core::fmt::{self, Write as _fmtWrite}; use core::fmt::{self, Write as _fmtWrite};
use io; use io;
@ -100,7 +102,17 @@ impl PublicKey {
}; };
reader.read_exact(&mut bytes[1..])?; reader.read_exact(&mut bytes[1..])?;
Self::from_slice(bytes).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) Self::from_slice(bytes).map_err(|e|{
// Need a static string for core2
#[cfg(feature = "std")]
let reason = e;
#[cfg(not(feature = "std"))]
let reason = match e {
Error::Base58(_) => "base58 error",
Error::Secp256k1(_) => "secp256k1 error",
};
io::Error::new(io::ErrorKind::InvalidData, reason)
})
} }
/// Serialize the public key to bytes /// Serialize the public key to bytes
@ -299,7 +311,7 @@ impl<'de> ::serde::Deserialize<'de> for PrivateKey {
impl<'de> ::serde::de::Visitor<'de> for WifVisitor { impl<'de> ::serde::de::Visitor<'de> for WifVisitor {
type Value = PrivateKey; type Value = PrivateKey;
fn expecting(&self, formatter: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { fn expecting(&self, formatter: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
formatter.write_str("an ASCII WIF string") formatter.write_str("an ASCII WIF string")
} }
@ -307,7 +319,7 @@ impl<'de> ::serde::Deserialize<'de> for PrivateKey {
where where
E: ::serde::de::Error, E: ::serde::de::Error,
{ {
if let Ok(s) = ::std::str::from_utf8(v) { if let Ok(s) = ::core::str::from_utf8(v) {
PrivateKey::from_str(s).map_err(E::custom) PrivateKey::from_str(s).map_err(E::custom)
} else { } else {
Err(E::invalid_value(::serde::de::Unexpected::Bytes(v), &self)) Err(E::invalid_value(::serde::de::Unexpected::Bytes(v), &self))
@ -350,7 +362,7 @@ impl<'de> ::serde::Deserialize<'de> for PublicKey {
impl<'de> ::serde::de::Visitor<'de> for HexVisitor { impl<'de> ::serde::de::Visitor<'de> for HexVisitor {
type Value = PublicKey; type Value = PublicKey;
fn expecting(&self, formatter: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { fn expecting(&self, formatter: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
formatter.write_str("an ASCII hex string") formatter.write_str("an ASCII hex string")
} }
@ -358,7 +370,7 @@ impl<'de> ::serde::Deserialize<'de> for PublicKey {
where where
E: ::serde::de::Error, E: ::serde::de::Error,
{ {
if let Ok(hex) = ::std::str::from_utf8(v) { if let Ok(hex) = ::core::str::from_utf8(v) {
PublicKey::from_str(hex).map_err(E::custom) PublicKey::from_str(hex).map_err(E::custom)
} else { } else {
Err(E::invalid_value(::serde::de::Unexpected::Bytes(v), &self)) Err(E::invalid_value(::serde::de::Unexpected::Bytes(v), &self))
@ -379,7 +391,7 @@ impl<'de> ::serde::Deserialize<'de> for PublicKey {
impl<'de> ::serde::de::Visitor<'de> for BytesVisitor { impl<'de> ::serde::de::Visitor<'de> for BytesVisitor {
type Value = PublicKey; type Value = PublicKey;
fn expecting(&self, formatter: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { fn expecting(&self, formatter: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
formatter.write_str("a bytestring") formatter.write_str("a bytestring")
} }

View File

@ -15,6 +15,8 @@
//! //!
//! Utility functions related to hashing data, including merkleization //! Utility functions related to hashing data, including merkleization
use prelude::*;
use io; use io;
use core::cmp::min; use core::cmp::min;

View File

@ -20,7 +20,7 @@
pub use util::ecdsa::{PrivateKey, PublicKey}; pub use util::ecdsa::{PrivateKey, PublicKey};
use core::fmt; use core::fmt;
use std::error; #[cfg(feature = "std")] use std::error;
use secp256k1; use secp256k1;
use util::base58; use util::base58;
@ -44,7 +44,8 @@ impl fmt::Display for Error {
} }
} }
impl error::Error for Error { #[cfg(feature = "std")]
impl ::std::error::Error for Error {
fn cause(&self) -> Option<&dyn error::Error> { fn cause(&self) -> Option<&dyn error::Error> {
match *self { match *self {
Error::Base58(ref e) => Some(e), Error::Base58(ref e) => Some(e),

View File

@ -51,7 +51,8 @@
//! assert_eq!(1, index.len()); //! assert_eq!(1, index.len());
//! assert_eq!(1, index[0]); //! assert_eq!(1, index[0]);
//! ``` //! ```
use std::collections::HashSet; use prelude::*;
use io; use io;
use hashes::Hash; use hashes::Hash;
@ -392,10 +393,10 @@ pub struct MerkleBlock {
} }
impl MerkleBlock { impl MerkleBlock {
/// Create a MerkleBlock from a block, that should contain proofs for the txids. /// Create a MerkleBlock from a block, that contains proofs for specific txids.
/// ///
/// The `block` is a full block containing the header and transactions and `match_txids` is a /// The `block` is a full block containing the header and transactions and `match_txids` is a
/// set containing the transaction ids that should be included in the partial merkle tree. /// function that returns true for the ids that should be included in the partial merkle tree.
/// ///
/// # Examples /// # Examples
/// ///
@ -420,8 +421,8 @@ impl MerkleBlock {
/// // Create a merkle block containing a single transaction /// // Create a merkle block containing a single transaction
/// let txid = Txid::from_hex( /// let txid = Txid::from_hex(
/// "5a4ebf66822b0b2d56bd9dc64ece0bc38ee7844a23ff1d7320a88c5fdb2ad3e2").unwrap(); /// "5a4ebf66822b0b2d56bd9dc64ece0bc38ee7844a23ff1d7320a88c5fdb2ad3e2").unwrap();
/// let match_txids = vec![txid].into_iter().collect(); /// let match_txids: Vec<Txid> = vec![txid].into_iter().collect();
/// let mb = MerkleBlock::from_block(&block, &match_txids); /// let mb = MerkleBlock::from_block_with_predicate(&block, |t| match_txids.contains(t));
/// ///
/// // Authenticate and extract matched transaction ids /// // Authenticate and extract matched transaction ids
/// let mut matches: Vec<Txid> = vec![]; /// let mut matches: Vec<Txid> = vec![];
@ -429,23 +430,31 @@ impl MerkleBlock {
/// assert!(mb.extract_matches(&mut matches, &mut index).is_ok()); /// assert!(mb.extract_matches(&mut matches, &mut index).is_ok());
/// assert_eq!(txid, matches[0]); /// assert_eq!(txid, matches[0]);
/// ``` /// ```
pub fn from_block(block: &Block, match_txids: &HashSet<Txid>) -> Self { pub fn from_block_with_predicate<F>(block: &Block, match_txids: F) -> Self
where F: Fn(&Txid) -> bool {
let block_txids: Vec<_> = block.txdata.iter().map(Transaction::txid).collect(); let block_txids: Vec<_> = block.txdata.iter().map(Transaction::txid).collect();
Self::from_header_txids(&block.header, &block_txids, match_txids) Self::from_header_txids_with_predicate(&block.header, &block_txids, match_txids)
} }
/// Create a MerkleBlock from the block's header and txids, that should contain proofs for match_txids. /// Create a MerkleBlock from a block, that contains proofs for specific txids.
#[cfg(feature = "std")]
#[deprecated(since="0.26.2", note="use from_block_with_predicate")]
pub fn from_block(block: &Block, match_txids: &::std::collections::HashSet<Txid>) -> Self {
Self::from_block_with_predicate(block, |t| match_txids.contains(t))
}
/// Create a MerkleBlock from the block's header and txids, that contain proofs for specific txids.
/// ///
/// The `header` is the block header, `block_txids` is the full list of txids included in the block and /// The `header` is the block header, `block_txids` is the full list of txids included in the block and
/// `match_txids` is a set containing the transaction ids that should be included in the partial merkle tree. /// `match_txids` is a function that returns true for the ids that should be included in the partial merkle tree.
pub fn from_header_txids( pub fn from_header_txids_with_predicate<F>(
header: &BlockHeader, header: &BlockHeader,
block_txids: &[Txid], block_txids: &[Txid],
match_txids: &HashSet<Txid>, match_txids: F,
) -> Self { ) -> Self where F: Fn(&Txid) -> bool {
let matches: Vec<bool> = block_txids let matches: Vec<bool> = block_txids
.iter() .iter()
.map(|txid| match_txids.contains(txid)) .map(match_txids)
.collect(); .collect();
let pmt = PartialMerkleTree::from_txids(&block_txids, &matches); let pmt = PartialMerkleTree::from_txids(&block_txids, &matches);
@ -455,6 +464,17 @@ impl MerkleBlock {
} }
} }
/// Create a MerkleBlock from the block's header and txids, that should contain proofs for match_txids.
#[cfg(feature = "std")]
#[deprecated(since="0.26.2", note="use from_header_txids_with_predicate")]
pub fn from_header_txids(
header: &BlockHeader,
block_txids: &[Txid],
match_txids: &::std::collections::HashSet<Txid>,
) -> Self {
Self::from_header_txids_with_predicate(header, block_txids, |t| match_txids.contains(t))
}
/// Extract the matching txid's represented by this partial merkle tree /// Extract the matching txid's represented by this partial merkle tree
/// and their respective indices within the partial tree. /// and their respective indices within the partial tree.
/// returns Ok(()) on success, or error in case of failure /// returns Ok(()) on success, or error in case of failure
@ -642,9 +662,9 @@ mod tests {
let txid1 = txids[0]; let txid1 = txids[0];
let txid2 = txids[1]; let txid2 = txids[1];
let txids = txids.into_iter().collect(); let txids = vec![txid1, txid2];
let merkle_block = MerkleBlock::from_block(&block, &txids); let merkle_block = MerkleBlock::from_block_with_predicate(&block, |t| txids.contains(t));
assert_eq!(merkle_block.header.block_hash(), block.block_hash()); assert_eq!(merkle_block.header.block_hash(), block.block_hash());
@ -672,12 +692,12 @@ mod tests {
#[test] #[test]
fn merkleblock_construct_from_txids_not_found() { fn merkleblock_construct_from_txids_not_found() {
let block = get_block_13b8a(); let block = get_block_13b8a();
let txids = ["c0ffee00003bafa802c8aa084379aa98d9fcd632ddc2ed9782b586ec87451f20"] let txids: Vec<Txid> = ["c0ffee00003bafa802c8aa084379aa98d9fcd632ddc2ed9782b586ec87451f20"]
.iter() .iter()
.map(|hex| Txid::from_hex(hex).unwrap()) .map(|hex| Txid::from_hex(hex).unwrap())
.collect(); .collect();
let merkle_block = MerkleBlock::from_block(&block, &txids); let merkle_block = MerkleBlock::from_block_with_predicate(&block, |t| txids.contains(t));
assert_eq!(merkle_block.header.block_hash(), block.block_hash()); assert_eq!(merkle_block.header.block_hash(), block.block_hash());
View File
@ -16,6 +16,8 @@
//! //!
//! Various utility functions //! Various utility functions
use prelude::*;
use hashes::{sha256d, Hash, HashEngine}; use hashes::{sha256d, Hash, HashEngine};
use blockdata::opcodes; use blockdata::opcodes;
@ -29,8 +31,9 @@ pub const BITCOIN_SIGNED_MSG_PREFIX: &[u8] = b"\x18Bitcoin Signed Message:\n";
#[cfg(feature = "secp-recovery")] #[cfg(feature = "secp-recovery")]
mod message_signing { mod message_signing {
#[cfg(feature = "base64")] use prelude::*;
use core::fmt; use core::fmt;
use std::error; #[cfg(feature = "std")] use std::error;
use hashes::sha256d; use hashes::sha256d;
use secp256k1; use secp256k1;
@ -60,6 +63,7 @@ mod message_signing {
} }
} }
#[cfg(feature = "std")]
impl error::Error for MessageSignatureError { impl error::Error for MessageSignatureError {
fn cause(&self) -> Option<&dyn error::Error> { fn cause(&self) -> Option<&dyn error::Error> {
match *self { match *self {
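This `#[cfg(feature = "std")]` gate is the pattern applied to every error type touched by the commit: `Display` stays on `core::fmt` so it is available everywhere, while the `std::error::Error` impl is compiled only when `std` is enabled. A self-contained sketch of the idea with a made-up error type (not a type from this crate), assuming a crate that defines a `std` cargo feature:

```rust
use core::fmt;

/// Illustrative error type; not part of rust-bitcoin.
#[derive(Debug)]
pub enum DemoError {
    TooShort(usize),
}

// Display only needs core, so it works with and without std.
impl fmt::Display for DemoError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            DemoError::TooShort(len) => write!(f, "input too short: {} bytes", len),
        }
    }
}

// std::error::Error lives in std, so the impl only exists when the std feature is on.
#[cfg(feature = "std")]
impl ::std::error::Error for DemoError {}
```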
View File
@ -35,8 +35,10 @@ pub mod bip158;
pub(crate) mod endian; pub(crate) mod endian;
use prelude::*;
use io;
use core::fmt; use core::fmt;
use std::error; #[cfg(feature = "std")] use std::error;
use network; use network;
use consensus::encode; use consensus::encode;
@ -87,7 +89,8 @@ impl fmt::Display for Error {
} }
} }
impl error::Error for Error { #[cfg(feature = "std")]
impl ::std::error::Error for Error {
fn cause(&self) -> Option<&dyn error::Error> { fn cause(&self) -> Option<&dyn error::Error> {
match *self { match *self {
Error::Encode(ref e) => Some(e), Error::Encode(ref e) => Some(e),
@ -110,3 +113,18 @@ impl From<network::Error> for Error {
Error::Network(e) Error::Network(e)
} }
} }
// core2 doesn't have read_to_end
pub(crate) fn read_to_end<D: io::Read>(mut d: D) -> Result<Vec<u8>, io::Error> {
let mut result = vec![];
let mut buf = [0u8; 64];
loop {
match d.read(&mut buf) {
Ok(0) => break,
Ok(n) => result.extend_from_slice(&buf[0..n]),
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {},
Err(e) => return Err(e.into()),
};
}
Ok(result)
}
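The helper mirrors `std::io::Read::read_to_end` on top of the crate's `io` facade (which resolves to `core2::io` under `no-std`): read in 64-byte chunks, retry on `Interrupted`, stop at EOF. A standalone sketch against `std::io` showing the same loop and how it would be called; the in-crate version is `pub(crate)` and not callable from outside, so this copy is for illustration only:

```rust
use std::io::{self, Cursor, Read};

// Same shape as the crate-internal helper, written against std::io.
fn read_to_end<D: Read>(mut d: D) -> Result<Vec<u8>, io::Error> {
    let mut result = vec![];
    let mut buf = [0u8; 64];
    loop {
        match d.read(&mut buf) {
            Ok(0) => break,                                            // EOF
            Ok(n) => result.extend_from_slice(&buf[0..n]),             // keep the bytes read
            Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {} // retry
            Err(e) => return Err(e),
        };
    }
    Ok(result)
}

fn main() -> Result<(), io::Error> {
    let bytes = read_to_end(Cursor::new(vec![0xdeu8, 0xad, 0xbe, 0xef]))?;
    assert_eq!(bytes, vec![0xdeu8, 0xad, 0xbe, 0xef]);
    Ok(())
}
```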
View File
@ -12,7 +12,8 @@
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>. // If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
// //
use std::error; use prelude::*;
use core::fmt; use core::fmt;
use blockdata::transaction::Transaction; use blockdata::transaction::Transaction;
@ -105,7 +106,8 @@ impl fmt::Display for Error {
} }
} }
impl error::Error for Error {} #[cfg(feature = "std")]
impl ::std::error::Error for Error {}
#[doc(hidden)] #[doc(hidden)]
impl From<hashes::Error> for Error { impl From<hashes::Error> for Error {
View File
@ -14,7 +14,7 @@
#[allow(unused_macros)] #[allow(unused_macros)]
macro_rules! hex_psbt { macro_rules! hex_psbt {
($s:expr) => { $crate::consensus::deserialize::<$crate::util::psbt::PartiallySignedTransaction>(&<Vec<u8> as $crate::hashes::hex::FromHex>::from_hex($s).unwrap()) }; ($s:expr) => { $crate::consensus::deserialize::<$crate::util::psbt::PartiallySignedTransaction>(&<$crate::prelude::Vec<u8> as $crate::hashes::hex::FromHex>::from_hex($s).unwrap()) };
} }
macro_rules! merge { macro_rules! merge {
@ -45,7 +45,7 @@ macro_rules! impl_psbt_deserialize {
macro_rules! impl_psbt_serialize { macro_rules! impl_psbt_serialize {
($thing:ty) => { ($thing:ty) => {
impl $crate::util::psbt::serialize::Serialize for $thing { impl $crate::util::psbt::serialize::Serialize for $thing {
fn serialize(&self) -> Vec<u8> { fn serialize(&self) -> $crate::prelude::Vec<u8> {
$crate::consensus::serialize(self) $crate::consensus::serialize(self)
} }
} }
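These macros implement the crate's own `util::psbt::serialize::Serialize`/`Deserialize` traits, which are plain `Vec<u8>` conversions (hence the switch to `$crate::prelude::Vec<u8>` above). A small usage sketch with `Script`, assuming the `std` feature and that the traits are importable from `bitcoin::util::psbt::serialize` as in released versions:

```rust
use bitcoin::blockdata::script::Script;
use bitcoin::util::psbt::serialize::{Deserialize, Serialize};

fn main() {
    let script = Script::new(); // empty script, just something to round-trip
    // PSBT-level serialization of a script is its raw byte vector (no length prefix).
    let bytes: Vec<u8> = script.serialize();
    let back = Script::deserialize(&bytes).expect("raw script bytes always deserialize");
    assert_eq!(script, back);
}
```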
@ -118,11 +118,11 @@ macro_rules! impl_psbt_insert_pair {
if !$raw_key.key.is_empty() { if !$raw_key.key.is_empty() {
let key_val: $keyed_key_type = $crate::util::psbt::serialize::Deserialize::deserialize(&$raw_key.key)?; let key_val: $keyed_key_type = $crate::util::psbt::serialize::Deserialize::deserialize(&$raw_key.key)?;
match $slf.$keyed_name.entry(key_val) { match $slf.$keyed_name.entry(key_val) {
::std::collections::btree_map::Entry::Vacant(empty_key) => { $crate::prelude::btree_map::Entry::Vacant(empty_key) => {
let val: $keyed_value_type = $crate::util::psbt::serialize::Deserialize::deserialize(&$raw_value)?; let val: $keyed_value_type = $crate::util::psbt::serialize::Deserialize::deserialize(&$raw_value)?;
empty_key.insert(val); empty_key.insert(val);
} }
::std::collections::btree_map::Entry::Occupied(_) => return Err($crate::util::psbt::Error::DuplicateKey($raw_key).into()), $crate::prelude::btree_map::Entry::Occupied(_) => return Err($crate::util::psbt::Error::DuplicateKey($raw_key).into()),
} }
} else { } else {
return Err($crate::util::psbt::Error::InvalidKey($raw_key).into()); return Err($crate::util::psbt::Error::InvalidKey($raw_key).into());
@ -180,7 +180,7 @@ macro_rules! impl_psbt_hash_deserialize {
macro_rules! impl_psbt_hash_serialize { macro_rules! impl_psbt_hash_serialize {
($hash_type:ty) => { ($hash_type:ty) => {
impl $crate::util::psbt::serialize::Serialize for $hash_type { impl $crate::util::psbt::serialize::Serialize for $hash_type {
fn serialize(&self) -> Vec<u8> { fn serialize(&self) -> $crate::prelude::Vec<u8> {
self.into_inner().to_vec() self.into_inner().to_vec()
} }
} }
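The `use prelude::*;` imports and `$crate::prelude::btree_map` paths in these hunks rely on a new `prelude` module added elsewhere in this commit (not shown here), which re-exports alloc and collection types from either `std` or `alloc`. A rough sketch of that idea, with contents assumed rather than copied from the real module; the `no-std` path also presumes an `extern crate alloc;` and the optional `hashbrown` dependency:

```rust
// Hypothetical prelude: pick the source of alloc types based on the std feature.
#[cfg(feature = "std")]
pub use std::{
    borrow::ToOwned,
    boxed::Box,
    collections::{btree_map, BTreeMap, BTreeSet},
    string::{String, ToString},
    vec::Vec,
};

#[cfg(not(feature = "std"))]
pub use alloc::{
    borrow::ToOwned,
    boxed::Box,
    collections::{btree_map, BTreeMap, BTreeSet},
    string::{String, ToString},
    vec::Vec,
};

// HashMap comes from std when available, otherwise from the hashbrown crate.
#[cfg(feature = "std")]
pub use std::collections::HashMap;
#[cfg(not(feature = "std"))]
pub use hashbrown::HashMap;
```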
View File
@ -12,8 +12,8 @@
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>. // If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
// //
use std::collections::BTreeMap; use prelude::*;
use std::collections::btree_map::Entry;
use io::{self, Cursor, Read}; use io::{self, Cursor, Read};
use core::cmp; use core::cmp;
@ -89,12 +89,12 @@ impl Map for Global {
match raw_key.type_value { match raw_key.type_value {
PSBT_GLOBAL_UNSIGNED_TX => return Err(Error::DuplicateKey(raw_key).into()), PSBT_GLOBAL_UNSIGNED_TX => return Err(Error::DuplicateKey(raw_key).into()),
PSBT_GLOBAL_PROPRIETARY => match self.proprietary.entry(raw::ProprietaryKey::from_key(raw_key.clone())?) { PSBT_GLOBAL_PROPRIETARY => match self.proprietary.entry(raw::ProprietaryKey::from_key(raw_key.clone())?) {
Entry::Vacant(empty_key) => {empty_key.insert(raw_value);}, btree_map::Entry::Vacant(empty_key) => {empty_key.insert(raw_value);},
Entry::Occupied(_) => return Err(Error::DuplicateKey(raw_key).into()), btree_map::Entry::Occupied(_) => return Err(Error::DuplicateKey(raw_key).into()),
} }
_ => match self.unknown.entry(raw_key) { _ => match self.unknown.entry(raw_key) {
Entry::Vacant(empty_key) => {empty_key.insert(raw_value);}, btree_map::Entry::Vacant(empty_key) => {empty_key.insert(raw_value);},
Entry::Occupied(k) => return Err(Error::DuplicateKey(k.key().clone()).into()), btree_map::Entry::Occupied(k) => return Err(Error::DuplicateKey(k.key().clone()).into()),
} }
} }
@ -183,10 +183,10 @@ impl Map for Global {
// Merging xpubs // Merging xpubs
for (xpub, (fingerprint1, derivation1)) in other.xpub { for (xpub, (fingerprint1, derivation1)) in other.xpub {
match self.xpub.entry(xpub) { match self.xpub.entry(xpub) {
Entry::Vacant(entry) => { btree_map::Entry::Vacant(entry) => {
entry.insert((fingerprint1, derivation1)); entry.insert((fingerprint1, derivation1));
}, },
Entry::Occupied(mut entry) => { btree_map::Entry::Occupied(mut entry) => {
// Here in case of the conflict we select the version with algorithm: // Here in case of the conflict we select the version with algorithm:
// 1) if everything is equal we do nothing // 1) if everything is equal we do nothing
// 2) report an error if // 2) report an error if
@ -215,7 +215,7 @@ impl Map for Global {
} }
return Err(psbt::Error::MergeConflict(format!( return Err(psbt::Error::MergeConflict(format!(
"global xpub {} has inconsistent key sources", xpub "global xpub {} has inconsistent key sources", xpub
).to_owned())); )));
} }
} }
} }
@ -321,12 +321,12 @@ impl Decodable for Global {
} }
} }
PSBT_GLOBAL_PROPRIETARY => match proprietary.entry(raw::ProprietaryKey::from_key(pair.key.clone())?) { PSBT_GLOBAL_PROPRIETARY => match proprietary.entry(raw::ProprietaryKey::from_key(pair.key.clone())?) {
Entry::Vacant(empty_key) => {empty_key.insert(pair.value);}, btree_map::Entry::Vacant(empty_key) => {empty_key.insert(pair.value);},
Entry::Occupied(_) => return Err(Error::DuplicateKey(pair.key).into()), btree_map::Entry::Occupied(_) => return Err(Error::DuplicateKey(pair.key).into()),
} }
_ => match unknowns.entry(pair.key) { _ => match unknowns.entry(pair.key) {
Entry::Vacant(empty_key) => {empty_key.insert(pair.value);}, btree_map::Entry::Vacant(empty_key) => {empty_key.insert(pair.value);},
Entry::Occupied(k) => return Err(Error::DuplicateKey(k.key().clone()).into()), btree_map::Entry::Occupied(k) => return Err(Error::DuplicateKey(k.key().clone()).into()),
} }
} }
} }
View File
@ -12,8 +12,9 @@
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>. // If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
// //
use prelude::*;
use io; use io;
use std::collections::btree_map::{BTreeMap, Entry};
use blockdata::script::Script; use blockdata::script::Script;
use blockdata::transaction::{SigHashType, Transaction, TxOut}; use blockdata::transaction::{SigHashType, Transaction, TxOut};
@ -177,14 +178,14 @@ impl Map for Input {
psbt_insert_hash_pair(&mut self.hash256_preimages, raw_key, raw_value, error::PsbtHash::Hash256)?; psbt_insert_hash_pair(&mut self.hash256_preimages, raw_key, raw_value, error::PsbtHash::Hash256)?;
} }
PSBT_IN_PROPRIETARY => match self.proprietary.entry(raw::ProprietaryKey::from_key(raw_key.clone())?) { PSBT_IN_PROPRIETARY => match self.proprietary.entry(raw::ProprietaryKey::from_key(raw_key.clone())?) {
::std::collections::btree_map::Entry::Vacant(empty_key) => {empty_key.insert(raw_value);}, btree_map::Entry::Vacant(empty_key) => {empty_key.insert(raw_value);},
::std::collections::btree_map::Entry::Occupied(_) => return Err(Error::DuplicateKey(raw_key).into()), btree_map::Entry::Occupied(_) => return Err(Error::DuplicateKey(raw_key).into()),
} }
_ => match self.unknown.entry(raw_key) { _ => match self.unknown.entry(raw_key) {
Entry::Vacant(empty_key) => { btree_map::Entry::Vacant(empty_key) => {
empty_key.insert(raw_value); empty_key.insert(raw_value);
} }
Entry::Occupied(k) => { btree_map::Entry::Occupied(k) => {
return Err(Error::DuplicateKey(k.key().clone()).into()) return Err(Error::DuplicateKey(k.key().clone()).into())
} }
}, },
@ -307,7 +308,7 @@ where
} }
let key_val: H = Deserialize::deserialize(&raw_key.key)?; let key_val: H = Deserialize::deserialize(&raw_key.key)?;
match map.entry(key_val) { match map.entry(key_val) {
Entry::Vacant(empty_key) => { btree_map::Entry::Vacant(empty_key) => {
let val: Vec<u8> = Deserialize::deserialize(&raw_value)?; let val: Vec<u8> = Deserialize::deserialize(&raw_value)?;
if <H as hashes::Hash>::hash(&val) != key_val { if <H as hashes::Hash>::hash(&val) != key_val {
return Err(psbt::Error::InvalidPreimageHashPair { return Err(psbt::Error::InvalidPreimageHashPair {
@ -320,6 +321,6 @@ where
empty_key.insert(val); empty_key.insert(val);
Ok(()) Ok(())
} }
Entry::Occupied(_) => return Err(psbt::Error::DuplicateKey(raw_key).into()), btree_map::Entry::Occupied(_) => return Err(psbt::Error::DuplicateKey(raw_key).into()),
} }
} }
View File
@ -12,6 +12,8 @@
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>. // If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
// //
use prelude::*;
use io; use io;
use consensus::encode; use consensus::encode;
View File
@ -12,9 +12,9 @@
// If not, see <http://creativecommons.org/publicdomain/zero/1.0/>. // If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
// //
use prelude::*;
use io; use io;
use std::collections::BTreeMap;
use std::collections::btree_map::Entry;
use blockdata::script::Script; use blockdata::script::Script;
use consensus::encode; use consensus::encode;
@ -79,12 +79,12 @@ impl Map for Output {
} }
} }
PSBT_OUT_PROPRIETARY => match self.proprietary.entry(raw::ProprietaryKey::from_key(raw_key.clone())?) { PSBT_OUT_PROPRIETARY => match self.proprietary.entry(raw::ProprietaryKey::from_key(raw_key.clone())?) {
Entry::Vacant(empty_key) => {empty_key.insert(raw_value);}, btree_map::Entry::Vacant(empty_key) => {empty_key.insert(raw_value);},
Entry::Occupied(_) => return Err(Error::DuplicateKey(raw_key.clone()).into()), btree_map::Entry::Occupied(_) => return Err(Error::DuplicateKey(raw_key.clone()).into()),
} }
_ => match self.unknown.entry(raw_key) { _ => match self.unknown.entry(raw_key) {
Entry::Vacant(empty_key) => {empty_key.insert(raw_value);}, btree_map::Entry::Vacant(empty_key) => {empty_key.insert(raw_value);},
Entry::Occupied(k) => return Err(Error::DuplicateKey(k.key().clone()).into()), btree_map::Entry::Occupied(k) => return Err(Error::DuplicateKey(k.key().clone()).into()),
} }
} }
View File
@ -23,6 +23,8 @@ use blockdata::transaction::Transaction;
use consensus::{encode, Encodable, Decodable}; use consensus::{encode, Encodable, Decodable};
use consensus::encode::MAX_VEC_SIZE; use consensus::encode::MAX_VEC_SIZE;
use prelude::*;
use io; use io;
mod error; mod error;
@ -95,8 +97,8 @@ impl PartiallySignedTransaction {
#[cfg(feature = "base64")] #[cfg(feature = "base64")]
mod display_from_str { mod display_from_str {
use super::PartiallySignedTransaction; use super::PartiallySignedTransaction;
use std::fmt::{Display, Formatter, self}; use core::fmt::{Display, Formatter, self};
use std::str::FromStr; use core::str::FromStr;
use consensus::encode::{Error, self}; use consensus::encode::{Error, self};
use ::base64::display::Base64Display; use ::base64::display::Base64Display;
@ -118,6 +120,7 @@ mod display_from_str {
} }
} }
#[cfg(feature = "std")]
impl ::std::error::Error for PsbtParseError { } impl ::std::error::Error for PsbtParseError { }
impl Display for PartiallySignedTransaction { impl Display for PartiallySignedTransaction {
@ -215,7 +218,6 @@ mod tests {
use hashes::{sha256, hash160, Hash, ripemd160}; use hashes::{sha256, hash160, Hash, ripemd160};
use hash_types::Txid; use hash_types::Txid;
use std::collections::BTreeMap;
use secp256k1::Secp256k1; use secp256k1::Secp256k1;
@ -230,6 +232,7 @@ mod tests {
use super::PartiallySignedTransaction; use super::PartiallySignedTransaction;
use util::psbt::raw::ProprietaryKey; use util::psbt::raw::ProprietaryKey;
use std::collections::BTreeMap;
#[test] #[test]
fn trivial_psbt() { fn trivial_psbt() {
@ -464,7 +467,6 @@ mod tests {
} }
mod bip_vectors { mod bip_vectors {
use std::collections::BTreeMap;
#[cfg(feature = "base64")] #[cfg(feature = "base64")]
use std::str::FromStr; use std::str::FromStr;
@ -477,6 +479,7 @@ mod tests {
use util::psbt::map::{Map, Global, Input, Output}; use util::psbt::map::{Map, Global, Input, Output};
use util::psbt::raw; use util::psbt::raw;
use util::psbt::{PartiallySignedTransaction, Error}; use util::psbt::{PartiallySignedTransaction, Error};
use std::collections::BTreeMap;
#[test] #[test]
#[should_panic(expected = "InvalidMagic")] #[should_panic(expected = "InvalidMagic")]
View File
@ -17,12 +17,14 @@
//! Raw PSBT key-value pairs as defined at //! Raw PSBT key-value pairs as defined at
//! <https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki>. //! <https://github.com/bitcoin/bips/blob/master/bip-0174.mediawiki>.
use prelude::*;
use core::fmt; use core::fmt;
use io;
use io;
use consensus::encode::{self, ReadExt, WriteExt, Decodable, Encodable, VarInt, serialize, deserialize, MAX_VEC_SIZE}; use consensus::encode::{self, ReadExt, WriteExt, Decodable, Encodable, VarInt, serialize, deserialize, MAX_VEC_SIZE};
use hashes::hex; use hashes::hex;
use util::psbt::Error; use util::psbt::Error;
use util::read_to_end;
/// A PSBT key in its raw byte form. /// A PSBT key in its raw byte form.
#[derive(Debug, PartialEq, Hash, Eq, Clone, Ord, PartialOrd)] #[derive(Debug, PartialEq, Hash, Eq, Clone, Ord, PartialOrd)]
@ -152,9 +154,8 @@ impl<Subtype> Encodable for ProprietaryKey<Subtype> where Subtype: Copy + From<u
impl<Subtype> Decodable for ProprietaryKey<Subtype> where Subtype: Copy + From<u8> + Into<u8> { impl<Subtype> Decodable for ProprietaryKey<Subtype> where Subtype: Copy + From<u8> + Into<u8> {
fn consensus_decode<D: io::Read>(mut d: D) -> Result<Self, encode::Error> { fn consensus_decode<D: io::Read>(mut d: D) -> Result<Self, encode::Error> {
let prefix = Vec::<u8>::consensus_decode(&mut d)?; let prefix = Vec::<u8>::consensus_decode(&mut d)?;
let mut key = vec![];
let subtype = Subtype::from(d.read_u8()?); let subtype = Subtype::from(d.read_u8()?);
d.read_to_end(&mut key)?; let key = read_to_end(d)?;
Ok(ProprietaryKey { Ok(ProprietaryKey {
prefix, prefix,
View File
@ -17,6 +17,8 @@
//! Defines traits used for (de)serializing PSBT values into/from raw //! Defines traits used for (de)serializing PSBT values into/from raw
//! bytes in PSBT key-value pairs. //! bytes in PSBT key-value pairs.
use prelude::*;
use io; use io;
use blockdata::script::Script; use blockdata::script::Script;
View File
@ -450,7 +450,7 @@ macro_rules! construct_uint {
fn deserialize<D: $crate::serde::Deserializer<'de>>( fn deserialize<D: $crate::serde::Deserializer<'de>>(
deserializer: D, deserializer: D,
) -> Result<Self, D::Error> { ) -> Result<Self, D::Error> {
use ::std::fmt; use ::core::fmt;
use $crate::hashes::hex::FromHex; use $crate::hashes::hex::FromHex;
use $crate::serde::de; use $crate::serde::de;
struct Visitor; struct Visitor;
@ -465,7 +465,7 @@ macro_rules! construct_uint {
where where
E: de::Error, E: de::Error,
{ {
let bytes = Vec::from_hex(s) let bytes = $crate::prelude::Vec::from_hex(s)
.map_err(|_| de::Error::invalid_value(de::Unexpected::Str(s), &self))?; .map_err(|_| de::Error::invalid_value(de::Unexpected::Str(s), &self))?;
$name::from_be_slice(&bytes) $name::from_be_slice(&bytes)
.map_err(|_| de::Error::invalid_length(bytes.len() * 2, &self)) .map_err(|_| de::Error::invalid_length(bytes.len() * 2, &self))
@ -509,6 +509,7 @@ impl ::core::fmt::Display for ParseLengthError {
} }
} }
#[cfg(feature = "std")]
impl ::std::error::Error for ParseLengthError {} impl ::std::error::Error for ParseLengthError {}
impl Uint256 { impl Uint256 {