From 08a20f8764810ff1b49ff08e4c9f218d0c2ccfb6 Mon Sep 17 00:00:00 2001 From: Andrew Poelstra Date: Tue, 7 Apr 2015 17:51:57 -0500 Subject: [PATCH] Checkpoint commit Work is stalled on some other library work (to give better lifetime requirements on `eventual::Future` and avoid some unsafety), so committing here. There are only three errors left in this round :) Also all the indenting is done, so there should be no more massive rewrite commits. Depending how invasive the lifetime-error fixes are, I may even be able to do sanely sized commits from here on. --- Cargo.toml | 3 + src/blockdata/block.rs | 192 ++--- src/blockdata/blockchain.rs | 948 +++++++++++------------ src/blockdata/constants.rs | 222 +++--- src/blockdata/opcodes.rs | 8 +- src/blockdata/script.rs | 295 ++++--- src/blockdata/transaction.rs | 463 +++++------ src/blockdata/utxoset.rs | 818 ++++++++++---------- src/internal_macros.rs | 81 +- src/lib.rs | 1 + src/macros.rs | 227 +++--- src/network/address.rs | 152 ++-- src/network/message_blockdata.rs | 190 ++--- src/network/socket.rs | 26 +- src/util/base58.rs | 312 ++++---- src/util/hash.rs | 406 +++++----- src/util/iter.rs | 80 +- src/util/misc.rs | 192 ++--- src/util/patricia_tree.rs | 1226 +++++++++++++++--------------- src/util/uint.rs | 836 ++++++++++---------- src/wallet/address.rs | 274 +++---- src/wallet/address_index.rs | 148 ++-- src/wallet/bip32.rs | 870 ++++++++++----------- src/wallet/wallet.rs | 406 +++++----- 24 files changed, 4174 insertions(+), 4202 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index f9641bda..00c8b0ea 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,6 +11,9 @@ path = "src/lib.rs" [dependencies.secp256k1] git = "https://github.com/apoelstra/bitcoin-secp256k1-rs.git" +[dependencies.eventual] +git = "https://github.com/carllerche/eventual" + [dependencies] byteorder = "*" num_cpus = "*" diff --git a/src/blockdata/block.rs b/src/blockdata/block.rs index 88aebb28..f938d796 100644 --- a/src/blockdata/block.rs +++ b/src/blockdata/block.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to @@ -34,101 +34,101 @@ use blockdata::transaction::Transaction; /// the actual transactions #[derive(PartialEq, Eq, Clone, Debug)] pub struct BlockHeader { - /// The protocol version. Should always be 1. - pub version: u32, - /// Reference to the previous block in the chain - pub prev_blockhash: Sha256dHash, - /// The root hash of the merkle tree of transactions in the block - pub merkle_root: Sha256dHash, - /// The timestamp of the block, as claimed by the mainer - pub time: u32, - /// The target value below which the blockhash must lie, encoded as a - /// a float (with well-defined rounding, of course) - pub bits: u32, - /// The nonce, selected to obtain a low enough blockhash - pub nonce: u32, + /// The protocol version. Should always be 1. 
+ pub version: u32, + /// Reference to the previous block in the chain + pub prev_blockhash: Sha256dHash, + /// The root hash of the merkle tree of transactions in the block + pub merkle_root: Sha256dHash, + /// The timestamp of the block, as claimed by the mainer + pub time: u32, + /// The target value below which the blockhash must lie, encoded as a + /// a float (with well-defined rounding, of course) + pub bits: u32, + /// The nonce, selected to obtain a low enough blockhash + pub nonce: u32, } /// A Bitcoin block, which is a collection of transactions with an attached /// proof of work. #[derive(PartialEq, Eq, Clone, Debug)] pub struct Block { - /// The block header - pub header: BlockHeader, - /// List of transactions contained in the block - pub txdata: Vec + /// The block header + pub header: BlockHeader, + /// List of transactions contained in the block + pub txdata: Vec } /// A block header with txcount attached, which is given in the `headers` /// network message. #[derive(PartialEq, Eq, Clone, Debug)] pub struct LoneBlockHeader { - /// The actual block header - pub header: BlockHeader, - /// The number of transactions in the block. This will always be zero - /// when the LoneBlockHeader is returned as part ef a `headers` message. - pub tx_count: VarInt + /// The actual block header + pub header: BlockHeader, + /// The number of transactions in the block. This will always be zero + /// when the LoneBlockHeader is returned as part ef a `headers` message. + pub tx_count: VarInt } impl BlockHeader { - /// Computes the target [0, T] that a blockhash must land in to be valid - pub fn target(&self) -> Uint256 { - // This is a floating-point "compact" encoding originally used by - // OpenSSL, which satoshi put into consensus code, so we're stuck - // with it. The exponent needs to have 3 subtracted from it, hence - // this goofy decoding code: - let (mant, expt) = { - let unshifted_expt = self.bits >> 24; - if unshifted_expt <= 3 { - ((self.bits & 0xFFFFFF) >> 8 * (3 - unshifted_expt as usize), 0) - } else { - (self.bits & 0xFFFFFF, 8 * ((self.bits >> 24) - 3)) - } - }; + /// Computes the target [0, T] that a blockhash must land in to be valid + pub fn target(&self) -> Uint256 { + // This is a floating-point "compact" encoding originally used by + // OpenSSL, which satoshi put into consensus code, so we're stuck + // with it. The exponent needs to have 3 subtracted from it, hence + // this goofy decoding code: + let (mant, expt) = { + let unshifted_expt = self.bits >> 24; + if unshifted_expt <= 3 { + ((self.bits & 0xFFFFFF) >> 8 * (3 - unshifted_expt as usize), 0) + } else { + (self.bits & 0xFFFFFF, 8 * ((self.bits >> 24) - 3)) + } + }; - // The mantissa is signed but may not be negative - if mant > 0x7FFFFF { - Zero::zero() - } else { - from_u64::(mant as u64).unwrap() << (expt as usize) + // The mantissa is signed but may not be negative + if mant > 0x7FFFFF { + Zero::zero() + } else { + from_u64::(mant as u64).unwrap() << (expt as usize) + } } - } - /// Performs an SPV validation of a block, which confirms that the proof-of-work - /// is correct, but does not verify that the transactions are valid or encoded - /// correctly. - pub fn spv_validate(&self, required_target: &Uint256) -> Result<(), util::Error> { - let ref target = self.target(); - if target != required_target { - return Err(SpvBadTarget); + /// Performs an SPV validation of a block, which confirms that the proof-of-work + /// is correct, but does not verify that the transactions are valid or encoded + /// correctly. 
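The compact "bits" decoding in target() above is easier to follow with concrete numbers; spv_validate (next) then only has to check that the little-endian block hash is at or below the decoded target. A minimal standalone sketch, with a hypothetical helper name and a (mantissa, shift) pair instead of the crate's Uint256, since a full target does not fit in any primitive integer:

    // Returns (mantissa, shift) such that target = mantissa << shift.
    // A mantissa above 0x7fffff would read back as negative, which the real
    // target() maps to zero; that branch is omitted here.
    fn compact_to_target_parts(bits: u32) -> (u32, u32) {
        let unshifted_expt = bits >> 24;
        if unshifted_expt <= 3 {
            ((bits & 0x00ff_ffff) >> (8 * (3 - unshifted_expt)), 0)
        } else {
            (bits & 0x00ff_ffff, 8 * (unshifted_expt - 3))
        }
    }

    fn main() {
        // Difficulty-1 bits from the genesis header: mantissa 0x00ffff, exponent
        // byte 0x1d, so target = 0xffff << (8 * (0x1d - 3)) = 0xffff << 208 --
        // the same value max_target() hardcodes in blockdata/constants.rs.
        assert_eq!(compact_to_target_parts(0x1d00ffff), (0xffff, 208));
    }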
+ pub fn spv_validate(&self, required_target: &Uint256) -> Result<(), util::Error> { + let ref target = self.target(); + if target != required_target { + return Err(SpvBadTarget); + } + let ref hash = self.bitcoin_hash().into_le(); + if hash <= target { Ok(()) } else { Err(SpvBadProofOfWork) } } - let ref hash = self.bitcoin_hash().into_le(); - if hash <= target { Ok(()) } else { Err(SpvBadProofOfWork) } - } - /// Returns the total work of the block - pub fn work(&self) -> Uint256 { - // 2**256 / (target + 1) == ~target / (target+1) + 1 (eqn shamelessly stolen from bitcoind) - let mut ret = !self.target(); - let mut ret1 = self.target(); - ret1.increment(); - ret = ret.div(&ret1); - ret.increment(); - ret - } + /// Returns the total work of the block + pub fn work(&self) -> Uint256 { + // 2**256 / (target + 1) == ~target / (target+1) + 1 (eqn shamelessly stolen from bitcoind) + let mut ret = !self.target(); + let mut ret1 = self.target(); + ret1.increment(); + ret = ret / ret1; + ret.increment(); + ret + } } impl BitcoinHash for BlockHeader { - fn bitcoin_hash(&self) -> Sha256dHash { - use network::serialize::serialize; - Sha256dHash::from_data(serialize(self).unwrap().as_slice()) - } + fn bitcoin_hash(&self) -> Sha256dHash { + use network::serialize::serialize; + Sha256dHash::from_data(&serialize(self).unwrap()) + } } impl BitcoinHash for Block { - fn bitcoin_hash(&self) -> Sha256dHash { - self.header.bitcoin_hash() - } + fn bitcoin_hash(&self) -> Sha256dHash { + self.header.bitcoin_hash() + } } impl_consensus_encoding!(BlockHeader, version, prev_blockhash, merkle_root, time, bits, nonce); @@ -137,36 +137,36 @@ impl_consensus_encoding!(LoneBlockHeader, header, tx_count); #[cfg(test)] mod tests { - use std::io; - use serialize::hex::FromHex; + use std::io; + use serialize::hex::FromHex; - use blockdata::block::Block; - use network::serialize::{deserialize, serialize}; + use blockdata::block::Block; + use network::serialize::{deserialize, serialize}; - #[test] - fn block_test() { - let some_block = "010000004ddccd549d28f385ab457e98d1b11ce80bfea2c5ab93015ade4973e400000000bf4473e53794beae34e64fccc471dace6ae544180816f89591894e0f417a914cd74d6e49ffff001d323b3a7b0201000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d026e04ffffffff0100f2052a0100000043410446ef0102d1ec5240f0d061a4246c1bdef63fc3dbab7733052fbbf0ecd8f41fc26bf049ebb4f9527f374280259e7cfa99c48b0e3f39c51347a19a5819651503a5ac00000000010000000321f75f3139a013f50f315b23b0c9a2b6eac31e2bec98e5891c924664889942260000000049483045022100cb2c6b346a978ab8c61b18b5e9397755cbd17d6eb2fe0083ef32e067fa6c785a02206ce44e613f31d9a6b0517e46f3db1576e9812cc98d159bfdaf759a5014081b5c01ffffffff79cda0945903627c3da1f85fc95d0b8ee3e76ae0cfdc9a65d09744b1f8fc85430000000049483045022047957cdd957cfd0becd642f6b84d82f49b6cb4c51a91f49246908af7c3cfdf4a022100e96b46621f1bffcf5ea5982f88cef651e9354f5791602369bf5a82a6cd61a62501fffffffffe09f5fe3ffbf5ee97a54eb5e5069e9da6b4856ee86fc52938c2f979b0f38e82000000004847304402204165be9a4cbab8049e1af9723b96199bfd3e85f44c6b4c0177e3962686b26073022028f638da23fc003760861ad481ead4099312c60030d4cb57820ce4d33812a5ce01ffffffff01009d966b01000000434104ea1feff861b51fe3f5f8a3b12d0f4712db80e919548a80839fc47c6a21e66d957e9c5d8cd108c7a2d2324bad71f9904ac0ae7336507d785b17a2c115e427a32fac00000000".from_hex().unwrap(); - let cutoff_block = 
"010000004ddccd549d28f385ab457e98d1b11ce80bfea2c5ab93015ade4973e400000000bf4473e53794beae34e64fccc471dace6ae544180816f89591894e0f417a914cd74d6e49ffff001d323b3a7b0201000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d026e04ffffffff0100f2052a0100000043410446ef0102d1ec5240f0d061a4246c1bdef63fc3dbab7733052fbbf0ecd8f41fc26bf049ebb4f9527f374280259e7cfa99c48b0e3f39c51347a19a5819651503a5ac00000000010000000321f75f3139a013f50f315b23b0c9a2b6eac31e2bec98e5891c924664889942260000000049483045022100cb2c6b346a978ab8c61b18b5e9397755cbd17d6eb2fe0083ef32e067fa6c785a02206ce44e613f31d9a6b0517e46f3db1576e9812cc98d159bfdaf759a5014081b5c01ffffffff79cda0945903627c3da1f85fc95d0b8ee3e76ae0cfdc9a65d09744b1f8fc85430000000049483045022047957cdd957cfd0becd642f6b84d82f49b6cb4c51a91f49246908af7c3cfdf4a022100e96b46621f1bffcf5ea5982f88cef651e9354f5791602369bf5a82a6cd61a62501fffffffffe09f5fe3ffbf5ee97a54eb5e5069e9da6b4856ee86fc52938c2f979b0f38e82000000004847304402204165be9a4cbab8049e1af9723b96199bfd3e85f44c6b4c0177e3962686b26073022028f638da23fc003760861ad481ead4099312c60030d4cb57820ce4d33812a5ce01ffffffff01009d966b01000000434104ea1feff861b51fe3f5f8a3b12d0f4712db80e919548a80839fc47c6a21e66d957e9c5d8cd108c7a2d2324bad71f9904ac0ae7336507d785b17a2c115e427a32fac".from_hex().unwrap(); + #[test] + fn block_test() { + let some_block = "010000004ddccd549d28f385ab457e98d1b11ce80bfea2c5ab93015ade4973e400000000bf4473e53794beae34e64fccc471dace6ae544180816f89591894e0f417a914cd74d6e49ffff001d323b3a7b0201000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d026e04ffffffff0100f2052a0100000043410446ef0102d1ec5240f0d061a4246c1bdef63fc3dbab7733052fbbf0ecd8f41fc26bf049ebb4f9527f374280259e7cfa99c48b0e3f39c51347a19a5819651503a5ac00000000010000000321f75f3139a013f50f315b23b0c9a2b6eac31e2bec98e5891c924664889942260000000049483045022100cb2c6b346a978ab8c61b18b5e9397755cbd17d6eb2fe0083ef32e067fa6c785a02206ce44e613f31d9a6b0517e46f3db1576e9812cc98d159bfdaf759a5014081b5c01ffffffff79cda0945903627c3da1f85fc95d0b8ee3e76ae0cfdc9a65d09744b1f8fc85430000000049483045022047957cdd957cfd0becd642f6b84d82f49b6cb4c51a91f49246908af7c3cfdf4a022100e96b46621f1bffcf5ea5982f88cef651e9354f5791602369bf5a82a6cd61a62501fffffffffe09f5fe3ffbf5ee97a54eb5e5069e9da6b4856ee86fc52938c2f979b0f38e82000000004847304402204165be9a4cbab8049e1af9723b96199bfd3e85f44c6b4c0177e3962686b26073022028f638da23fc003760861ad481ead4099312c60030d4cb57820ce4d33812a5ce01ffffffff01009d966b01000000434104ea1feff861b51fe3f5f8a3b12d0f4712db80e919548a80839fc47c6a21e66d957e9c5d8cd108c7a2d2324bad71f9904ac0ae7336507d785b17a2c115e427a32fac00000000".from_hex().unwrap(); + let cutoff_block = 
"010000004ddccd549d28f385ab457e98d1b11ce80bfea2c5ab93015ade4973e400000000bf4473e53794beae34e64fccc471dace6ae544180816f89591894e0f417a914cd74d6e49ffff001d323b3a7b0201000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d026e04ffffffff0100f2052a0100000043410446ef0102d1ec5240f0d061a4246c1bdef63fc3dbab7733052fbbf0ecd8f41fc26bf049ebb4f9527f374280259e7cfa99c48b0e3f39c51347a19a5819651503a5ac00000000010000000321f75f3139a013f50f315b23b0c9a2b6eac31e2bec98e5891c924664889942260000000049483045022100cb2c6b346a978ab8c61b18b5e9397755cbd17d6eb2fe0083ef32e067fa6c785a02206ce44e613f31d9a6b0517e46f3db1576e9812cc98d159bfdaf759a5014081b5c01ffffffff79cda0945903627c3da1f85fc95d0b8ee3e76ae0cfdc9a65d09744b1f8fc85430000000049483045022047957cdd957cfd0becd642f6b84d82f49b6cb4c51a91f49246908af7c3cfdf4a022100e96b46621f1bffcf5ea5982f88cef651e9354f5791602369bf5a82a6cd61a62501fffffffffe09f5fe3ffbf5ee97a54eb5e5069e9da6b4856ee86fc52938c2f979b0f38e82000000004847304402204165be9a4cbab8049e1af9723b96199bfd3e85f44c6b4c0177e3962686b26073022028f638da23fc003760861ad481ead4099312c60030d4cb57820ce4d33812a5ce01ffffffff01009d966b01000000434104ea1feff861b51fe3f5f8a3b12d0f4712db80e919548a80839fc47c6a21e66d957e9c5d8cd108c7a2d2324bad71f9904ac0ae7336507d785b17a2c115e427a32fac".from_hex().unwrap(); - let prevhash = "4ddccd549d28f385ab457e98d1b11ce80bfea2c5ab93015ade4973e400000000".from_hex().unwrap(); - let merkle = "bf4473e53794beae34e64fccc471dace6ae544180816f89591894e0f417a914c".from_hex().unwrap(); + let prevhash = "4ddccd549d28f385ab457e98d1b11ce80bfea2c5ab93015ade4973e400000000".from_hex().unwrap(); + let merkle = "bf4473e53794beae34e64fccc471dace6ae544180816f89591894e0f417a914c".from_hex().unwrap(); - let decode: io::Result = deserialize(some_block.clone()); - let bad_decode: io::Result = deserialize(cutoff_block); + let decode: io::Result = deserialize(some_block.clone()); + let bad_decode: io::Result = deserialize(cutoff_block); - assert!(decode.is_ok()); - assert!(bad_decode.is_err()); - let real_decode = decode.unwrap(); - assert_eq!(real_decode.header.version, 1); - assert_eq!(serialize(&real_decode.header.prev_blockhash), Ok(prevhash)); - // [test] TODO: actually compute the merkle root - assert_eq!(serialize(&real_decode.header.merkle_root), Ok(merkle)); - assert_eq!(real_decode.header.time, 1231965655); - assert_eq!(real_decode.header.bits, 486604799); - assert_eq!(real_decode.header.nonce, 2067413810); - // [test] TODO: check the transaction data - - assert_eq!(serialize(&real_decode), Ok(some_block)); - } + assert!(decode.is_ok()); + assert!(bad_decode.is_err()); + let real_decode = decode.unwrap(); + assert_eq!(real_decode.header.version, 1); + assert_eq!(serialize(&real_decode.header.prev_blockhash), Ok(prevhash)); + // [test] TODO: actually compute the merkle root + assert_eq!(serialize(&real_decode.header.merkle_root), Ok(merkle)); + assert_eq!(real_decode.header.time, 1231965655); + assert_eq!(real_decode.header.bits, 486604799); + assert_eq!(real_decode.header.nonce, 2067413810); + // [test] TODO: check the transaction data + + assert_eq!(serialize(&real_decode), Ok(some_block)); + } } diff --git a/src/blockdata/blockchain.rs b/src/blockdata/blockchain.rs index 770f36e3..231bb3c7 100644 --- a/src/blockdata/blockchain.rs +++ b/src/blockdata/blockchain.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this 
software to @@ -28,7 +28,7 @@ use std::{marker, num, ptr}; use blockdata::block::{Block, BlockHeader}; use blockdata::transaction::Transaction; use blockdata::constants::{DIFFCHANGE_INTERVAL, DIFFCHANGE_TIMESPAN, - TARGET_BLOCK_SPACING, max_target, genesis_block}; + TARGET_BLOCK_SPACING, max_target, genesis_block}; use network::constants::Network; use network::encodable::{ConsensusDecodable, ConsensusEncodable}; use network::serialize::{BitcoinHash, SimpleDecoder, SimpleEncoder}; @@ -44,206 +44,206 @@ type NodePtr = *const BlockchainNode; /// A link in the blockchain pub struct BlockchainNode { - /// The actual block - pub block: Block, - /// Total work from genesis to this point - pub total_work: Uint256, - /// Expected value of `block.header.bits` for this block; only changes every - /// `blockdata::constants::DIFFCHANGE_INTERVAL;` blocks - pub required_difficulty: Uint256, - /// Height above genesis - pub height: u32, - /// Whether the transaction data is stored - pub has_txdata: bool, - /// Pointer to block's parent - prev: NodePtr, - /// Pointer to block's child - next: NodePtr + /// The actual block + pub block: Block, + /// Total work from genesis to this point + pub total_work: Uint256, + /// Expected value of `block.header.bits` for this block; only changes every + /// `blockdata::constants::DIFFCHANGE_INTERVAL;` blocks + pub required_difficulty: Uint256, + /// Height above genesis + pub height: u32, + /// Whether the transaction data is stored + pub has_txdata: bool, + /// Pointer to block's parent + prev: NodePtr, + /// Pointer to block's child + next: NodePtr } impl BlockchainNode { - /// Is the node on the main chain? - fn is_on_main_chain(&self, chain: &Blockchain) -> bool { - if self.block.header == unsafe { (*chain.best_tip).block.header } { - return true; - } - unsafe { - let mut scan = self.next; - while scan.is_not_null() { - if (*scan).block.header == (*chain.best_tip).block.header { - return true; + /// Is the node on the main chain? 
+ fn is_on_main_chain(&self, chain: &Blockchain) -> bool { + if self.block.header == unsafe { (*chain.best_tip).block.header } { + return true; } - scan = (*scan).next; - } + unsafe { + let mut scan = self.next; + while !scan.is_null() { + if (*scan).block.header == (*chain.best_tip).block.header { + return true; + } + scan = (*scan).next; + } + } + return false; } - return false; - } } impl ConsensusEncodable for BlockchainNode { - #[inline] - fn consensus_encode(&self, s: &mut S) -> Result<(), S::Error> { - try!(self.block.consensus_encode(s)); - try!(self.total_work.consensus_encode(s)); - try!(self.required_difficulty.consensus_encode(s)); - try!(self.height.consensus_encode(s)); - try!(self.has_txdata.consensus_encode(s)); - // Don't serialize the prev or next pointers - Ok(()) - } + #[inline] + fn consensus_encode(&self, s: &mut S) -> Result<(), S::Error> { + try!(self.block.consensus_encode(s)); + try!(self.total_work.consensus_encode(s)); + try!(self.required_difficulty.consensus_encode(s)); + try!(self.height.consensus_encode(s)); + try!(self.has_txdata.consensus_encode(s)); + // Don't serialize the prev or next pointers + Ok(()) + } } impl ConsensusDecodable for BlockchainNode { - #[inline] - fn consensus_decode(d: &mut D) -> Result { - Ok(BlockchainNode { - block: try!(ConsensusDecodable::consensus_decode(d)), - total_work: try!(ConsensusDecodable::consensus_decode(d)), - required_difficulty: try!(ConsensusDecodable::consensus_decode(d)), - height: try!(ConsensusDecodable::consensus_decode(d)), - has_txdata: try!(ConsensusDecodable::consensus_decode(d)), - prev: ptr::null(), - next: ptr::null() - }) - } + #[inline] + fn consensus_decode(d: &mut D) -> Result { + Ok(BlockchainNode { + block: try!(ConsensusDecodable::consensus_decode(d)), + total_work: try!(ConsensusDecodable::consensus_decode(d)), + required_difficulty: try!(ConsensusDecodable::consensus_decode(d)), + height: try!(ConsensusDecodable::consensus_decode(d)), + has_txdata: try!(ConsensusDecodable::consensus_decode(d)), + prev: ptr::null(), + next: ptr::null() + }) + } } impl BitcoinHash for BlockchainNode { - fn bitcoin_hash(&self) -> Sha256dHash { - self.block.header.bitcoin_hash() - } + fn bitcoin_hash(&self) -> Sha256dHash { + self.block.header.bitcoin_hash() + } } /// The blockchain pub struct Blockchain { - network: Network, - tree: BlockTree, - best_tip: NodePtr, - best_hash: Sha256dHash, - genesis_hash: Sha256dHash + network: Network, + tree: BlockTree, + best_tip: NodePtr, + best_hash: Sha256dHash, + genesis_hash: Sha256dHash } impl ConsensusEncodable for Blockchain { - #[inline] - fn consensus_encode(&self, s: &mut S) -> Result<(), S::Error> { - try!(self.network.consensus_encode(s)); - try!(self.tree.consensus_encode(s)); - try!(self.best_hash.consensus_encode(s)); - try!(self.genesis_hash.consensus_encode(s)); - Ok(()) - } + #[inline] + fn consensus_encode(&self, s: &mut S) -> Result<(), S::Error> { + try!(self.network.consensus_encode(s)); + try!(self.tree.consensus_encode(s)); + try!(self.best_hash.consensus_encode(s)); + try!(self.genesis_hash.consensus_encode(s)); + Ok(()) + } } impl ConsensusDecodable for Blockchain { - fn consensus_decode(d: &mut D) -> Result { - let network: Network = try!(ConsensusDecodable::consensus_decode(d)); - let mut tree: BlockTree = try!(ConsensusDecodable::consensus_decode(d)); - let best_hash: Sha256dHash = try!(ConsensusDecodable::consensus_decode(d)); - let genesis_hash: Sha256dHash = try!(ConsensusDecodable::consensus_decode(d)); + fn consensus_decode(d: &mut D) -> 
Result { + let network: Network = try!(ConsensusDecodable::consensus_decode(d)); + let mut tree: BlockTree = try!(ConsensusDecodable::consensus_decode(d)); + let best_hash: Sha256dHash = try!(ConsensusDecodable::consensus_decode(d)); + let genesis_hash: Sha256dHash = try!(ConsensusDecodable::consensus_decode(d)); - // Lookup best tip - let best = match tree.lookup(&best_hash.into_le(), 256) { - Some(node) => &**node as NodePtr, - None => { - return Err(d.error(format!("best tip {:x} not in tree", best_hash).as_slice())); - } - }; - // Lookup genesis - if tree.lookup(&genesis_hash.into_le(), 256).is_none() { - return Err(d.error(format!("genesis {:x} not in tree", genesis_hash).as_slice())); - } - // Reconnect all prev pointers - let raw_tree = &tree as *const _; - for node in tree.mut_iter() { - let hash = node.block.header.prev_blockhash.into_le(); - let prevptr = - match unsafe { (*raw_tree).lookup(&hash, 256) } { - Some(node) => &**node as NodePtr, - None => ptr::null() + // Lookup best tip + let best = match tree.lookup(&best_hash.into_le(), 256) { + Some(node) => &**node as NodePtr, + None => { + return Err(d.error(format!("best tip {:x} not in tree", best_hash))); + } }; - node.prev = prevptr; - } - // Reconnect next pointers on the main chain - unsafe { - let mut scan = best; - while (*scan).prev.is_not_null() { - let prev = (*scan).prev as *mut BlockchainNode; - (*prev).next = scan; - scan = prev as NodePtr; - } + // Lookup genesis + if tree.lookup(&genesis_hash.into_le(), 256).is_none() { + return Err(d.error(format!("genesis {:x} not in tree", genesis_hash))); + } + // Reconnect all prev pointers + let raw_tree = &tree as *const BlockTree; + for node in tree.mut_iter() { + let hash = node.block.header.prev_blockhash.into_le(); + let prevptr = + match unsafe { (*raw_tree).lookup(&hash, 256) } { + Some(node) => &**node as NodePtr, + None => ptr::null() + }; + node.prev = prevptr; + } + // Reconnect next pointers on the main chain + unsafe { + let mut scan = best; + while !(*scan).prev.is_null() { + let prev = (*scan).prev as *mut BlockchainNode; + (*prev).next = scan; + scan = prev as NodePtr; + } - // Check that "genesis" is the genesis - if (*scan).bitcoin_hash() != genesis_hash { - return Err(d.error(format!("no path from tip {:x} to genesis {:x}", - best_hash, genesis_hash).as_slice())); - } - } + // Check that "genesis" is the genesis + if (*scan).bitcoin_hash() != genesis_hash { + return Err(d.error(format!("no path from tip {:x} to genesis {:x}", + best_hash, genesis_hash))); + } + } - // Return the chain - Ok(Blockchain { - network: network, - tree: tree, - best_tip: best, - best_hash: best_hash, - genesis_hash: genesis_hash - }) - } + // Return the chain + Ok(Blockchain { + network: network, + tree: tree, + best_tip: best, + best_hash: best_hash, + genesis_hash: genesis_hash + }) + } } // TODO: this should maybe be public, in which case it needs to be tagged // with a PhantomData marker tying it to the tree's lifetime. 
struct LocatorHashIter { - index: NodePtr, - count: usize, - skip: usize + index: NodePtr, + count: usize, + skip: usize } impl LocatorHashIter { - fn new(init: NodePtr) -> LocatorHashIter { - LocatorHashIter { index: init, count: 0, skip: 1 } - } + fn new(init: NodePtr) -> LocatorHashIter { + LocatorHashIter { index: init, count: 0, skip: 1 } + } } impl Iterator for LocatorHashIter { - type Item = Sha256dHash; + type Item = Sha256dHash; - fn next(&mut self) -> Option { - if self.index.is_null() { - return None; - } - let ret = Some(unsafe { (*self.index).bitcoin_hash() }); - - // Rewind once (if we are at the genesis, this will set self.index to None) - self.index = unsafe { (*self.index).prev }; - // If we are not at the genesis, rewind `self.skip` times, or until we are. - if self.index.is_not_null() { - for _ in 1..self.skip { - unsafe { - if (*self.index).prev.is_null() { - break; - } - self.index = (*self.index).prev; + fn next(&mut self) -> Option { + if self.index.is_null() { + return None; } - } - } + let ret = Some(unsafe { (*self.index).bitcoin_hash() }); - self.count += 1; - if self.count > 10 { - self.skip *= 2; + // Rewind once (if we are at the genesis, this will set self.index to None) + self.index = unsafe { (*self.index).prev }; + // If we are not at the genesis, rewind `self.skip` times, or until we are. + if !self.index.is_null() { + for _ in 1..self.skip { + unsafe { + if (*self.index).prev.is_null() { + break; + } + self.index = (*self.index).prev; + } + } + } + + self.count += 1; + if self.count > 10 { + self.skip *= 2; + } + ret } - ret - } } /// An iterator over blocks in blockheight order pub struct BlockIter<'tree> { - index: NodePtr, - // Note: we don't actually touch the blockchain. But we need - // to keep it borrowed to prevent it being mutated, since some - // mutable blockchain methods call .mut_borrow() on the block - // links, which would blow up if the iterator did a regular - // borrow at the same time. - marker: marker::PhantomData<&'tree Blockchain> + index: NodePtr, + // Note: we don't actually touch the blockchain. But we need + // to keep it borrowed to prevent it being mutated, since some + // mutable blockchain methods call .mut_borrow() on the block + // links, which would blow up if the iterator did a regular + // borrow at the same time. + marker: marker::PhantomData<&'tree Blockchain> } /// An iterator over blocks in reverse blockheight order. Note that this @@ -253,9 +253,9 @@ pub struct BlockIter<'tree> { /// and its `.rev()` would be iterators over different chains! To avoid /// this suprising behaviour we simply use separate iterators. pub struct RevBlockIter<'tree> { - index: NodePtr, - // See comment in BlockIter for why we need this - marker: marker::PhantomData<&'tree Blockchain> + index: NodePtr, + // See comment in BlockIter for why we need this + marker: marker::PhantomData<&'tree Blockchain> } /// An iterator over blocks in reverse blockheight order, which yielding only @@ -272,373 +272,373 @@ pub struct RevBlockIter<'tree> { /// the end of the iteration), you can then sync it up same as if you were doing /// a plain old fast-forward. 
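LocatorHashIter above implements the usual block-locator walk for `getheaders`: the first ten steps go back one block at a time, after which the stride doubles, so a locator stays logarithmic in chain height while still pinning down a fork point precisely near the tip. A small sketch of the heights such a locator would reference, mirroring the count/skip logic above (the helper name is hypothetical):

    fn locator_heights(tip_height: i64) -> Vec<i64> {
        let mut heights = Vec::new();
        let (mut height, mut count, mut skip) = (tip_height, 0u32, 1i64);
        while height > 0 {
            heights.push(height);
            // Rewind `skip` blocks, but never past the genesis.
            height = (height - skip).max(0);
            count += 1;
            // After the first ten hashes the stride doubles each step.
            if count > 10 {
                skip *= 2;
            }
        }
        heights.push(0); // the genesis is always the last locator entry
        heights
    }

    fn main() {
        let locator = locator_heights(200);
        // Eleven single steps (200 down to 190), then doubling strides, then genesis.
        assert_eq!(&locator[..13], &[200, 199, 198, 197, 196, 195, 194, 193, 192, 191, 190, 189, 187]);
        assert_eq!(&locator[13..], &[183, 175, 159, 127, 63, 0]);
    }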
pub struct RevStaleBlockIter<'tree> { - index: NodePtr, - chain: &'tree Blockchain + index: NodePtr, + chain: &'tree Blockchain } impl<'tree> Iterator for BlockIter<'tree> { - type Item = &'tree BlockchainNode; + type Item = &'tree BlockchainNode; - fn next(&mut self) -> Option<&'tree BlockchainNode> { - if self.index.is_null() { - return None; + fn next(&mut self) -> Option<&'tree BlockchainNode> { + if self.index.is_null() { + return None; + } + unsafe { + let ret = Some(&*self.index); + self.index = (*self.index).next; + ret + } } - unsafe { - let ret = Some(&*self.index); - self.index = (*self.index).next; - ret - } - } } impl<'tree> Iterator for RevBlockIter<'tree> { - type Item = &'tree BlockchainNode; + type Item = &'tree BlockchainNode; - fn next(&mut self) -> Option<&'tree BlockchainNode> { - if self.index.is_null() { - return None; + fn next(&mut self) -> Option<&'tree BlockchainNode> { + if self.index.is_null() { + return None; + } + unsafe { + let ret = Some(&*self.index); + self.index = (*self.index).prev; + ret + } } - unsafe { - let ret = Some(&*self.index); - self.index = (*self.index).prev; - ret - } - } } impl<'tree> Iterator for RevStaleBlockIter<'tree> { - type Item = &'tree Block; + type Item = &'tree Block; - fn next(&mut self) -> Option<&'tree Block> { - if self.index.is_null() { - return None; - } + fn next(&mut self) -> Option<&'tree Block> { + if self.index.is_null() { + return None; + } - unsafe { - let ret = Some(&(*self.index).block); - let next_index = (*self.index).prev; - // Check if the next block is going to be on the main chain - if next_index.is_not_null() && - (*next_index).next != self.index && - (&*next_index).is_on_main_chain(self.chain) { - self.index = ptr::null(); - } else { - self.index = next_index; - } - ret + unsafe { + let ret = Some(&(*self.index).block); + let next_index = (*self.index).prev; + // Check if the next block is going to be on the main chain + if !next_index.is_null() && + (*next_index).next != self.index && + (&*next_index).is_on_main_chain(self.chain) { + self.index = ptr::null(); + } else { + self.index = next_index; + } + ret + } } - } } /// This function emulates the GetCompact(SetCompact(n)) in the satoshi code, /// which drops the precision to something that can be encoded precisely in /// the nBits block header field. Savour the perversity. This is in Bitcoin /// consensus code. What. Gaah! -fn satoshi_the_precision(n: &Uint256) -> Uint256 { - // Shift by B bits right then left to turn the low bits to zero - let bits = 8 * ((n.bits() + 7) / 8 - 3); - let mut ret = n >> bits; - // Oh, did I say B was that fucked up formula? I meant sometimes also + 8. - if ret.bit(23) { - ret = (ret >> 8) << 8; - } - ret << bits +fn satoshi_the_precision(n: Uint256) -> Uint256 { + // Shift by B bits right then left to turn the low bits to zero + let bits = 8 * ((n.bits() + 7) / 8 - 3); + let mut ret = n >> bits; + // Oh, did I say B was that fucked up formula? I meant sometimes also + 8. 
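    // Worked example (illustrative values, not from this commit): with n = 0x12345678,
    // n.bits() = 29, so `bits` = 8 * ((29 + 7)/8 - 3) = 8 and ret = n >> 8 = 0x123456.
    // Bit 23 of 0x123456 is clear, so the result is 0x123456 << 8 = 0x12345600: the low
    // byte is simply truncated away. If the retained mantissa has bit 23 set (e.g.
    // n = 0x00ffffff), the nBits decoder would read that bit as a sign bit, so the
    // branch below drops one more byte and the result becomes 0x00ffff00.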
+ if ret.bit(23) { + ret = (ret >> 8) << 8; + } + ret << bits } impl Blockchain { - /// Constructs a new blockchain - pub fn new(network: Network) -> Blockchain { - let genesis = genesis_block(network); - let genhash = genesis.header.bitcoin_hash(); - let new_node = Box::new(BlockchainNode { - total_work: Zero::zero(), - required_difficulty: genesis.header.target(), - block: genesis, - height: 0, - has_txdata: true, - prev: ptr::null(), - next: ptr::null() - }); - let raw_ptr = &*new_node as NodePtr; - Blockchain { - network: network, - tree: { - let mut pat = PatriciaTree::new(); - pat.insert(&genhash.into_le(), 256, new_node); - pat - }, - best_hash: genhash, - genesis_hash: genhash, - best_tip: raw_ptr + /// Constructs a new blockchain + pub fn new(network: Network) -> Blockchain { + let genesis = genesis_block(network); + let genhash = genesis.header.bitcoin_hash(); + let new_node = Box::new(BlockchainNode { + total_work: Zero::zero(), + required_difficulty: genesis.header.target(), + block: genesis, + height: 0, + has_txdata: true, + prev: ptr::null(), + next: ptr::null() + }); + let raw_ptr = &*new_node as NodePtr; + Blockchain { + network: network, + tree: { + let mut pat = PatriciaTree::new(); + pat.insert(&genhash.into_le(), 256, new_node); + pat + }, + best_hash: genhash, + genesis_hash: genhash, + best_tip: raw_ptr + } } - } - fn replace_txdata(&mut self, hash: &Uint256, txdata: Vec, has_txdata: bool) -> Result<(), util::Error> { - match self.tree.lookup_mut(hash, 256) { - Some(existing_block) => { - unsafe { - // existing_block is an Rc. Rust will not let us mutate it under - // any circumstances, since if it were to be reallocated, then - // all other references to it would be destroyed. However, we - // just need a mutable pointer to the txdata vector; by calling - // Vec::clone_from() rather than assigning, we can be assured that - // no reallocation can occur, since clone_from() takes an &mut self, - // which it does not own and therefore cannot move. - // - // To be clear: there will undoubtedly be some reallocation within - // the Vec itself. We don't care about this. What we care about is - // that the Vec (and more pointedly, its containing struct) does not - // move, since this would invalidate the Rc that we are snookering. - use std::mem::{forget, transmute}; - let mutable_vec: &mut Vec = transmute(&existing_block.block.txdata); - mutable_vec.clone_from(&txdata); - // If mutable_vec went out of scope unhindered, it would deallocate - // the Vec it points to, since Rust assumes that a mutable vector - // is a unique reference (and this one is definitely not). - forget(mutable_vec); - // Do the same thing with the txdata flac - let mutable_bool: &mut bool = transmute(&existing_block.has_txdata); - *mutable_bool = has_txdata; - forget(mutable_bool); + fn replace_txdata(&mut self, hash: &Uint256, txdata: Vec, has_txdata: bool) -> Result<(), util::Error> { + match self.tree.lookup_mut(hash, 256) { + Some(existing_block) => { + unsafe { + // existing_block is an Rc. Rust will not let us mutate it under + // any circumstances, since if it were to be reallocated, then + // all other references to it would be destroyed. However, we + // just need a mutable pointer to the txdata vector; by calling + // Vec::clone_from() rather than assigning, we can be assured that + // no reallocation can occur, since clone_from() takes an &mut self, + // which it does not own and therefore cannot move. 
+ // + // To be clear: there will undoubtedly be some reallocation within + // the Vec itself. We don't care about this. What we care about is + // that the Vec (and more pointedly, its containing struct) does not + // move, since this would invalidate the Rc that we are snookering. + use std::mem::{forget, transmute}; + let mutable_vec: &mut Vec = transmute(&existing_block.block.txdata); + mutable_vec.clone_from(&txdata); + // If mutable_vec went out of scope unhindered, it would deallocate + // the Vec it points to, since Rust assumes that a mutable vector + // is a unique reference (and this one is definitely not). + forget(mutable_vec); + // Do the same thing with the txdata flac + let mutable_bool: &mut bool = transmute(&existing_block.has_txdata); + *mutable_bool = has_txdata; + forget(mutable_bool); + } + Ok(()) + }, + None => Err(BlockNotFound) + } + } + + /// Looks up a block in the chain and returns the BlockchainNode containing it + pub fn get_block<'a>(&'a self, hash: Sha256dHash) -> Option<&'a BlockchainNode> { + self.tree.lookup(&hash.into_le(), 256).map(|node| &**node) + } + + /// Locates a block in the chain and overwrites its txdata + pub fn add_txdata(&mut self, block: Block) -> Result<(), util::Error> { + self.replace_txdata(&block.header.bitcoin_hash().into_le(), block.txdata, true) + } + + /// Locates a block in the chain and removes its txdata + pub fn remove_txdata(&mut self, hash: Sha256dHash) -> Result<(), util::Error> { + self.replace_txdata(&hash.into_le(), vec![], false) + } + + /// Adds a block header to the chain + pub fn add_header(&mut self, header: BlockHeader) -> Result<(), util::Error> { + self.real_add_block(Block { header: header, txdata: vec![] }, false) + } + + /// Adds a block to the chain + pub fn add_block(&mut self, block: Block) -> Result<(), util::Error> { + self.real_add_block(block, true) + } + + fn real_add_block(&mut self, block: Block, has_txdata: bool) -> Result<(), util::Error> { + // get_prev optimizes the common case where we are extending the best tip + #[inline] + fn get_prev<'a>(chain: &'a Blockchain, hash: Sha256dHash) -> Option { + if hash == chain.best_hash { + Some(chain.best_tip) + } else { + chain.tree.lookup(&hash.into_le(), 256).map(|boxptr| &**boxptr as NodePtr) + } + } + // Check for multiple inserts (bitcoind from c9a09183 to 3c85d2ec doesn't + // handle locator hashes properly and may return blocks multiple times, + // and this may also happen in case of a reorg. 
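The difficulty computation a few lines below applies the standard 2016-block retarget: scale the previous target by the time the last interval actually took, clamped to [timespan/4, 4*timespan], cap it at difficulty 1, then drop precision so it round-trips through nBits (the satoshi_the_precision step shown earlier). A self-contained sketch of that arithmetic, with u128 standing in for Uint256 and a hypothetical function name:

    const DIFFCHANGE_TIMESPAN: u32 = 14 * 24 * 3600; // two weeks, as in blockdata/constants.rs

    fn retarget(old_target: u128, actual_timespan: u32, max_target: u128) -> u128 {
        // Clamp how far the difficulty may move in a single retarget.
        let timespan = actual_timespan
            .max(DIFFCHANGE_TIMESPAN / 4)
            .min(DIFFCHANGE_TIMESPAN * 4);
        let new_target = old_target * timespan as u128 / DIFFCHANGE_TIMESPAN as u128;
        // Never let the target rise above difficulty 1.
        new_target.min(max_target)
    }

    fn main() {
        // Blocks arrived twice as fast as intended -> target halves (difficulty doubles).
        assert_eq!(retarget(1u128 << 100, DIFFCHANGE_TIMESPAN / 2, u128::MAX), 1u128 << 99);
    }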
+ if self.tree.lookup(&block.header.bitcoin_hash().into_le(), 256).is_some() { + return Err(DuplicateHash); + } + // Construct node, if possible + let new_block = match get_prev(self, block.header.prev_blockhash) { + Some(prev) => { + let difficulty = + // Compute required difficulty if this is a diffchange block + if (unsafe { (*prev).height } + 1) % DIFFCHANGE_INTERVAL == 0 { + let timespan = unsafe { + // Scan back DIFFCHANGE_INTERVAL blocks + let mut scan = prev; + for _ in 0..(DIFFCHANGE_INTERVAL - 1) { + scan = (*scan).prev; + } + // Get clamped timespan between first and last blocks + match (*prev).block.header.time - (*scan).block.header.time { + n if n < DIFFCHANGE_TIMESPAN / 4 => DIFFCHANGE_TIMESPAN / 4, + n if n > DIFFCHANGE_TIMESPAN * 4 => DIFFCHANGE_TIMESPAN * 4, + n => n + } + }; + // Compute new target + let mut target = unsafe { (*prev).block.header.target() }; + target = target.mul_u32(timespan); + target = target / num::FromPrimitive::from_u64(DIFFCHANGE_TIMESPAN as u64).unwrap(); + // Clamp below MAX_TARGET (difficulty 1) + let max = max_target(self.network); + if target > max { target = max }; + // Compactify (make expressible in the 8+24 nBits float format + satoshi_the_precision(target) + // On non-diffchange blocks, Testnet has a rule that any 20-minute-long + // block intervals result the difficulty + } else if self.network == Network::Testnet && + block.header.time > unsafe { (*prev).block.header.time } + 2*TARGET_BLOCK_SPACING { + max_target(self.network) + // On the other hand, if we are in Testnet and the block interval is less + // than 20 minutes, we need to scan backward to find a block for which the + // previous rule did not apply, to find the "real" difficulty. + } else if self.network == Network::Testnet { + // Scan back DIFFCHANGE_INTERVAL blocks + unsafe { + let mut scan = prev; + while (*scan).height % DIFFCHANGE_INTERVAL != 0 && + (*scan).required_difficulty == max_target(self.network) { + scan = (*scan).prev; + } + (*scan).required_difficulty + } + // Otherwise just use the last block's difficulty + } else { + unsafe { (*prev).required_difficulty } + }; + // Create node + let ret = Box::new(BlockchainNode { + total_work: block.header.work() + unsafe { (*prev).total_work }, + block: block, + required_difficulty: difficulty, + height: unsafe { (*prev).height + 1 }, + has_txdata: has_txdata, + prev: prev, + next: ptr::null() + }); + unsafe { + let prev = prev as *mut BlockchainNode; + (*prev).next = &*ret as NodePtr; + } + ret + }, + None => { + return Err(PrevHashNotFound); + } + }; + + // spv validate the block + try!(new_block.block.header.spv_validate(&new_block.required_difficulty)); + + // Insert the new block + let raw_ptr = &*new_block as NodePtr; + self.tree.insert(&new_block.block.header.bitcoin_hash().into_le(), 256, new_block); + // Replace the best tip if necessary + if unsafe { (*raw_ptr).total_work > (*self.best_tip).total_work } { + self.set_best_tip(raw_ptr); } Ok(()) - }, - None => Err(BlockNotFound) } - } - /// Looks up a block in the chain and returns the BlockchainNode containing it - pub fn get_block<'a>(&'a self, hash: Sha256dHash) -> Option<&'a BlockchainNode> { - self.tree.lookup(&hash.into_le(), 256).map(|node| &**node) - } - - /// Locates a block in the chain and overwrites its txdata - pub fn add_txdata(&mut self, block: Block) -> Result<(), util::Error> { - self.replace_txdata(&block.header.bitcoin_hash().into_le(), block.txdata, true) - } - - /// Locates a block in the chain and removes its txdata - pub fn 
remove_txdata(&mut self, hash: Sha256dHash) -> Result<(), util::Error> { - self.replace_txdata(&hash.into_le(), vec![], false) - } - - /// Adds a block header to the chain - pub fn add_header(&mut self, header: BlockHeader) -> Result<(), util::Error> { - self.real_add_block(Block { header: header, txdata: vec![] }, false) - } - - /// Adds a block to the chain - pub fn add_block(&mut self, block: Block) -> Result<(), util::Error> { - self.real_add_block(block, true) - } - - fn real_add_block(&mut self, block: Block, has_txdata: bool) -> Result<(), util::Error> { - // get_prev optimizes the common case where we are extending the best tip - #[inline] - fn get_prev<'a>(chain: &'a Blockchain, hash: Sha256dHash) -> Option { - if hash == chain.best_hash { - Some(chain.best_tip) - } else { - chain.tree.lookup(&hash.into_le(), 256).map(|boxptr| &**boxptr as NodePtr) - } - } - // Check for multiple inserts (bitcoind from c9a09183 to 3c85d2ec doesn't - // handle locator hashes properly and may return blocks multiple times, - // and this may also happen in case of a reorg. - if self.tree.lookup(&block.header.bitcoin_hash().into_le(), 256).is_some() { - return Err(DuplicateHash); - } - // Construct node, if possible - let new_block = match get_prev(self, block.header.prev_blockhash) { - Some(prev) => { - let difficulty = - // Compute required difficulty if this is a diffchange block - if (unsafe { (*prev).height } + 1) % DIFFCHANGE_INTERVAL == 0 { - let timespan = unsafe { - // Scan back DIFFCHANGE_INTERVAL blocks - let mut scan = prev; - for _ in 0..(DIFFCHANGE_INTERVAL - 1) { - scan = (*scan).prev; - } - // Get clamped timespan between first and last blocks - match (*prev).block.header.time - (*scan).block.header.time { - n if n < DIFFCHANGE_TIMESPAN / 4 => DIFFCHANGE_TIMESPAN / 4, - n if n > DIFFCHANGE_TIMESPAN * 4 => DIFFCHANGE_TIMESPAN * 4, - n => n - } - }; - // Compute new target - let mut target = unsafe { (*prev).block.header.target() }; - target = target.mul_u32(timespan); - target = target / num::FromPrimitive::from_u64(DIFFCHANGE_TIMESPAN as u64).unwrap(); - // Clamp below MAX_TARGET (difficulty 1) - let max = max_target(self.network); - if target > max { target = max }; - // Compactify (make expressible in the 8+24 nBits float format - satoshi_the_precision(&target) - // On non-diffchange blocks, Testnet has a rule that any 20-minute-long - // block intervals result the difficulty - } else if self.network == Network::BitcoinTestnet && - block.header.time > unsafe { (*prev).block.header.time } + 2*TARGET_BLOCK_SPACING { - max_target(self.network) - // On the other hand, if we are in Testnet and the block interval is less - // than 20 minutes, we need to scan backward to find a block for which the - // previous rule did not apply, to find the "real" difficulty. 
- } else if self.network == Network::BitcoinTestnet { - // Scan back DIFFCHANGE_INTERVAL blocks - unsafe { - let mut scan = prev; - while (*scan).height % DIFFCHANGE_INTERVAL != 0 && - (*scan).required_difficulty == max_target(self.network) { - scan = (*scan).prev; - } - (*scan).required_difficulty - } - // Otherwise just use the last block's difficulty - } else { - unsafe { (*prev).required_difficulty } - }; - // Create node - let ret = Box::new(BlockchainNode { - total_work: block.header.work().add(unsafe { &(*prev).total_work }), - block: block, - required_difficulty: difficulty, - height: unsafe { (*prev).height + 1 }, - has_txdata: has_txdata, - prev: prev, - next: ptr::null() - }); + /// Sets the best tip (not public) + fn set_best_tip(&mut self, tip: NodePtr) { + // Fix next links unsafe { - let prev = prev as *mut BlockchainNode; - (*prev).next = &*ret as NodePtr; + let mut scan = self.best_tip; + // Scan backward + while !(*scan).prev.is_null() { + // If we hit the old best, there is no need to reorg. + if scan == self.best_tip { break; } + // Otherwise set the next-ptr and carry on + let prev = (*scan).prev as *mut BlockchainNode; + (*prev).next = scan; + scan = (*scan).prev; + } } - ret - }, - None => { - return Err(PrevHashNotFound); - } - }; - - // spv validate the block - try!(new_block.block.header.spv_validate(&new_block.required_difficulty)); - - // Insert the new block - let raw_ptr = &*new_block as NodePtr; - self.tree.insert(&new_block.block.header.bitcoin_hash().into_le(), 256, new_block); - // Replace the best tip if necessary - if unsafe { (*raw_ptr).total_work > (*self.best_tip).total_work } { - self.set_best_tip(raw_ptr); + // Set best + self.best_hash = unsafe { (*tip).bitcoin_hash() }; + self.best_tip = tip; } - Ok(()) - } - /// Sets the best tip (not public) - fn set_best_tip(&mut self, tip: NodePtr) { - // Fix next links - unsafe { - let mut scan = self.best_tip; - // Scan backward - while (*scan).prev.is_not_null() { - // If we hit the old best, there is no need to reorg. 
- if scan == self.best_tip { break; } - // Otherwise set the next-ptr and carry on - let prev = (*scan).prev as *mut BlockchainNode; - (*prev).next = scan; - scan = (*scan).prev; - } + /// Returns the genesis block's blockhash + pub fn genesis_hash(&self) -> Sha256dHash { + self.genesis_hash } - // Set best - self.best_hash = unsafe { (*tip).bitcoin_hash() }; - self.best_tip = tip; - } - /// Returns the genesis block's blockhash - pub fn genesis_hash(&self) -> Sha256dHash { - self.genesis_hash - } - - /// Returns the best tip - pub fn best_tip<'a>(&'a self) -> &'a Block { - unsafe { &(*self.best_tip).block } - } - - /// Returns the best tip's blockhash - pub fn best_tip_hash(&self) -> Sha256dHash { - self.best_hash - } - - /// Returns an array of locator hashes used in `getheaders` messages - pub fn locator_hashes(&self) -> Vec { - LocatorHashIter::new(self.best_tip).collect() - } - - /// An iterator over all blocks in the chain starting from `start_hash` - pub fn iter<'a>(&'a self, start_hash: Sha256dHash) -> BlockIter<'a> { - let start = match self.tree.lookup(&start_hash.into_le(), 256) { - Some(boxptr) => &**boxptr as NodePtr, - None => ptr::null() - }; - BlockIter { - index: start, - marker: marker::PhantomData + /// Returns the best tip + pub fn best_tip<'a>(&'a self) -> &'a Block { + unsafe { &(*self.best_tip).block } } - } - /// An iterator over all blocks in reverse order to the genesis, starting with `start_hash` - pub fn rev_iter<'a>(&'a self, start_hash: Sha256dHash) -> RevBlockIter<'a> { - let start = match self.tree.lookup(&start_hash.into_le(), 256) { - Some(boxptr) => &**boxptr as NodePtr, - None => ptr::null() - }; - RevBlockIter { - index: start, - marker: marker::PhantomData + /// Returns the best tip's blockhash + pub fn best_tip_hash(&self) -> Sha256dHash { + self.best_hash } - } - /// An iterator over all blocks -not- in the best chain, in reverse order, starting from `start_hash` - pub fn rev_stale_iter<'a>(&'a self, start_hash: Sha256dHash) -> RevStaleBlockIter<'a> { - let start = match self.tree.lookup(&start_hash.into_le(), 256) { - Some(boxptr) => { - // If we are already on the main chain, we have a dead iterator - if boxptr.is_on_main_chain(self) { - ptr::null() - } else { - &**boxptr as NodePtr - } + /// Returns an array of locator hashes used in `getheaders` messages + pub fn locator_hashes(&self) -> Vec { + LocatorHashIter::new(self.best_tip).collect() + } + + /// An iterator over all blocks in the chain starting from `start_hash` + pub fn iter<'a>(&'a self, start_hash: Sha256dHash) -> BlockIter<'a> { + let start = match self.tree.lookup(&start_hash.into_le(), 256) { + Some(boxptr) => &**boxptr as NodePtr, + None => ptr::null() + }; + BlockIter { + index: start, + marker: marker::PhantomData + } + } + + /// An iterator over all blocks in reverse order to the genesis, starting with `start_hash` + pub fn rev_iter<'a>(&'a self, start_hash: Sha256dHash) -> RevBlockIter<'a> { + let start = match self.tree.lookup(&start_hash.into_le(), 256) { + Some(boxptr) => &**boxptr as NodePtr, + None => ptr::null() + }; + RevBlockIter { + index: start, + marker: marker::PhantomData + } + } + + /// An iterator over all blocks -not- in the best chain, in reverse order, starting from `start_hash` + pub fn rev_stale_iter<'a>(&'a self, start_hash: Sha256dHash) -> RevStaleBlockIter<'a> { + let start = match self.tree.lookup(&start_hash.into_le(), 256) { + Some(boxptr) => { + // If we are already on the main chain, we have a dead iterator + if boxptr.is_on_main_chain(self) { + 
ptr::null() + } else { + &**boxptr as NodePtr + } + } + None => ptr::null() + }; + RevStaleBlockIter { + index: start, + chain: self } - None => ptr::null() - }; - RevStaleBlockIter { - index: start, - chain: self } - } } #[cfg(test)] mod tests { - use std::io; + use std::io; - use blockdata::blockchain::Blockchain; - use blockdata::constants::genesis_block; - use network::constants::Network::Bitcoin; - use network::serialize::{BitcoinHash, deserialize, serialize}; + use blockdata::blockchain::Blockchain; + use blockdata::constants::genesis_block; + use network::constants::Network::Bitcoin; + use network::serialize::{BitcoinHash, deserialize, serialize}; - #[test] - fn blockchain_serialize_test() { - let empty_chain = Blockchain::new(Bitcoin); - assert_eq!(empty_chain.best_tip().header.bitcoin_hash(), - genesis_block(Bitcoin).header.bitcoin_hash()); + #[test] + fn blockchain_serialize_test() { + let empty_chain = Blockchain::new(Bitcoin); + assert_eq!(empty_chain.best_tip().header.bitcoin_hash(), + genesis_block(Bitcoin).header.bitcoin_hash()); - let serial = serialize(&empty_chain); - let deserial: io::Result = deserialize(serial.unwrap()); + let serial = serialize(&empty_chain); + let deserial: io::Result = deserialize(serial.unwrap()); - assert!(deserial.is_ok()); - let read_chain = deserial.unwrap(); - assert_eq!(read_chain.best_tip().header.bitcoin_hash(), - genesis_block(Bitcoin).header.bitcoin_hash()); - } + assert!(deserial.is_ok()); + let read_chain = deserial.unwrap(); + assert_eq!(read_chain.best_tip().header.bitcoin_hash(), + genesis_block(Bitcoin).header.bitcoin_hash()); + } } diff --git a/src/blockdata/constants.rs b/src/blockdata/constants.rs index 0119e8a5..e6d83ffe 100644 --- a/src/blockdata/constants.rs +++ b/src/blockdata/constants.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to @@ -23,7 +23,7 @@ use std::default::Default; use std::num::from_u64; use blockdata::opcodes; -use blockdata::script::Script; +use blockdata::script::ScriptBuilder; use blockdata::transaction::{Transaction, TxOut, TxIn}; use blockdata::block::{Block, BlockHeader}; use network::constants::Network; @@ -39,144 +39,144 @@ pub static DIFFCHANGE_TIMESPAN: u32 = 14 * 24 * 3600; /// In Bitcoind this is insanely described as ~((u256)0 >> 32) pub fn max_target(_: Network) -> Uint256 { - from_u64::(0xFFFF).unwrap() << 208 + from_u64::(0xFFFF).unwrap() << 208 } /// The maximum value allowed in an output (useful for sanity checking, /// since keeping everything below this value should prevent overflows /// if you are doing anything remotely sane with monetary values). 
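bitcoin_genesis_tx below assembles the coinbase scriptSig from three pushes: the nBits value 486604799, the number 4, and the 69-byte Times headline. A sketch of the byte-level encoding those push_scriptint/push_slice calls produce; it mirrors the crate's build_scriptint (little-endian signed-magnitude), simplified here to non-negative values:

    fn build_scriptint(mut n: u64) -> Vec<u8> {
        let mut out = Vec::new();
        while n > 0 {
            out.push((n & 0xff) as u8);
            n >>= 8;
        }
        // Pad with a zero byte if the top bit would otherwise read as a sign bit.
        if let Some(&last) = out.last() {
            if last & 0x80 != 0 {
                out.push(0);
            }
        }
        out
    }

    fn main() {
        // 486604799 = 0x1d00ffff -> ff ff 00 1d (no padding: 0x1d < 0x80); 4 -> 04.
        assert_eq!(build_scriptint(486604799), vec![0xff, 0xff, 0x00, 0x1d]);
        assert_eq!(build_scriptint(4), vec![0x04]);
        // In the script each datum is preceded by its push length, so the genesis
        // scriptSig starts 04 ff ff 00 1d 01 04 45 <69 headline bytes> -- the same
        // bytes the serialization test below expects after its leading 4d length byte.
    }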
pub fn max_money(_: Network) -> u64 { - 21_000_000 * COIN_VALUE + 21_000_000 * COIN_VALUE } /// Constructs and returns the coinbase (and only) transaction of the Bitcoin genesis block fn bitcoin_genesis_tx() -> Transaction { - // Base - let mut ret = Transaction { - version: 1, - lock_time: 0, - input: vec![], - output: vec![] - }; + // Base + let mut ret = Transaction { + version: 1, + lock_time: 0, + input: vec![], + output: vec![] + }; - // Inputs - let mut in_script = Script::new(); - in_script.push_scriptint(486604799); - in_script.push_scriptint(4); - in_script.push_slice("The Times 03/Jan/2009 Chancellor on brink of second bailout for banks".as_bytes()); - ret.input.push(TxIn { - prev_hash: Default::default(), - prev_index: 0xFFFFFFFF, - script_sig: in_script, - sequence: MAX_SEQUENCE - }); + // Inputs + let mut in_script = ScriptBuilder::new(); + in_script.push_scriptint(486604799); + in_script.push_scriptint(4); + in_script.push_slice("The Times 03/Jan/2009 Chancellor on brink of second bailout for banks".as_bytes()); + ret.input.push(TxIn { + prev_hash: Default::default(), + prev_index: 0xFFFFFFFF, + script_sig: in_script.into_script(), + sequence: MAX_SEQUENCE + }); - // Outputs - let mut out_script = Script::new(); - out_script.push_slice(hex_bytes("04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f").unwrap().as_slice()); - out_script.push_opcode(opcodes::All::OP_CHECKSIG); - ret.output.push(TxOut { - value: 50 * COIN_VALUE, - script_pubkey: out_script - }); + // Outputs + let mut out_script = ScriptBuilder::new(); + out_script.push_slice(hex_bytes("04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f").unwrap().as_slice()); + out_script.push_opcode(opcodes::All::OP_CHECKSIG); + ret.output.push(TxOut { + value: 50 * COIN_VALUE, + script_pubkey: out_script.into_script() + }); - // end - ret + // end + ret } /// Constructs and returns the genesis block pub fn genesis_block(network: Network) -> Block { - match network { - Network::Bitcoin => { - let txdata = vec![bitcoin_genesis_tx()]; - Block { - header: BlockHeader { - version: 1, - prev_blockhash: Default::default(), - merkle_root: txdata.merkle_root(), - time: 1231006505, - bits: 0x1d00ffff, - nonce: 2083236893 - }, - txdata: txdata - } + match network { + Network::Bitcoin => { + let txdata = vec![bitcoin_genesis_tx()]; + Block { + header: BlockHeader { + version: 1, + prev_blockhash: Default::default(), + merkle_root: txdata.merkle_root(), + time: 1231006505, + bits: 0x1d00ffff, + nonce: 2083236893 + }, + txdata: txdata + } + } + Network::Testnet => { + let txdata = vec![bitcoin_genesis_tx()]; + Block { + header: BlockHeader { + version: 1, + prev_blockhash: Default::default(), + merkle_root: txdata.merkle_root(), + time: 1296688602, + bits: 0x1d00ffff, + nonce: 414098458 + }, + txdata: txdata + } + } } - Network::Testnet => { - let txdata = vec![bitcoin_genesis_tx()]; - Block { - header: BlockHeader { - version: 1, - prev_blockhash: Default::default(), - merkle_root: txdata.merkle_root(), - time: 1296688602, - bits: 0x1d00ffff, - nonce: 414098458 - }, - txdata: txdata - } - } - } } #[cfg(test)] mod test { - use std::default::Default; - use serialize::hex::FromHex; + use std::default::Default; + use serialize::hex::FromHex; - use network::constants::Network; - use network::serialize::{BitcoinHash, serialize}; - use blockdata::constants::{genesis_block, 
bitcoin_genesis_tx}; - use blockdata::constants::{MAX_SEQUENCE, COIN_VALUE}; + use network::constants::Network; + use network::serialize::{BitcoinHash, serialize}; + use blockdata::constants::{genesis_block, bitcoin_genesis_tx}; + use blockdata::constants::{MAX_SEQUENCE, COIN_VALUE}; - #[test] - fn bitcoin_genesis_first_transaction() { - let gen = bitcoin_genesis_tx(); + #[test] + fn bitcoin_genesis_first_transaction() { + let gen = bitcoin_genesis_tx(); - assert_eq!(gen.version, 1); - assert_eq!(gen.input.len(), 1); - assert_eq!(gen.input[0].prev_hash, Default::default()); - assert_eq!(gen.input[0].prev_index, 0xFFFFFFFF); - assert_eq!(serialize(&gen.input[0].script_sig), - Ok("4d04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73".from_hex().unwrap())); + assert_eq!(gen.version, 1); + assert_eq!(gen.input.len(), 1); + assert_eq!(gen.input[0].prev_hash, Default::default()); + assert_eq!(gen.input[0].prev_index, 0xFFFFFFFF); + assert_eq!(serialize(&gen.input[0].script_sig), + Ok("4d04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73".from_hex().unwrap())); - assert_eq!(gen.input[0].sequence, MAX_SEQUENCE); - assert_eq!(gen.output.len(), 1); - assert_eq!(serialize(&gen.output[0].script_pubkey), - Ok("434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac".from_hex().unwrap())); - assert_eq!(gen.output[0].value, 50 * COIN_VALUE); - assert_eq!(gen.lock_time, 0); + assert_eq!(gen.input[0].sequence, MAX_SEQUENCE); + assert_eq!(gen.output.len(), 1); + assert_eq!(serialize(&gen.output[0].script_pubkey), + Ok("434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac".from_hex().unwrap())); + assert_eq!(gen.output[0].value, 50 * COIN_VALUE); + assert_eq!(gen.lock_time, 0); - assert_eq!(gen.bitcoin_hash().be_hex_string(), - "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b".to_string()); - } + assert_eq!(gen.bitcoin_hash().be_hex_string(), + "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b".to_string()); + } - #[test] - fn bitcoin_genesis_full_block() { - let gen = genesis_block(network::Bitcoin); + #[test] + fn bitcoin_genesis_full_block() { + let gen = genesis_block(network::Bitcoin); - assert_eq!(gen.header.version, 1); - assert_eq!(gen.header.prev_blockhash, Default::default()); - assert_eq!(gen.header.merkle_root.be_hex_string(), - "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b".to_string()); - assert_eq!(gen.header.time, 1231006505); - assert_eq!(gen.header.bits, 0x1d00ffff); - assert_eq!(gen.header.nonce, 2083236893); - assert_eq!(gen.header.bitcoin_hash().be_hex_string(), - "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f".to_string()); - } + assert_eq!(gen.header.version, 1); + assert_eq!(gen.header.prev_blockhash, Default::default()); + assert_eq!(gen.header.merkle_root.be_hex_string(), + "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b".to_string()); + assert_eq!(gen.header.time, 1231006505); + assert_eq!(gen.header.bits, 0x1d00ffff); + assert_eq!(gen.header.nonce, 2083236893); + assert_eq!(gen.header.bitcoin_hash().be_hex_string(), + "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f".to_string()); + } - #[test] - fn 
testnet_genesis_full_block() { - let gen = genesis_block(network::Testnet); - assert_eq!(gen.header.version, 1); - assert_eq!(gen.header.prev_blockhash, Default::default()); - assert_eq!(gen.header.merkle_root.be_hex_string(), - "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b".to_string()); - assert_eq!(gen.header.time, 1296688602); - assert_eq!(gen.header.bits, 0x1d00ffff); - assert_eq!(gen.header.nonce, 414098458); - assert_eq!(gen.header.bitcoin_hash().be_hex_string(), - "000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943".to_string()); - } + #[test] + fn testnet_genesis_full_block() { + let gen = genesis_block(network::Testnet); + assert_eq!(gen.header.version, 1); + assert_eq!(gen.header.prev_blockhash, Default::default()); + assert_eq!(gen.header.merkle_root.be_hex_string(), + "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b".to_string()); + assert_eq!(gen.header.time, 1296688602); + assert_eq!(gen.header.bits, 0x1d00ffff); + assert_eq!(gen.header.nonce, 414098458); + assert_eq!(gen.header.bitcoin_hash().be_hex_string(), + "000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943".to_string()); + } } diff --git a/src/blockdata/opcodes.rs b/src/blockdata/opcodes.rs index da66ba79..8599c045 100644 --- a/src/blockdata/opcodes.rs +++ b/src/blockdata/opcodes.rs @@ -592,10 +592,10 @@ impl All { // 16 opcodes } else if All::OP_PUSHNUM_1 as u8 <= *self as u8 && *self as u8 <= All::OP_PUSHNUM_16 as u8 { - Class::PushNum(1 + *self as isize - All::OP_PUSHNUM_1 as isize) + Class::PushNum(1 + *self as i32 - All::OP_PUSHNUM_1 as i32) // 76 opcodes } else if *self as u8 <= All::OP_PUSHBYTES_75 as u8 { - Class::PushBytes(*self as usize) + Class::PushBytes(*self as u32) // 60 opcodes } else { Class::Ordinary(unsafe { transmute(*self) }) @@ -636,9 +636,9 @@ pub static OP_TRUE: All = All::OP_PUSHNUM_1; #[derive(Clone, PartialEq, Eq, Debug)] pub enum Class { /// Pushes the given number onto the stack - PushNum(isize), + PushNum(i32), /// Pushes the given number of bytes onto the stack - PushBytes(usize), + PushBytes(u32), /// Fails the script if executed ReturnOp, /// Fails the script even if not executed diff --git a/src/blockdata/script.rs b/src/blockdata/script.rs index 6370dfda..162fe54c 100644 --- a/src/blockdata/script.rs +++ b/src/blockdata/script.rs @@ -35,7 +35,7 @@ use crypto::digest::Digest; use crypto::ripemd160::Ripemd160; use crypto::sha1::Sha1; use crypto::sha2::Sha256; -use secp256k1::Secp256k1; +use secp256k1::{self, Secp256k1}; use secp256k1::key::PublicKey; use serde; @@ -56,13 +56,16 @@ impl Clone for Script { } } +#[derive(PartialEq, Eq, Debug, Clone, Display)] +/// An object which can be used to construct a script piece by piece +pub struct ScriptBuilder(Vec); + impl hash::Hash for Script { #[inline] fn hash(&self, state: &mut H) where H: hash::Hasher { - let &Script(ref raw) = self; - (&raw[..]).hash(state); + (&self.0[..]).hash(state); } #[inline] @@ -70,8 +73,7 @@ impl hash::Hash for Script { where H: hash::Hasher { for s in data.iter() { - let &Script(ref raw) = s; - (&raw[..]).hash(state); + (&s.0[..]).hash(state); } } } @@ -95,7 +97,7 @@ pub enum Error { /// OP_CHECKSIG was called with a bad signature BadSignature, /// An ECDSA error - Ecdsa(::secp256k1::Error), + Ecdsa(secp256k1::Error), /// An OP_ELSE happened while not in an OP_IF tree ElseWithoutIf, /// An OP_ENDIF happened while not in an OP_IF tree @@ -1558,7 +1560,7 @@ fn check_signature(sig_slice: &[u8], pk_slice: &[u8], script: Vec, for _ in 0..input_index { 
new_outs.push(Default::default()) } - new_outs.push(tx_copy.output.swap_remove(input_index).unwrap()); + new_outs.push(tx_copy.output.swap_remove(input_index)); tx_copy.output = new_outs; } else { sighash_single_bug = true; @@ -1581,7 +1583,10 @@ fn check_signature(sig_slice: &[u8], pk_slice: &[u8], script: Vec, serialize(&Sha256dHash::from_data(&data_to_sign[..])).unwrap() }; - Secp256k1::verify_raw(&signature_hash[..], sig_slice, &pubkey).map_err(Error::Ecdsa) + // We can unwrap -- only failure mode is on length, which is fixed to 32 + let msg = secp256k1::Message::from_slice(&signature_hash[..]).unwrap(); + + Secp256k1::verify_raw(&msg, sig_slice, &pubkey).map_err(Error::Ecdsa) } // Macro to translate English stack instructions into Rust code. @@ -1719,70 +1724,7 @@ impl Script { pub fn from_vec(v: Vec) -> Script { Script(v.into_boxed_slice()) } /// The length in bytes of the script - pub fn len(&self) -> usize { - let &Script(ref raw) = self; - raw.len() - } - - /// Adds instructions to push an integer onto the stack. Integers are - /// encoded as little-endian signed-magnitude numbers, but there are - /// dedicated opcodes to push some small integers. - pub fn push_int(&mut self, data: i64) { - // We can special-case -1, 1-16 - if data == -1 || (data >= 1 && data <=16) { - let &Script(ref mut raw) = self; - raw.push(data as u8 + opcodes::All::OP_TRUE as u8); - return; - } - // We can also special-case zero - if data == 0 { - let &Script(ref mut raw) = self; - raw.push(opcodes::All::OP_FALSE as u8); - return; - } - // Otherwise encode it as data - self.push_scriptint(data); - } - - /// Adds instructions to push an integer onto the stack, using the explicit - /// encoding regardless of the availability of dedicated opcodes. - pub fn push_scriptint(&mut self, data: i64) { - self.push_slice(&build_scriptint(data)); - } - - /// Adds instructions to push some arbitrary data onto the stack - pub fn push_slice(&mut self, data: &[u8]) { - let &Script(ref mut raw) = self; - // Start with a PUSH opcode - match data.len() { - n if n < opcodes::Ordinary::OP_PUSHDATA1 as usize => { raw.push(n as u8); }, - n if n < 0x100 => { - raw.push(opcodes::Ordinary::OP_PUSHDATA1 as u8); - raw.push(n as u8); - }, - n if n < 0x10000 => { - raw.push(opcodes::Ordinary::OP_PUSHDATA2 as u8); - raw.push((n % 0x100) as u8); - raw.push((n / 0x100) as u8); - }, - n if n < 0x100000000 => { - raw.push(opcodes::Ordinary::OP_PUSHDATA4 as u8); - raw.push((n % 0x100) as u8); - raw.push(((n / 0x100) % 0x100) as u8); - raw.push(((n / 0x10000) % 0x100) as u8); - raw.push((n / 0x1000000) as u8); - } - _ => panic!("tried to put a 4bn+ sized object into a script!") - } - // Then push the acraw - raw.extend(data.iter().map(|n| *n)); - } - - /// Adds an individual opcode to the script - pub fn push_opcode(&mut self, data: opcodes::All) { - let &Script(ref mut raw) = self; - raw.push(data as u8); - } + pub fn len(&self) -> usize { self.0.len() } /// Trace a script pub fn trace<'a>(&'a self, stack: &mut Vec>, @@ -1807,21 +1749,19 @@ impl Script { input_context: Option<(&Transaction, usize)>, mut trace: Option<&mut Vec>) -> Result<(), Error> { - let &Script(ref raw) = self; - let mut codeseparator_index = 0; let mut exec_stack = vec![]; let mut alt_stack = vec![]; let mut index = 0; let mut op_count = 0; - while index < raw.len() { + while index < self.0.len() { let executing = exec_stack.iter().all(|e| *e); - let byte = unsafe { *raw.get(index) }; + let byte = self.0[index]; // Write out the trace, except the stack which we don't know 
yet match trace { Some(ref mut t) => { - let opcode = opcodes::All::Opcode::from_u8(byte); + let opcode = opcodes::All::from_u8(byte); t.push(TraceIteration { index: index, opcode: opcode, @@ -1837,7 +1777,7 @@ impl Script { op_count += 1; index += 1; // The definitions of all these categories are in opcodes.rs - match (executing, opcodes::All::Opcode::from_u8(byte).classify()) { + match (executing, opcodes::All::from_u8(byte).classify()) { // Illegal operations mean failure regardless of execution state (_, opcodes::Class::IllegalOp) => return Err(Error::IllegalOpcode), // Push number @@ -1846,29 +1786,30 @@ impl Script { (true, opcodes::Class::ReturnOp) => return Err(Error::ExecutedReturn), // Data-reading statements still need to read, even when not executing (_, opcodes::Class::PushBytes(n)) => { - if raw.len() < index + n { return Err(Error::EarlyEndOfScript); } - if executing { stack.push(MaybeOwned::Borrowed(raw.slice(index, index + n))); } + let n = n as usize; + if self.0.len() < index + n { return Err(Error::EarlyEndOfScript); } + if executing { stack.push(MaybeOwned::Borrowed(&self.0[index..index + n])); } index += n; } (_, opcodes::Class::Ordinary(opcodes::Ordinary::OP_PUSHDATA1)) => { - if raw.len() < index + 1 { return Err(Error::EarlyEndOfScript); } - let n = try!(read_uint(&raw[index..], 1)); - if raw.len() < index + 1 + n { return Err(Error::EarlyEndOfScript); } - if executing { stack.push(MaybeOwned::Borrowed(raw.slice(index + 1, index + n + 1))); } + if self.0.len() < index + 1 { return Err(Error::EarlyEndOfScript); } + let n = try!(read_uint(&self.0[index..], 1)); + if self.0.len() < index + 1 + n { return Err(Error::EarlyEndOfScript); } + if executing { stack.push(MaybeOwned::Borrowed(&self.0[index + 1..index + n + 1])); } index += 1 + n; } (_, opcodes::Class::Ordinary(opcodes::Ordinary::OP_PUSHDATA2)) => { - if raw.len() < index + 2 { return Err(Error::EarlyEndOfScript); } - let n = try!(read_uint(&raw[index..], 2)); - if raw.len() < index + 2 + n { return Err(Error::EarlyEndOfScript); } - if executing { stack.push(MaybeOwned::Borrowed(raw.slice(index + 2, index + n + 2))); } + if self.0.len() < index + 2 { return Err(Error::EarlyEndOfScript); } + let n = try!(read_uint(&self.0[index..], 2)); + if self.0.len() < index + 2 + n { return Err(Error::EarlyEndOfScript); } + if executing { stack.push(MaybeOwned::Borrowed(&self.0[index + 2..index + n + 2])); } index += 2 + n; } (_, opcodes::Class::Ordinary(opcodes::Ordinary::OP_PUSHDATA4)) => { - if raw.len() < index + 4 { return Err(Error::EarlyEndOfScript); } - let n = try!(read_uint(&raw[index..], 4)); - if raw.len() < index + 4 + n { return Err(Error::EarlyEndOfScript); } - if executing { stack.push(MaybeOwned::Borrowed(raw.slice(index + 4, index + n + 4))); } + if self.0.len() < index + 4 { return Err(Error::EarlyEndOfScript); } + let n = try!(read_uint(&self.0[index..], 4)); + if self.0.len() < index + 4 + n { return Err(Error::EarlyEndOfScript); } + if executing { stack.push(MaybeOwned::Borrowed(&self.0[index + 4..index + n + 4])); } index += 4 + n; } // If-statements take effect when not executing @@ -1942,7 +1883,7 @@ impl Script { opcodes::Ordinary::OP_OVER => stack_opcode!(stack(2): copy 2), opcodes::Ordinary::OP_PICK => { let n = match stack.pop() { - Some(data) => try!(read_scriptint(&data)), + Some(data) => try!(read_scriptint(&data[..])), None => { return Err(Error::PopEmptyStack); } }; if n < 0 { return Err(Error::NegativePick); } @@ -1951,7 +1892,7 @@ impl Script { } opcodes::Ordinary::OP_ROLL => { let n = 
match stack.pop() { - Some(data) => try!(read_scriptint(&data)), + Some(data) => try!(read_scriptint(&data[..])), None => { return Err(Error::PopEmptyStack); } }; if n < 0 { return Err(Error::NegativeRoll); } @@ -2033,11 +1974,12 @@ impl Script { // Compute the section of script that needs to be hashed: everything // from the last CODESEPARATOR, except the signature itself. - let mut script = (&raw[codeseparator_index..]).to_vec(); - let mut remove = Script::new(); + let mut script = (&self.0[codeseparator_index..]).to_vec(); + let mut remove = ScriptBuilder::new(); remove.push_slice(sig_slice); - script_find_and_remove(&mut script, &remove); - script_find_and_remove(&mut script, [opcodes::Ordinary::OP_CODESEPARATOR as u8]); + script_find_and_remove(&mut script, &remove[..]); + // Also all of the OP_CODESEPARATORS, even the unevaluated ones + script_find_and_remove(&mut script, &[opcodes::Ordinary::OP_CODESEPARATOR as u8]); // This is as far as we can go without a transaction, so fail here if input_context.is_none() { return Err(Error::NoTransaction); } @@ -2053,7 +1995,7 @@ impl Script { opcodes::Ordinary::OP_CHECKMULTISIG | opcodes::Ordinary::OP_CHECKMULTISIGVERIFY => { // Read all the keys if stack.len() < 1 { return Err(Error::PopEmptyStack); } - let n_keys = try!(read_scriptint(&stack.pop().unwrap())); + let n_keys = try!(read_scriptint(&stack.pop().unwrap()[..])); if n_keys < 0 || n_keys > 20 { return Err(Error::MultisigBadKeyCount(n_keys as isize)); } @@ -2066,7 +2008,7 @@ impl Script { // Read all the signatures if stack.len() < 1 { return Err(Error::PopEmptyStack); } - let n_sigs = try!(read_scriptint(&stack.pop().unwrap())); + let n_sigs = try!(read_scriptint(&stack.pop().unwrap()[..])); if n_sigs < 0 || n_sigs > n_keys { return Err(Error::MultisigBadSigCount(n_sigs as isize)); } @@ -2082,12 +2024,12 @@ impl Script { // Compute the section of script that needs to be hashed: everything // from the last CODESEPARATOR, except the signatures themselves. 
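Both the OP_CHECKSIG branch above and the OP_CHECKMULTISIG branch below strip the signature(s) and every OP_CODESEPARATOR out of the script before it is hashed. As a rough, std-only sketch of what a find-and-remove helper such as `script_find_and_remove` is doing here (the helper actually used in this codebase may differ in details such as its return value), deleting every occurrence of a byte pattern from a byte vector looks like this:

    fn find_and_remove(haystack: &mut Vec<u8>, needle: &[u8]) {
        if needle.is_empty() || needle.len() > haystack.len() {
            return;
        }
        let mut out = Vec::with_capacity(haystack.len());
        let mut i = 0;
        while i < haystack.len() {
            if haystack[i..].starts_with(needle) {
                i += needle.len(); // skip the match entirely
            } else {
                out.push(haystack[i]); // keep this byte
                i += 1;
            }
        }
        *haystack = out;
    }

    fn main() {
        let mut script = vec![0x01, 0xab, 0xcd, 0x02, 0xab, 0xcd];
        find_and_remove(&mut script, &[0xab, 0xcd]);
        assert_eq!(script, vec![0x01, 0x02]);
    }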
- let mut script = (&raw[codeseparator_index..]).to_vec(); + let mut script = (&self.0[codeseparator_index..]).to_vec(); for sig in sigs.iter() { - let mut remove = Script::new(); - remove.push_slice(&sig); - script_find_and_remove(&mut script, &remove); - script_find_and_remove(&mut script, [opcodes::Ordinary::OP_CODESEPARATOR as u8]); + let mut remove = ScriptBuilder::new(); + remove.push_slice(&sig[..]); + script_find_and_remove(&mut script, &remove[..]); + script_find_and_remove(&mut script, &[opcodes::Ordinary::OP_CODESEPARATOR as u8]); } // This is as far as we can go without a transaction, so fail here @@ -2105,7 +2047,7 @@ impl Script { // Try to validate the signature with the given key (Some(k), Some(s)) => { // Move to the next signature if it is valid for the current key - if check_signature(&s, &k, script.clone(), tx, input_index).is_ok() { + if check_signature(&s[..], &k[..], script.clone(), tx, input_index).is_ok() { sig = sig_iter.next(); } // Move to the next key in any case @@ -2142,12 +2084,11 @@ impl Script { /// Checks whether a script pubkey is a p2sh output #[inline] pub fn is_p2sh(&self) -> bool { - let &Script(ref raw) = self; unsafe { - raw.len() == 23 && - *raw.get(0) == opcodes::All::OP_HASH160 as u8 && - *raw.get(1) == opcodes::All::OP_PUSHBYTES_20 as u8 && - *raw.get(22) == opcodes::All::OP_EQUAL as u8 + self.0.len() == 23 && + self.0[0] == opcodes::All::OP_HASH160 as u8 && + self.0[1] == opcodes::All::OP_PUSHBYTES_20 as u8 && + self.0[22] == opcodes::All::OP_EQUAL as u8 } } @@ -2186,9 +2127,10 @@ impl Script { (true, opcodes::Class::ReturnOp) => return Err(Error::ExecutedReturn), // Data-reading statements still need to read, even when not executing (_, opcodes::Class::PushBytes(n)) => { + let n = n as usize; if script.len() < index + n { return Err(Error::EarlyEndOfScript); } if executing { - stack.push_alloc(AbstractStackElem::new_raw(script.slice(index, index + n))); + stack.push_alloc(AbstractStackElem::new_raw(&script[index..index + n])); } index += n; } @@ -2200,7 +2142,7 @@ impl Script { }; if script.len() < index + 1 + n { return Err(Error::EarlyEndOfScript); } if executing { - stack.push_alloc(AbstractStackElem::new_raw(script.slice(index + 1, index + n + 1))); + stack.push_alloc(AbstractStackElem::new_raw(&script[index + 1..index + n + 1])); } index += 1 + n; } @@ -2212,7 +2154,7 @@ impl Script { }; if script.len() < index + 2 + n { return Err(Error::EarlyEndOfScript); } if executing { - stack.push_alloc(AbstractStackElem::new_raw(script.slice(index + 2, index + n + 2))); + stack.push_alloc(AbstractStackElem::new_raw(&script[index + 2..index + n + 2])); } index += 2 + n; } @@ -2223,7 +2165,7 @@ impl Script { }; if script.len() < index + 4 + n { return Err(Error::EarlyEndOfScript); } if executing { - stack.push_alloc(AbstractStackElem::new_raw(script.slice(index + 4, index + n + 4))); + stack.push_alloc(AbstractStackElem::new_raw(&script[index + 4..index + n + 4])); } index += 4 + n; } @@ -2495,8 +2437,7 @@ impl Script { } } - let &Script(ref raw) = self; - recurse(&raw, AbstractStack::new(), vec![], 1) + recurse(&self.0, AbstractStack::new(), vec![], 1) } } @@ -2504,62 +2445,93 @@ impl Default for Script { fn default() -> Script { Script(vec![].into_boxed_slice()) } } +impl_index_newtype!(Script, u8); -impl ops::Index for Script { - type Output = u8; - #[inline] - fn index(&self, index: usize) -> &u8 { - let &Script(ref raw) = self; - &raw[index] +impl ScriptBuilder { + /// Creates a new empty script + pub fn new() -> ScriptBuilder { 
ScriptBuilder(vec![]) } + + /// Creates a new script from an existing vector + pub fn from_vec(v: Vec) -> ScriptBuilder { ScriptBuilder(v) } + + /// The length in bytes of the script + pub fn len(&self) -> usize { self.0.len() } + + /// Adds instructions to push an integer onto the stack. Integers are + /// encoded as little-endian signed-magnitude numbers, but there are + /// dedicated opcodes to push some small integers. + pub fn push_int(&mut self, data: i64) { + // We can special-case -1, 1-16 + if data == -1 || (data >= 1 && data <=16) { + self.0.push(data as u8 + opcodes::OP_TRUE as u8); + } + // We can also special-case zero + else if data == 0 { + self.0.push(opcodes::OP_FALSE as u8); + } + // Otherwise encode it as data + else { self.push_scriptint(data); } + } + + /// Adds instructions to push an integer onto the stack, using the explicit + /// encoding regardless of the availability of dedicated opcodes. + pub fn push_scriptint(&mut self, data: i64) { + self.push_slice(&build_scriptint(data)); + } + + /// Adds instructions to push some arbitrary data onto the stack + pub fn push_slice(&mut self, data: &[u8]) { + // Start with a PUSH opcode + match data.len() { + n if n < opcodes::Ordinary::OP_PUSHDATA1 as usize => { self.0.push(n as u8); }, + n if n < 0x100 => { + self.0.push(opcodes::Ordinary::OP_PUSHDATA1 as u8); + self.0.push(n as u8); + }, + n if n < 0x10000 => { + self.0.push(opcodes::Ordinary::OP_PUSHDATA2 as u8); + self.0.push((n % 0x100) as u8); + self.0.push((n / 0x100) as u8); + }, + n if n < 0x100000000 => { + self.0.push(opcodes::Ordinary::OP_PUSHDATA4 as u8); + self.0.push((n % 0x100) as u8); + self.0.push(((n / 0x100) % 0x100) as u8); + self.0.push(((n / 0x10000) % 0x100) as u8); + self.0.push((n / 0x1000000) as u8); + } + _ => panic!("tried to put a 4bn+ sized object into a script!") + } + // Then push the acraw + self.0.extend(data.iter().map(|n| *n)); + } + + pub fn push_opcode(&mut self, data: opcodes::All) { + self.0.push(data as u8); + } + + pub fn into_script(self) -> Script { + Script(self.0.into_boxed_slice()) } } -impl ops::Index> for Script { - type Output = [u8]; - #[inline] - fn index(&self, index: ops::Range) -> &[u8] { - let &Script(ref raw) = self; - &raw[index] - } +/// Adds an individual opcode to the script +impl Default for ScriptBuilder { + fn default() -> ScriptBuilder { ScriptBuilder(vec![]) } } -impl ops::Index> for Script { - type Output = [u8]; - #[inline] - fn index(&self, index: ops::RangeTo) -> &[u8] { - let &Script(ref raw) = self; - &raw[index] - } -} - -impl ops::Index> for Script { - type Output = [u8]; - #[inline] - fn index(&self, index: ops::RangeFrom) -> &[u8] { - let &Script(ref raw) = self; - &raw[index] - } -} - -impl ops::Index for Script { - type Output = [u8]; - #[inline] - fn index(&self, _: ops::RangeFull) -> &[u8] { - let &Script(ref raw) = self; - &raw[..] 
- } -} +impl_index_newtype!(ScriptBuilder, u8); // User-facing serialization impl serde::Serialize for Script { fn serialize(&self, serializer: &mut S) -> Result<(), S::Error> where S: serde::Serializer, { - let &Script(ref raw) = self; - for dat in raw.iter() { - serializer.visit_char(from_digit((dat / 0x10) as u32, 16).unwrap()); - serializer.visit_char(from_digit((dat & 0x0f) as u32, 16).unwrap()); + for dat in self.0.iter() { + try!(serializer.visit_char(from_digit((dat / 0x10) as u32, 16).unwrap())); + try!(serializer.visit_char(from_digit((dat & 0x0f) as u32, 16).unwrap())); } + Ok(()) } } @@ -2567,8 +2539,7 @@ impl serde::Serialize for Script { impl ConsensusEncodable for Script { #[inline] fn consensus_encode(&self, s: &mut S) -> Result<(), S::Error> { - let &Script(ref data) = self; - data.consensus_encode(s) + self.0.consensus_encode(s) } } diff --git a/src/blockdata/transaction.rs b/src/blockdata/transaction.rs index be7e84c5..35af3e63 100644 --- a/src/blockdata/transaction.rs +++ b/src/blockdata/transaction.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to @@ -32,271 +32,272 @@ use blockdata::utxoset::UtxoSet; use network::encodable::ConsensusEncodable; use network::serialize::BitcoinHash; use network::constants::Network; -use wallet::address::Address; +use wallet::address::{Address, ToAddress}; /// A transaction input, which defines old coins to be consumed #[derive(Clone, PartialEq, Eq, Debug)] pub struct TxIn { - /// The hash of the transaction whose output is being used an an input - pub prev_hash: Sha256dHash, - /// The index of the output in the previous transaction, which may have several - pub prev_index: u32, - /// The script which pushes values on the stack which will cause - /// the referenced output's script to accept - pub script_sig: Script, - /// The sequence number, which suggests to miners which of two - /// conflicting transactions should be preferred, or 0xFFFFFFFF - /// to ignore this feature. This is generally never used since - /// the miner behaviour cannot be enforced. - pub sequence: u32, + /// The hash of the transaction whose output is being used an an input + pub prev_hash: Sha256dHash, + /// The index of the output in the previous transaction, which may have several + pub prev_index: u32, + /// The script which pushes values on the stack which will cause + /// the referenced output's script to accept + pub script_sig: Script, + /// The sequence number, which suggests to miners which of two + /// conflicting transactions should be preferred, or 0xFFFFFFFF + /// to ignore this feature. This is generally never used since + /// the miner behaviour cannot be enforced. + pub sequence: u32, } /// A transaction output, which defines new coins to be created from old ones. 
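The serde impl just above emits the script as lowercase hex, one nibble per character via `from_digit`. A stand-alone, std-only version of that loop over a plain byte slice, for reference:

    use std::char::from_digit;

    fn to_hex(bytes: &[u8]) -> String {
        let mut out = String::with_capacity(bytes.len() * 2);
        for &b in bytes {
            out.push(from_digit((b / 0x10) as u32, 16).unwrap());
            out.push(from_digit((b & 0x0f) as u32, 16).unwrap());
        }
        out
    }

    fn main() {
        assert_eq!(to_hex(&[0x76, 0xa9, 0x14]), "76a914");
    }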
#[derive(Clone, PartialEq, Eq, Debug)] pub struct TxOut { - /// The value of the output, in satoshis - pub value: u64, - /// The script which must satisfy for the output to be spent - pub script_pubkey: Script + /// The value of the output, in satoshis + pub value: u64, + /// The script which must satisfy for the output to be spent + pub script_pubkey: Script } // This is used as a "null txout" in consensus signing code impl Default for TxOut { - fn default() -> TxOut { - TxOut { value: 0xffffffffffffffff, script_pubkey: Script::new() } - } + fn default() -> TxOut { + TxOut { value: 0xffffffffffffffff, script_pubkey: Script::new() } + } } /// A classification for script pubkeys pub enum ScriptPubkeyTemplate { - /// A pay-to-address output - PayToPubkeyHash(Address), - /// Another kind of output - Unknown + /// A pay-to-address output + PayToPubkeyHash(Address), + /// Another kind of output + Unknown } impl TxOut { - /// Determines the template that this output adheres to, if any - pub fn classify(&self, network: Network) -> ScriptPubkeyTemplate { - if self.script_pubkey.len() == 25 && - self.script_pubkey.slice_to(3) == &[0x76, 0xa9, 0x14] && - self.script_pubkey.slice_from(23) == &[0x88, 0xac] { - ScriptPubkeyTemplate::PayToPubkeyHash(self.script_pubkey.slice(3, 23).to_address(network)) - } else { - ScriptPubkeyTemplate::Unknown + /// Determines the template that this output adheres to, if any + pub fn classify(&self, network: Network) -> ScriptPubkeyTemplate { + if self.script_pubkey.len() == 25 && + &self.script_pubkey[0..3] == &[0x76, 0xa9, 0x14] && + &self.script_pubkey[23..] == &[0x88, 0xac] { + ScriptPubkeyTemplate::PayToPubkeyHash((&self.script_pubkey[3..23]).to_address(network)) + } else { + ScriptPubkeyTemplate::Unknown + } } - } } /// A Bitcoin transaction, which describes an authenticated movement of coins #[derive(Clone, PartialEq, Eq, Debug)] pub struct Transaction { - /// The protocol version, should always be 1. - pub version: u32, - /// Block number before which this transaction is valid, or 0 for - /// valid immediately. - pub lock_time: u32, - /// List of inputs - pub input: Vec, - /// List of outputs - pub output: Vec + /// The protocol version, should always be 1. + pub version: u32, + /// Block number before which this transaction is valid, or 0 for + /// valid immediately. 
+ pub lock_time: u32, + /// List of inputs + pub input: Vec, + /// List of outputs + pub output: Vec } /// A transaction error #[derive(PartialEq, Eq, Clone, Debug)] pub enum Error { - /// Concatenated script failed in the input half (script error) - InputScriptFailure(script::Error), - /// Concatenated script failed in the output half (script error) - OutputScriptFailure(script::Error), - /// P2SH serialized script failed (script error) - P2shScriptFailure(script::Error), - /// P2SH serialized script ended with false at the top of the stack - P2shScriptReturnedFalse, - /// P2SH serialized script ended with nothing in the stack - P2shScriptReturnedEmptyStack, - /// Script ended with false at the top of the stack - ScriptReturnedFalse, - /// Script ended with nothing in the stack - ScriptReturnedEmptyStack, - /// Script ended with nothing in the stack (input txid, input vout) - InputNotFound(Sha256dHash, u32), + /// Concatenated script failed in the input half (script error) + InputScriptFailure(script::Error), + /// Concatenated script failed in the output half (script error) + OutputScriptFailure(script::Error), + /// P2SH serialized script failed (script error) + P2shScriptFailure(script::Error), + /// P2SH serialized script ended with false at the top of the stack + P2shScriptReturnedFalse, + /// P2SH serialized script ended with nothing in the stack + P2shScriptReturnedEmptyStack, + /// Script ended with false at the top of the stack + ScriptReturnedFalse, + /// Script ended with nothing in the stack + ScriptReturnedEmptyStack, + /// Script ended with nothing in the stack (input txid, input vout) + InputNotFound(Sha256dHash, u32), } +display_from_debug!(Error); impl serde::Serialize for Error { - fn serialize(&self, serializer: &mut S) -> Result<(), S::Error> - where S: serde::Serializer, - { - serializer.visit_str(&self.to_string()) - } + fn serialize(&self, serializer: &mut S) -> Result<(), S::Error> + where S: serde::Serializer, + { + serializer.visit_str(&self.to_string()) + } } /// A trace of a transaction input's script execution #[derive(PartialEq, Eq, Clone, Debug)] pub struct InputTrace { - input_txid: Sha256dHash, - input_vout: usize, - sig_trace: ScriptTrace, - pubkey_trace: Option, - p2sh_trace: Option, - error: Option + input_txid: Sha256dHash, + input_vout: usize, + sig_trace: ScriptTrace, + pubkey_trace: Option, + p2sh_trace: Option, + error: Option } /// A trace of a transaction's execution #[derive(PartialEq, Eq, Clone, Debug)] pub struct TransactionTrace { - txid: Sha256dHash, - inputs: Vec + txid: Sha256dHash, + inputs: Vec } impl TxIn { - /// Check an input's script for validity - pub fn validate(&self, - utxoset: &UtxoSet, - txn: &Transaction, - index: usize) -> Result<(), Error> { - let txo = utxoset.get_utxo(self.prev_hash, self.prev_index); - match txo { - Some((_, txo)) => { - let mut p2sh_stack = Vec::new(); - let mut p2sh_script = Script::new(); + /// Check an input's script for validity + pub fn validate(&self, + utxoset: &UtxoSet, + txn: &Transaction, + index: usize) -> Result<(), Error> { + let txo = utxoset.get_utxo(self.prev_hash, self.prev_index); + match txo { + Some((_, txo)) => { + let mut p2sh_stack = Vec::new(); + let mut p2sh_script = Script::new(); - let mut stack = Vec::with_capacity(6); - match self.script_sig.evaluate(&mut stack, Some((txn, index)), None) { - Ok(_) => {} - Err(e) => { return Err(Error::InputScriptFailure(e)); } - } - if txo.script_pubkey.is_p2sh() && stack.len() > 0 { - p2sh_stack = stack.clone(); - p2sh_script = match 
p2sh_stack.pop() { - Some(script::MaybeOwned::Owned(v)) => Script::from_vec(v), - Some(script::MaybeOwned::Borrowed(s)) => Script::from_vec(s.to_vec()), - None => unreachable!() - }; - } - match txo.script_pubkey.evaluate(&mut stack, Some((txn, index)), None) { - Ok(_) => {} - Err(e) => { return Err(Error::OutputScriptFailure(e)); } - } - match stack.pop() { - Some(v) => { - if !read_scriptbool(&v[..]) { - return Err(Error::ScriptReturnedFalse); + let mut stack = Vec::with_capacity(6); + match self.script_sig.evaluate(&mut stack, Some((txn, index)), None) { + Ok(_) => {} + Err(e) => { return Err(Error::InputScriptFailure(e)); } + } + if txo.script_pubkey.is_p2sh() && stack.len() > 0 { + p2sh_stack = stack.clone(); + p2sh_script = match p2sh_stack.pop() { + Some(script::MaybeOwned::Owned(v)) => Script::from_vec(v), + Some(script::MaybeOwned::Borrowed(s)) => Script::from_vec(s.to_vec()), + None => unreachable!() + }; + } + match txo.script_pubkey.evaluate(&mut stack, Some((txn, index)), None) { + Ok(_) => {} + Err(e) => { return Err(Error::OutputScriptFailure(e)); } + } + match stack.pop() { + Some(v) => { + if !read_scriptbool(&v[..]) { + return Err(Error::ScriptReturnedFalse); + } + } + None => { return Err(Error::ScriptReturnedEmptyStack); } + } + if txo.script_pubkey.is_p2sh() { + match p2sh_script.evaluate(&mut p2sh_stack, Some((txn, index)), None) { + Ok(_) => {} + Err(e) => { return Err(Error::P2shScriptFailure(e)); } + } + match p2sh_stack.pop() { + Some(v) => { + if !read_scriptbool(&v[..]) { + return Err(Error::P2shScriptReturnedFalse); + } + } + None => { return Err(Error::P2shScriptReturnedEmptyStack); } + } + } } - } - None => { return Err(Error::ScriptReturnedEmptyStack); } + None => { return Err(Error::InputNotFound(self.prev_hash, self.prev_index)); } } - if txo.script_pubkey.is_p2sh() { - match p2sh_script.evaluate(&mut p2sh_stack, Some((txn, index)), None) { - Ok(_) => {} - Err(e) => { return Err(Error::P2shScriptFailure(e)); } - } - match p2sh_stack.pop() { - Some(v) => { - if !read_scriptbool(&v[..]) { - return Err(Error::P2shScriptReturnedFalse); - } - } - None => { return Err(Error::P2shScriptReturnedEmptyStack); } - } - } - } - None => { return Err(Error::InputNotFound(self.prev_hash, self.prev_index)); } + Ok(()) } - Ok(()) - } } impl Transaction { - /// Check a transaction for validity - pub fn validate(&self, utxoset: &UtxoSet) -> Result<(), Error> { - for (n, input) in self.input.iter().enumerate() { - try!(input.validate(utxoset, self, n)); + /// Check a transaction for validity + pub fn validate(&self, utxoset: &UtxoSet) -> Result<(), Error> { + for (n, input) in self.input.iter().enumerate() { + try!(input.validate(utxoset, self, n)); + } + Ok(()) } - Ok(()) - } - /// Produce a trace of a transaction's execution - pub fn trace(&self, utxoset: &UtxoSet) -> TransactionTrace { - let mut ret = TransactionTrace { txid: self.bitcoin_hash(), - inputs: Vec::with_capacity(self.input.len()) }; - for (n, input) in self.input.iter().enumerate() { - // Setup trace - let mut trace = InputTrace { - input_txid: input.prev_hash, - input_vout: input.prev_index as usize, - sig_trace: ScriptTrace { - script: Script::new(), - initial_stack: vec![], - iterations: vec![], - error: None - }, - pubkey_trace: None, - p2sh_trace: None, - error: None - }; - // Run through the input - let txo = utxoset.get_utxo(input.prev_hash, input.prev_index); - match txo { - Some((_, txo)) => { - let mut p2sh_stack = Vec::new(); - let mut p2sh_script = Script::new(); - - let mut stack = 
Vec::with_capacity(6); - trace.sig_trace = input.script_sig.trace(&mut stack, Some((self, n))); - let err = trace.sig_trace.error.as_ref().map(|e| e.clone()); - err.map(|e| trace.error = Some(Error::InputScriptFailure(e))); - - if txo.script_pubkey.is_p2sh() && stack.len() > 0 { - p2sh_stack = stack.clone(); - p2sh_script = match p2sh_stack.pop() { - Some(script::MaybeOwned::Owned(v)) => Script::from_vec(v), - Some(script::MaybeOwned::Borrowed(s)) => Script::from_vec(s.to_vec()), - None => unreachable!() + /// Produce a trace of a transaction's execution + pub fn trace(&self, utxoset: &UtxoSet) -> TransactionTrace { + let mut ret = TransactionTrace { txid: self.bitcoin_hash(), + inputs: Vec::with_capacity(self.input.len()) }; + for (n, input) in self.input.iter().enumerate() { + // Setup trace + let mut trace = InputTrace { + input_txid: input.prev_hash, + input_vout: input.prev_index as usize, + sig_trace: ScriptTrace { + script: Script::new(), + initial_stack: vec![], + iterations: vec![], + error: None + }, + pubkey_trace: None, + p2sh_trace: None, + error: None }; - } - if trace.error.is_none() { - trace.pubkey_trace = Some(txo.script_pubkey.trace(&mut stack, Some((self, n)))); - let err = trace.pubkey_trace.as_ref().unwrap().error.as_ref().map(|e| e.clone()); - err.map(|e| trace.error = Some(Error::OutputScriptFailure(e))); - match stack.pop() { - Some(v) => { - if !read_scriptbool(&v[..]) { - trace.error = Some(Error::ScriptReturnedFalse); + // Run through the input + let txo = utxoset.get_utxo(input.prev_hash, input.prev_index); + match txo { + Some((_, txo)) => { + let mut p2sh_stack = Vec::new(); + let mut p2sh_script = Script::new(); + + let mut stack = Vec::with_capacity(6); + trace.sig_trace = input.script_sig.trace(&mut stack, Some((self, n))); + let err = trace.sig_trace.error.as_ref().map(|e| e.clone()); + err.map(|e| trace.error = Some(Error::InputScriptFailure(e))); + + if txo.script_pubkey.is_p2sh() && stack.len() > 0 { + p2sh_stack = stack.clone(); + p2sh_script = match p2sh_stack.pop() { + Some(script::MaybeOwned::Owned(v)) => Script::from_vec(v), + Some(script::MaybeOwned::Borrowed(s)) => Script::from_vec(s.to_vec()), + None => unreachable!() + }; + } + if trace.error.is_none() { + trace.pubkey_trace = Some(txo.script_pubkey.trace(&mut stack, Some((self, n)))); + let err = trace.pubkey_trace.as_ref().unwrap().error.as_ref().map(|e| e.clone()); + err.map(|e| trace.error = Some(Error::OutputScriptFailure(e))); + match stack.pop() { + Some(v) => { + if !read_scriptbool(&v[..]) { + trace.error = Some(Error::ScriptReturnedFalse); + } + } + None => { trace.error = Some(Error::ScriptReturnedEmptyStack); } + } + if trace.error.is_none() && txo.script_pubkey.is_p2sh() { + trace.p2sh_trace = Some(p2sh_script.trace(&mut p2sh_stack, Some((self, n)))); + let err = trace.p2sh_trace.as_ref().unwrap().error.as_ref().map(|e| e.clone()); + err.map(|e| trace.error = Some(Error::P2shScriptFailure(e))); + match p2sh_stack.pop() { + Some(v) => { + if !read_scriptbool(&v[..]) { + trace.error = Some(Error::P2shScriptReturnedFalse); + } + } + None => { trace.error = Some(Error::P2shScriptReturnedEmptyStack); } + } + } + } } - } - None => { trace.error = Some(Error::ScriptReturnedEmptyStack); } - } - if trace.error.is_none() && txo.script_pubkey.is_p2sh() { - trace.p2sh_trace = Some(p2sh_script.trace(&mut p2sh_stack, Some((self, n)))); - let err = trace.p2sh_trace.as_ref().unwrap().error.as_ref().map(|e| e.clone()); - err.map(|e| trace.error = Some(Error::P2shScriptFailure(e))); - match 
p2sh_stack.pop() { - Some(v) => { - if !read_scriptbool(&v[..]) { - trace.error = Some(Error::P2shScriptReturnedFalse); - } + None => { + trace.error = Some(Error::InputNotFound(input.prev_hash, input.prev_index)); } - None => { trace.error = Some(Error::P2shScriptReturnedEmptyStack); } - } } - } + ret.inputs.push(trace); } - None => { - trace.error = Some(Error::InputNotFound(input.prev_hash, input.prev_index)); - } - } - ret.inputs.push(trace); + ret } - ret - } } impl BitcoinHash for Transaction { - fn bitcoin_hash(&self) -> Sha256dHash { - use network::serialize::serialize; - Sha256dHash::from_data(&serialize(self).unwrap()) - } + fn bitcoin_hash(&self) -> Sha256dHash { + use network::serialize::serialize; + Sha256dHash::from_data(&serialize(self).unwrap()) + } } impl_consensus_encoding!(TxIn, prev_hash, prev_index, script_sig, sequence); @@ -305,40 +306,40 @@ impl_consensus_encoding!(Transaction, version, input, output, lock_time); #[cfg(test)] mod tests { - use super::{Transaction, TxIn}; + use super::{Transaction, TxIn}; - use std::io; + use std::io; - use network::serialize::BitcoinHash; - use network::serialize::deserialize; - use util::misc::hex_bytes; + use network::serialize::BitcoinHash; + use network::serialize::deserialize; + use util::misc::hex_bytes; - #[test] - fn test_txin() { - let txin: io::Result = deserialize(hex_bytes("a15d57094aa7a21a28cb20b59aab8fc7d1149a3bdbcddba9c622e4f5f6a99ece010000006c493046022100f93bb0e7d8db7bd46e40132d1f8242026e045f03a0efe71bbb8e3f475e970d790221009337cd7f1f929f00cc6ff01f03729b069a7c21b59b1736ddfee5db5946c5da8c0121033b9b137ee87d5a812d6f506efdd37f0affa7ffc310711c06c7f3e097c9447c52ffffffff").unwrap()); - assert!(txin.is_ok()); - } + #[test] + fn test_txin() { + let txin: io::Result = deserialize(hex_bytes("a15d57094aa7a21a28cb20b59aab8fc7d1149a3bdbcddba9c622e4f5f6a99ece010000006c493046022100f93bb0e7d8db7bd46e40132d1f8242026e045f03a0efe71bbb8e3f475e970d790221009337cd7f1f929f00cc6ff01f03729b069a7c21b59b1736ddfee5db5946c5da8c0121033b9b137ee87d5a812d6f506efdd37f0affa7ffc310711c06c7f3e097c9447c52ffffffff").unwrap()); + assert!(txin.is_ok()); + } - #[test] - fn test_transaction() { - let hex_tx = hex_bytes("0100000001a15d57094aa7a21a28cb20b59aab8fc7d1149a3bdbcddba9c622e4f5f6a99ece010000006c493046022100f93bb0e7d8db7bd46e40132d1f8242026e045f03a0efe71bbb8e3f475e970d790221009337cd7f1f929f00cc6ff01f03729b069a7c21b59b1736ddfee5db5946c5da8c0121033b9b137ee87d5a812d6f506efdd37f0affa7ffc310711c06c7f3e097c9447c52ffffffff0100e1f505000000001976a9140389035a9225b3839e2bbf32d826a1e222031fd888ac00000000").unwrap(); - let tx: io::Result = deserialize(hex_tx); - assert!(tx.is_ok()); - let realtx = tx.unwrap(); - // All these tests aren't really needed because if they fail, the hash check at the end - // will also fail. But these will show you where the failure is so I'll leave them in. - assert_eq!(realtx.version, 1); - assert_eq!(realtx.input.len(), 1); - // In particular this one is easy to get backward -- in bitcoin hashes are encoded - // as little-endian 256-bit numbers rather than as data strings. 
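The byte-order point in the comment above can be pinned down with the actual strings from this test: the txid printed as big-endian hex is the byte-reverse of the hash as it appears inside the serialized transaction. A small std-only check, where `reverse_hex` is a throwaway helper for this illustration only:

    fn reverse_hex(hex: &str) -> String {
        let mut pairs: Vec<&str> = (0..hex.len()).step_by(2).map(|i| &hex[i..i + 2]).collect();
        pairs.reverse();
        pairs.concat()
    }

    fn main() {
        // txid as displayed (big-endian hex) vs. as embedded in the raw tx hex above
        let displayed = "ce9ea9f6f5e422c6a9dbcddb3b9a14d1c78fab9ab520cb281aa2a74a09575da1";
        let in_raw_tx = "a15d57094aa7a21a28cb20b59aab8fc7d1149a3bdbcddba9c622e4f5f6a99ece";
        assert_eq!(reverse_hex(in_raw_tx), displayed);
    }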
- assert_eq!(realtx.input[0].prev_hash.be_hex_string(), - "ce9ea9f6f5e422c6a9dbcddb3b9a14d1c78fab9ab520cb281aa2a74a09575da1".to_string()); - assert_eq!(realtx.input[0].prev_index, 1); - assert_eq!(realtx.output.len(), 1); - assert_eq!(realtx.lock_time, 0); + #[test] + fn test_transaction() { + let hex_tx = hex_bytes("0100000001a15d57094aa7a21a28cb20b59aab8fc7d1149a3bdbcddba9c622e4f5f6a99ece010000006c493046022100f93bb0e7d8db7bd46e40132d1f8242026e045f03a0efe71bbb8e3f475e970d790221009337cd7f1f929f00cc6ff01f03729b069a7c21b59b1736ddfee5db5946c5da8c0121033b9b137ee87d5a812d6f506efdd37f0affa7ffc310711c06c7f3e097c9447c52ffffffff0100e1f505000000001976a9140389035a9225b3839e2bbf32d826a1e222031fd888ac00000000").unwrap(); + let tx: io::Result = deserialize(hex_tx); + assert!(tx.is_ok()); + let realtx = tx.unwrap(); + // All these tests aren't really needed because if they fail, the hash check at the end + // will also fail. But these will show you where the failure is so I'll leave them in. + assert_eq!(realtx.version, 1); + assert_eq!(realtx.input.len(), 1); + // In particular this one is easy to get backward -- in bitcoin hashes are encoded + // as little-endian 256-bit numbers rather than as data strings. + assert_eq!(realtx.input[0].prev_hash.be_hex_string(), + "ce9ea9f6f5e422c6a9dbcddb3b9a14d1c78fab9ab520cb281aa2a74a09575da1".to_string()); + assert_eq!(realtx.input[0].prev_index, 1); + assert_eq!(realtx.output.len(), 1); + assert_eq!(realtx.lock_time, 0); - assert_eq!(realtx.bitcoin_hash().be_hex_string(), - "a6eab3c14ab5272a58a5ba91505ba1a4b6d7a3a9fcbd187b6cd99a7b6d548cb7".to_string()); - } + assert_eq!(realtx.bitcoin_hash().be_hex_string(), + "a6eab3c14ab5272a58a5ba91505ba1a4b6d7a3a9fcbd187b6cd99a7b6d548cb7".to_string()); + } } diff --git a/src/blockdata/utxoset.rs b/src/blockdata/utxoset.rs index b9365868..bbdd1e76 100644 --- a/src/blockdata/utxoset.rs +++ b/src/blockdata/utxoset.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to @@ -21,11 +21,11 @@ use std::cmp; use std::collections::HashMap; use std::collections::hash::map::Iter; -use std::hash::SipHasher; use std::default::Default; use std::mem; +use eventual; +use eventual::Async; use num_cpus; -use std::sync::Future; use blockdata::transaction::{self, Transaction, TxOut}; use blockdata::constants::genesis_block; @@ -37,475 +37,469 @@ use util::hash::Sha256dHash; /// The amount of validation to do when updating the UTXO set #[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Debug)] pub enum ValidationLevel { - /// Blindly update the UTXO set (NOT recommended) - Nothing, - /// Check that the blocks are at least in the right order - Chain, - /// Check that any inputs are actually txouts in the set - Inputs, - /// Execute the scripts and ensure they pass - Script + /// Blindly update the UTXO set (NOT recommended) + Nothing, + /// Check that the blocks are at least in the right order + Chain, + /// Check that any inputs are actually txouts in the set + Inputs, + /// Execute the scripts and ensure they pass + Script } /// An error returned from a UTXO set operation #[derive(PartialEq, Eq, Clone, Debug)] pub enum Error { - /// prevhash of the new block is not the hash of the old block (expected, actual) - BadPrevHash(Sha256dHash, Sha256dHash), - /// A TXID was duplicated - DuplicatedTxid(Sha256dHash), - /// A tx was invalid (txid, error) - 
InvalidTx(Sha256dHash, transaction::Error), + /// prevhash of the new block is not the hash of the old block (expected, actual) + BadPrevHash(Sha256dHash, Sha256dHash), + /// A TXID was duplicated + DuplicatedTxid(Sha256dHash), + /// A tx was invalid (txid, error) + InvalidTx(Sha256dHash, transaction::Error), } struct UtxoNode { - /// Blockheight at which this UTXO appeared in the blockchain - height: u32, - /// Vector of outputs; None indicates a nonexistent or already spent output - outputs: Box<[Option]> + /// Blockheight at which this UTXO appeared in the blockchain + height: u32, + /// Vector of outputs; None indicates a nonexistent or already spent output + outputs: Box<[Option]> } impl_consensus_encoding!(UtxoNode, height, outputs); /// An iterator over UTXOs pub struct UtxoIterator<'a> { - tx_iter: Iter<'a, Sha256dHash, UtxoNode>, - current_key: Sha256dHash, - current: Option<&'a UtxoNode>, - tx_index: u32 + tx_iter: Iter<'a, Sha256dHash, UtxoNode>, + current_key: Sha256dHash, + current: Option<&'a UtxoNode>, + tx_index: u32 } impl<'a> Iterator for UtxoIterator<'a> { - type Item = (Sha256dHash, u32, &'a TxOut, u32); + type Item = (Sha256dHash, u32, &'a TxOut, u32); - fn next(&mut self) -> Option<(Sha256dHash, u32, &'a TxOut, u32)> { - while self.current.is_some() { - let current = &self.current.unwrap().outputs; - while self.tx_index < current.len() as u32 { - self.tx_index += 1; - if unsafe { current.get(self.tx_index as usize - 1) }.is_some() { - return Some((self.current_key, - self.tx_index, - unsafe { current.get(self.tx_index as usize - 1) }.as_ref().unwrap(), - self.current.unwrap().height)); + fn next(&mut self) -> Option<(Sha256dHash, u32, &'a TxOut, u32)> { + while let Some(current) = self.current { + while self.tx_index < current.outputs.len() as u32 { + self.tx_index += 1; + if let Some(ref cur) = current.outputs[self.tx_index as usize - 1] { + return Some((self.current_key, self.tx_index, + cur, current.height)); + } + } + match self.tx_iter.next() { + Some((&x, y)) => { + self.tx_index = 0; + self.current_key = x; + self.current = Some(y); + } + None => { self.current = None; } + } } - } - match self.tx_iter.next() { - Some((&x, y)) => { - self.tx_index = 0; - self.current_key = x; - self.current = Some(y); - } - None => { self.current = None; } - } + return None; } - return None; - } } /// The UTXO set pub struct UtxoSet { - table: HashMap, - last_hash: Sha256dHash, - // A circular buffer of deleted utxos, grouped by block - spent_txos: Vec>, - // The last index into the above buffer that was assigned to - spent_idx: u64, - n_utxos: u64, - n_pruned: u64 + table: HashMap, + last_hash: Sha256dHash, + // A circular buffer of deleted utxos, grouped by block + spent_txos: Vec>, + // The last index into the above buffer that was assigned to + spent_idx: u64, + n_utxos: u64, + n_pruned: u64 } impl_consensus_encoding!(UtxoSet, last_hash, n_utxos, n_pruned, spent_txos, spent_idx, table); impl UtxoSet { - /// Constructs a new UTXO set - pub fn new(network: Network, rewind_limit: usize) -> UtxoSet { - // There is in fact a transaction in the genesis block, but the Bitcoin - // reference client does not add its sole output to the UTXO set. We - // must follow suit, otherwise we will accept a transaction spending it - // while the reference client won't, causing us to fork off the network. 
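Before the constructor below, it may help to see the shape of the UTXO table in isolation: each txid maps to a boxed slice of `Option` outputs, and spending an output is just `Option::take()`. A toy, std-only version (the names `Txid` and `Out` are stand-ins here, not this library's real types):

    use std::collections::HashMap;

    type Txid = [u8; 32];
    type Out = u64; // stand-in for TxOut

    struct ToyUtxoSet {
        table: HashMap<Txid, Box<[Option<Out>]>>,
    }

    impl ToyUtxoSet {
        fn add_tx(&mut self, txid: Txid, outputs: Vec<Out>) {
            let node: Vec<Option<Out>> = outputs.into_iter().map(Some).collect();
            self.table.insert(txid, node.into_boxed_slice());
        }

        // Take one output out of the set; drop the node once every slot is spent.
        fn take_utxo(&mut self, txid: Txid, vout: usize) -> Option<Out> {
            let (ret, empty) = {
                let node = self.table.get_mut(&txid)?;
                if vout >= node.len() {
                    return None;
                }
                let ret = node[vout].take();
                (ret, node.iter().all(|slot| slot.is_none()))
            };
            if empty {
                self.table.remove(&txid);
            }
            ret
        }
    }

    fn main() {
        let mut set = ToyUtxoSet { table: HashMap::new() };
        set.add_tx([0u8; 32], vec![50, 25]);
        assert_eq!(set.take_utxo([0u8; 32], 1), Some(25));
        assert_eq!(set.take_utxo([0u8; 32], 1), None); // already spent
        assert_eq!(set.take_utxo([0u8; 32], 0), Some(50));
        assert!(set.table.is_empty());                 // node dropped when emptied
    }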
- UtxoSet { - table: HashMap::with_hasher(SipHasher::new()), - last_hash: genesis_block(network).header.bitcoin_hash(), - spent_txos: Vec::from_elem(rewind_limit, vec![]), - spent_idx: 0, - n_utxos: 0, - n_pruned: 0 - } - } - - /// Add all the UTXOs of a transaction to the set - fn add_utxos(&mut self, tx: &Transaction, height: u32) -> Option { - let txid = tx.bitcoin_hash(); - // Locate node if it's already there - let new_node = unsafe { - let mut new_node = Vec::with_capacity(tx.output.len()); - for txo in tx.output.iter() { - // Unsafe since we are not uninitializing the old data in the vector - if txo.script_pubkey.is_provably_unspendable() { - new_node.push(None); - self.n_utxos -= 1; - self.n_pruned += 1; - } else { - new_node.push(Some(txo.clone())); + /// Constructs a new UTXO set + pub fn new(network: Network, rewind_limit: usize) -> UtxoSet { + // There is in fact a transaction in the genesis block, but the Bitcoin + // reference client does not add its sole output to the UTXO set. We + // must follow suit, otherwise we will accept a transaction spending it + // while the reference client won't, causing us to fork off the network. + UtxoSet { + table: HashMap::new(), + last_hash: genesis_block(network).header.bitcoin_hash(), + spent_txos: vec![vec![]; rewind_limit], + spent_idx: 0, + n_utxos: 0, + n_pruned: 0 } - } - UtxoNode { outputs: new_node.into_boxed_slice(), height: height } - }; - // Get the old value, if any (this is suprisingly possible, c.f. BIP30 - // and the other comments in this file referring to it) - let ret = self.table.swap(txid, new_node.into_boxed_slice()); - if ret.is_none() { - self.n_utxos += tx.output.len() as u64; } - ret - } - /// Remove a UTXO from the set and return it - fn take_utxo(&mut self, txid: Sha256dHash, vout: u32) -> Option<(u32, TxOut)> { - // This whole function has awkward scoping thx to lexical borrow scoping :( - let (height, ret, should_delete) = { - // Locate the UTXO, failing if not found - let node = match self.table.find_mut(&txid) { - Some(node) => node, - None => return None - }; + /// Add all the UTXOs of a transaction to the set + fn add_utxos(&mut self, tx: &Transaction, height: u32) -> Option { + let txid = tx.bitcoin_hash(); + // Locate node if it's already there + let new_node = unsafe { + let mut new_node = Vec::with_capacity(tx.output.len()); + for txo in tx.output.iter() { + // Unsafe since we are not uninitializing the old data in the vector + if txo.script_pubkey.is_provably_unspendable() { + new_node.push(None); + self.n_utxos -= 1; + self.n_pruned += 1; + } else { + new_node.push(Some(txo.clone())); + } + } + UtxoNode { outputs: new_node.into_boxed_slice(), height: height } + }; + // Get the old value, if any (this is suprisingly possible, c.f. 
BIP30 + // and the other comments in this file referring to it) + let ret = self.table.insert(txid, new_node); + if ret.is_none() { + self.n_utxos += tx.output.len() as u64; + } + ret + } - let ret = { + /// Remove a UTXO from the set and return it + fn take_utxo(&mut self, txid: Sha256dHash, vout: u32) -> Option<(u32, TxOut)> { + // This whole function has awkward scoping thx to lexical borrow scoping :( + let (height, ret, should_delete) = { + // Locate the UTXO, failing if not found + let node = match self.table.get_mut(&txid) { + Some(node) => node, + None => return None + }; + + let ret = { + // Check that this specific output is there + if vout as usize >= node.outputs.len() { return None; } + let replace = &mut node.outputs[vout as usize]; + replace.take() + }; + + let should_delete = node.outputs.iter().filter(|slot| slot.is_some()).count() == 0; + (node.height, ret, should_delete) + }; + + // Delete the whole node if it is no longer being used + if should_delete { + self.table.remove(&txid); + } + + self.n_utxos -= if ret.is_some() { 1 } else { 0 }; + ret.map(|o| (height, o)) + } + + /// Get a reference to a UTXO in the set + pub fn get_utxo<'a>(&'a self, txid: Sha256dHash, vout: u32) -> Option<(usize, &'a TxOut)> { + // Locate the UTXO, failing if not found + let node = match self.table.get(&txid) { + Some(node) => node, + None => return None + }; // Check that this specific output is there if vout as usize >= node.outputs.len() { return None; } - let replace = unsafe { node.outputs.get_mut(vout as usize) }; - replace.take() - }; - - let should_delete = node.outputs.iter().filter(|slot| slot.is_some()).count() == 0; - (node.height, ret, should_delete) - }; - - // Delete the whole node if it is no longer being used - if should_delete { - self.table.remove(&txid); + let replace = node.outputs[vout as usize]; + Some((node.height as usize, replace.as_ref().unwrap())) } - self.n_utxos -= if ret.is_some() { 1 } else { 0 }; - ret.map(|o| (height, o)) - } - - /// Get a reference to a UTXO in the set - pub fn get_utxo<'a>(&'a self, txid: Sha256dHash, vout: u32) -> Option<(usize, &'a TxOut)> { - // Locate the UTXO, failing if not found - let node = match self.table.find(&txid) { - Some(node) => node, - None => return None - }; - // Check that this specific output is there - if vout as usize >= node.outputs.len() { return None; } - let replace = unsafe { node.outputs.get(vout as usize) }; - Some((node.height as usize, replace.as_ref().unwrap())) - } - - /// Apply the transactions contained in a block - pub fn update(&mut self, block: &Block, blockheight: usize, validation: ValidationLevel) - -> Result<(), Error> { - // Make sure we are extending the UTXO set in order - if validation >= ValidationLevel::Chain && - self.last_hash != block.header.prev_blockhash { - return Err(Error::BadPrevHash(self.last_hash, block.header.prev_blockhash)); - } - - // Set the next hash immediately so that if anything goes wrong, - // we can rewind from the point that we're at. - self.last_hash = block.header.bitcoin_hash(); - let spent_idx = self.spent_idx as usize; - self.spent_idx = (self.spent_idx + 1) % self.spent_txos.len() as u64; - self.spent_txos.get_mut(spent_idx).clear(); - - // Add all the utxos so that we can have chained transactions within the - // same block. (Note that Bitcoin requires chained transactions to be in - // the correct order, which we do not check, so we are minorly too permissive. - // TODO this is a consensus bug.) 
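A side note on the `spent_txos` bookkeeping a few lines up: the cache is a fixed-size ring. `update` uses the current slot and then advances the index modulo the buffer length, while `rewind` (further below) steps it back by one, adding `len - 1` before the modulus so the unsigned index never underflows. The arithmetic in isolation:

    fn main() {
        let len: u64 = 8; // stands in for rewind_limit
        let mut spent_idx: u64 = 0;

        // three blocks applied
        for _ in 0..3 {
            spent_idx = (spent_idx + 1) % len;
        }
        assert_eq!(spent_idx, 3);

        // one block rewound
        spent_idx = (spent_idx + len - 1) % len;
        assert_eq!(spent_idx, 2);

        // rewinding past zero wraps to the end of the ring instead of underflowing
        spent_idx = 0;
        spent_idx = (spent_idx + len - 1) % len;
        assert_eq!(spent_idx, 7);
    }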
- for tx in block.txdata.iter() { - let txid = tx.bitcoin_hash(); - // Add outputs -- add_utxos returns the original transaction if this is a dupe. - // Note that this can only happen with coinbases, and in this case the block - // is invalid, -except- for two historic blocks which appeared in the - // blockchain before the dupes were noticed. - // See bitcoind commit `ab91bf39` and BIP30. - match self.add_utxos(tx, blockheight as u32) { - Some(mut replace) => { - let blockhash = block.header.bitcoin_hash().be_hex_string(); - if blockhash == "00000000000a4d0a398161ffc163c503763b1f4360639393e0e4c8e300e0caec".to_string() || - blockhash == "00000000000743f190a18c5577a3c2d2a1f610ae9601ac046a38084ccb7cd721".to_string() { - // For these specific blocks, overwrite the old UTXOs. - // (Actually add_utxos() already did this, so we do nothing.) - } else { - // Otherwise put the replaced txouts into the `deleted` cache - // so that rewind will put them back. - self.spent_txos.get_mut(spent_idx).reserve_additional(replace.outputs.len()); - for (n, input) in replace.outputs.iter_mut().enumerate() { - match input.take() { - Some(txo) => { self.spent_txos.get_mut(spent_idx).push(((txid, n as u32), (replace.height, txo))); } - None => {} - } - } - // Otherwise fail the block - self.rewind(block); - return Err(Error::DuplicatedTxid(txid)); - } + /// Apply the transactions contained in a block + pub fn update(&mut self, block: &Block, blockheight: usize, validation: ValidationLevel) + -> Result<(), Error> { + // Make sure we are extending the UTXO set in order + if validation >= ValidationLevel::Chain && + self.last_hash != block.header.prev_blockhash { + return Err(Error::BadPrevHash(self.last_hash, block.header.prev_blockhash)); } - // Didn't replace anything? Good. - None => {} - } - } - // If we are validating scripts, do all that now in parallel - if validation >= ValidationLevel::Script { - let mut future_vec = Vec::with_capacity(block.txdata.len() - 1); - // skip the genesis since we don't validate this script. (TODO this might - // be a consensus bug since we don't even check that the opcodes make sense.) - let n_threads = cmp::min(block.txdata.len() - 1, num_cpus::get()); - for j in 0..n_threads { - let n_elems = block.txdata.len() - 1; - let start = 1 + j * n_elems / n_threads; - let end = cmp::min(n_elems, 1 + (j + 1) * n_elems / n_threads); + // Set the next hash immediately so that if anything goes wrong, + // we can rewind from the point that we're at. + self.last_hash = block.header.bitcoin_hash(); + let spent_idx = self.spent_idx as usize; + self.spent_idx = (self.spent_idx + 1) % self.spent_txos.len() as u64; + (&mut self.spent_txos[spent_idx]).clear(); - let s = self as *mut _ as *const UtxoSet; - let txes = &block.txdata as *const _; - future_vec.push(Future::spawn(move || { - let txes = unsafe {&*txes}; - for tx in txes.slice(start, end).iter() { - match tx.validate(unsafe {&*s}) { - Ok(_) => {}, - Err(e) => { return Err(Error::InvalidTx(tx.bitcoin_hash(), e)); } - } - } - Ok(()) - })); - } - // Return the last error since we need to finish every future before - // leaving this function, and given that, it's easier to return the last. 
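The script-validation fan-out here (and its `eventual`-based replacement further down) boils down to: split the non-coinbase transactions into contiguous chunks, validate each chunk on its own worker, wait for every worker before returning, and keep the last error seen. A self-contained sketch of that shape using only modern std scoped threads rather than `eventual`; `validate` is a hypothetical stand-in for `Transaction::validate`:

    use std::thread;

    fn validate(_tx: &u32) -> Result<(), String> {
        Ok(())
    }

    fn main() {
        let txdata: Vec<u32> = (0..10).collect(); // stands in for block.txdata
        let n_threads = 4;
        // ceil-divide the non-coinbase transactions into n_threads chunks
        let chunk_size = ((txdata.len() - 1) + n_threads - 1) / n_threads;

        let mut last_err: Result<(), String> = Ok(());
        thread::scope(|s| {
            let handles: Vec<_> = txdata[1..]          // skip the coinbase
                .chunks(chunk_size.max(1))
                .map(|chunk| {
                    s.spawn(move || {
                        for tx in chunk {
                            validate(tx)?;
                        }
                        Ok::<(), String>(())
                    })
                })
                .collect();
            // wait for every worker; keep the last error, as the code here does
            for handle in handles {
                if let Err(e) = handle.join().unwrap() {
                    last_err = Err(e);
                }
            }
        });
        assert!(last_err.is_ok());
    }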
- let mut last_error = Ok(()); - for res in future_vec.iter_mut().map(|f| f.get()) { - if res.is_err() { - last_error = res; - } - } - if last_error.is_err() { - return last_error; - } - } - - for tx in block.txdata.iter().skip(1) { - let txid = tx.bitcoin_hash(); - // Put the removed utxos into the stxo cache, in case we need to rewind - self.spent_txos.get_mut(spent_idx).reserve_additional(tx.input.len()); - for (n, input) in tx.input.iter().enumerate() { - let taken = self.take_utxo(input.prev_hash, input.prev_index); - match taken { - Some(txo) => { self.spent_txos.get_mut(spent_idx).push(((txid, n as u32), txo)); } - None => { - if validation >= ValidationLevel::Inputs { - self.rewind(block); - return Err(Error::InvalidTx(txid, - transaction::Error::InputNotFound(input.prev_hash, input.prev_index))); - } - } - } - } - } - // If we made it here, success! - Ok(()) - } - - /// Unapply the transactions contained in a block - pub fn rewind(&mut self, block: &Block) -> bool { - // Make sure we are rewinding the latest block - if self.last_hash != block.header.bitcoin_hash() { - return false; - } - - // We deliberately do no error checking here, since we may be rewinding - // from halfway through the new block addition, in which case many of - // the utxos we try to remove may be missing; the ones we try to add, - // we stored ourselves when we removed them, so they won't be unaddable - // for any reason. - // Plus we don't care too much about efficiency, not many blocks should - // get rewound. - - // Delete added txouts - let mut skipped_genesis = false; - for tx in block.txdata.iter() { - let txhash = tx.bitcoin_hash(); - for n in 0..tx.output.len() { - // Just bomb out the whole transaction - // TODO: this does not conform to BIP30: if a duplicate txid occurs, - // the block will be (rightly) rejected, causing it to be - // unwound. But when we get here, we can't see the duplicate, - // so we wind up deleting the old txid! This is very bad, and - // if it occurs, an affected user will have to recreate his - // whole UTXO index to get the original txid back. - self.take_utxo(txhash, n as u32); - } - - // Read deleted txouts - if skipped_genesis { - let mut extract_vec = vec![]; - mem::swap(&mut extract_vec, self.spent_txos.get_mut(self.spent_idx as usize)); - for ((txid, n), (height, txo)) in extract_vec.into_iter() { - // Remove the tx's utxo list and patch the txo into place - let new_node = - match self.table.pop(&txid) { - Some(mut node) => { - node.outputs[n as usize] = Some(txo); - node - } - None => { - unsafe { - let mut thinvec = Vec::with_capacity(n + 1); - for _ in 0..n { - thinvec.push(None); + // Add all the utxos so that we can have chained transactions within the + // same block. (Note that Bitcoin requires chained transactions to be in + // the correct order, which we do not check, so we are minorly too permissive. + // TODO this is a consensus bug.) + for tx in block.txdata.iter() { + let txid = tx.bitcoin_hash(); + // Add outputs -- add_utxos returns the original transaction if this is a dupe. + // Note that this can only happen with coinbases, and in this case the block + // is invalid, -except- for two historic blocks which appeared in the + // blockchain before the dupes were noticed. + // See bitcoind commit `ab91bf39` and BIP30. 
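The duplicate-coinbase handling that follows hinges on `add_utxos` surfacing the previous entry, which in turn is just the map insert returning the value it displaced: a `Some(..)` result means the txid was already present (the BIP30 case). In miniature, with std alone:

    use std::collections::HashMap;

    fn main() {
        let mut table: HashMap<&str, u32> = HashMap::new();
        assert_eq!(table.insert("deadbeef", 1), None);    // fresh txid
        assert_eq!(table.insert("deadbeef", 2), Some(1)); // duplicate detected
    }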
+ match self.add_utxos(tx, blockheight as u32) { + Some(mut replace) => { + let blockhash = block.header.bitcoin_hash().be_hex_string(); + if blockhash == "00000000000a4d0a398161ffc163c503763b1f4360639393e0e4c8e300e0caec".to_string() || + blockhash == "00000000000743f190a18c5577a3c2d2a1f610ae9601ac046a38084ccb7cd721".to_string() { + // For these specific blocks, overwrite the old UTXOs. + // (Actually add_utxos() already did this, so we do nothing.) + } else { + // Otherwise put the replaced txouts into the `deleted` cache + // so that rewind will put them back. + (&mut self.spent_txos[spent_idx]).reserve(replace.outputs.len()); + for (n, input) in replace.outputs.iter_mut().enumerate() { + match input.take() { + Some(txo) => { (&mut self.spent_txos[spent_idx]).push(((txid, n as u32), (replace.height, txo))); } + None => {} + } + } + // Otherwise fail the block + self.rewind(block); + return Err(Error::DuplicatedTxid(txid)); } - thinvec.push(Some(txo)); - UtxoNode { outputs: thinvec.into_boxed_slice(), height: height } - } } - }; - // Ram it back into the tree - self.table.insert(txid, new_node); + // Didn't replace anything? Good. + None => {} + } } - } - skipped_genesis = true; + + // If we are validating scripts, do all that now in parallel + if validation >= ValidationLevel::Script { + let mut future_vec = Vec::with_capacity(block.txdata.len() - 1); + // skip the genesis since we don't validate this script. (TODO this might + // be a consensus bug since we don't even check that the opcodes make sense.) + let n_threads = cmp::min(block.txdata.len() - 1, num_cpus::get()); + for j in 0..n_threads { + let n_elems = block.txdata.len() - 1; + let start = 1 + j * n_elems / n_threads; + let end = cmp::min(n_elems, 1 + (j + 1) * n_elems / n_threads); + + // WARNING: we are asserting that these variables will outlive the Futures; + // this means that we need to await all Futures before leaving the + // function or else risk use-after-free in the async threads. + let static_txes = unsafe { &*(&block.txdata as *const Vec) }; + let static_self = unsafe { &*(self as *const UtxoSet) }; + future_vec.push(eventual::Future::spawn(move || { + for tx in static_txes[start..end].iter() { + match tx.validate(static_self) { + Ok(_) => {}, + Err(e) => { return Err(Error::InvalidTx(tx.bitcoin_hash(), e)); } + } + } + Ok(()) + })); + } + // Return the last error since we need to finish every future before + // leaving this function, and given that, it's easier to return the last. + let mut last_err = Ok(()); + for res in future_vec.iter_mut().map(|f| f.await().unwrap()) { + if res.is_err() { last_err = res; } + } + if last_err.is_err() { return last_err; } + } + + for tx in block.txdata.iter().skip(1) { + let txid = tx.bitcoin_hash(); + // Put the removed utxos into the stxo cache, in case we need to rewind + (&self.spent_txos[spent_idx]).reserve(tx.input.len()); + for (n, input) in tx.input.iter().enumerate() { + let taken = self.take_utxo(input.prev_hash, input.prev_index); + match taken { + Some(txo) => { (&mut self.spent_txos[spent_idx]).push(((txid, n as u32), txo)); } + None => { + if validation >= ValidationLevel::Inputs { + self.rewind(block); + return Err(Error::InvalidTx(txid, + transaction::Error::InputNotFound(input.prev_hash, input.prev_index))); + } + } + } + } + } + // If we made it here, success! 
+ Ok(()) } - // Decrement mod the spent txo cache size - self.spent_idx = (self.spent_idx + self.spent_txos.len() as u64 - 1) % - self.spent_txos.len() as u64; - self.last_hash = block.header.prev_blockhash; - return true; - } + /// Unapply the transactions contained in a block + pub fn rewind(&mut self, block: &Block) -> bool { + // Make sure we are rewinding the latest block + if self.last_hash != block.header.bitcoin_hash() { + return false; + } - /// Get the hash of the last block added to the utxo set - pub fn last_hash(&self) -> Sha256dHash { - self.last_hash - } + // We deliberately do no error checking here, since we may be rewinding + // from halfway through the new block addition, in which case many of + // the utxos we try to remove may be missing; the ones we try to add, + // we stored ourselves when we removed them, so they won't be unaddable + // for any reason. + // Plus we don't care too much about efficiency, not many blocks should + // get rewound. - /// Get the number of UTXOs in the set - pub fn n_utxos(&self) -> usize { - self.n_utxos as usize - } + // Delete added txouts + let mut skipped_genesis = false; + for tx in block.txdata.iter() { + let txhash = tx.bitcoin_hash(); + for n in 0..tx.output.len() { + // Just bomb out the whole transaction + // TODO: this does not conform to BIP30: if a duplicate txid occurs, + // the block will be (rightly) rejected, causing it to be + // unwound. But when we get here, we can't see the duplicate, + // so we wind up deleting the old txid! This is very bad, and + // if it occurs, an affected user will have to recreate his + // whole UTXO index to get the original txid back. + self.take_utxo(txhash, n as u32); + } - /// Get the number of UTXOs ever pruned from the set (this is not updated - /// during reorgs, so it may return a higher number than is realistic). 
- pub fn n_pruned(&self) -> usize { - self.n_pruned as usize - } + // Read deleted txouts + if skipped_genesis { + let mut extract_vec = vec![]; + mem::swap(&mut extract_vec, (&mut self.spent_txos[self.spent_idx as usize])); + for ((txid, n), (height, txo)) in extract_vec.into_iter() { + // Remove the tx's utxo list and patch the txo into place + let new_node = match self.table.remove(&txid) { + Some(mut node) => { + node.outputs[n as usize] = Some(txo); + node + } + None => { + unsafe { + let mut thinvec = Vec::with_capacity(n as usize + 1); + for _ in 0..n { + thinvec.push(None); + } + thinvec.push(Some(txo)); + UtxoNode { outputs: thinvec.into_boxed_slice(), height: height } + } + } + }; + // Ram it back into the tree + self.table.insert(txid, new_node); + } + } + skipped_genesis = true; + } - /// Get an iterator over all UTXOs - pub fn iter<'a>(&'a self) -> UtxoIterator<'a> { - let mut iter = self.table.iter(); - let first = iter.next(); - match first { - Some((&key, val)) => UtxoIterator { - current_key: key, - current: Some(val), - tx_iter: iter, - tx_index: 0 - }, - None => UtxoIterator { - current_key: Default::default(), - current: None, - tx_iter: iter, - tx_index: 0 + // Decrement mod the spent txo cache size + self.spent_idx = (self.spent_idx + self.spent_txos.len() as u64 - 1) % + self.spent_txos.len() as u64; + self.last_hash = block.header.prev_blockhash; + return true; + } + + /// Get the hash of the last block added to the utxo set + pub fn last_hash(&self) -> Sha256dHash { + self.last_hash + } + + /// Get the number of UTXOs in the set + pub fn n_utxos(&self) -> usize { + self.n_utxos as usize + } + + /// Get the number of UTXOs ever pruned from the set (this is not updated + /// during reorgs, so it may return a higher number than is realistic). 
+ pub fn n_pruned(&self) -> usize { + self.n_pruned as usize + } + + /// Get an iterator over all UTXOs + pub fn iter<'a>(&'a self) -> UtxoIterator<'a> { + let mut iter = self.table.iter(); + let first = iter.next(); + match first { + Some((&key, val)) => UtxoIterator { + current_key: key, + current: Some(val), + tx_iter: iter, + tx_index: 0 + }, + None => UtxoIterator { + current_key: Default::default(), + current: None, + tx_iter: iter, + tx_index: 0 + } } } - } } #[cfg(test)] mod tests { - use std::prelude::*; - use std::io; - use serialize::hex::FromHex; + use std::prelude::*; + use std::io; + use serialize::hex::FromHex; - use super::{UtxoSet, ValidationLevel}; + use super::{UtxoSet, ValidationLevel}; - use blockdata::block::Block; - use network::constants::Network::Bitcoin; - use network::serialize::{BitcoinHash, deserialize, serialize}; + use blockdata::block::Block; + use network::constants::Network::Bitcoin; + use network::serialize::{BitcoinHash, deserialize, serialize}; - #[test] - fn utxoset_serialize_test() { - let mut empty_set = UtxoSet::new(Bitcoin, 100); + #[test] + fn utxoset_serialize_test() { + let mut empty_set = UtxoSet::new(Bitcoin, 100); - let new_block: Block = deserialize("010000004ddccd549d28f385ab457e98d1b11ce80bfea2c5ab93015ade4973e400000000bf4473e53794beae34e64fccc471dace6ae544180816f89591894e0f417a914cd74d6e49ffff001d323b3a7b0201000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d026e04ffffffff0100f2052a0100000043410446ef0102d1ec5240f0d061a4246c1bdef63fc3dbab7733052fbbf0ecd8f41fc26bf049ebb4f9527f374280259e7cfa99c48b0e3f39c51347a19a5819651503a5ac00000000010000000321f75f3139a013f50f315b23b0c9a2b6eac31e2bec98e5891c924664889942260000000049483045022100cb2c6b346a978ab8c61b18b5e9397755cbd17d6eb2fe0083ef32e067fa6c785a02206ce44e613f31d9a6b0517e46f3db1576e9812cc98d159bfdaf759a5014081b5c01ffffffff79cda0945903627c3da1f85fc95d0b8ee3e76ae0cfdc9a65d09744b1f8fc85430000000049483045022047957cdd957cfd0becd642f6b84d82f49b6cb4c51a91f49246908af7c3cfdf4a022100e96b46621f1bffcf5ea5982f88cef651e9354f5791602369bf5a82a6cd61a62501fffffffffe09f5fe3ffbf5ee97a54eb5e5069e9da6b4856ee86fc52938c2f979b0f38e82000000004847304402204165be9a4cbab8049e1af9723b96199bfd3e85f44c6b4c0177e3962686b26073022028f638da23fc003760861ad481ead4099312c60030d4cb57820ce4d33812a5ce01ffffffff01009d966b01000000434104ea1feff861b51fe3f5f8a3b12d0f4712db80e919548a80839fc47c6a21e66d957e9c5d8cd108c7a2d2324bad71f9904ac0ae7336507d785b17a2c115e427a32fac00000000".from_hex().unwrap()).unwrap(); + let new_block: Block = 
deserialize("010000004ddccd549d28f385ab457e98d1b11ce80bfea2c5ab93015ade4973e400000000bf4473e53794beae34e64fccc471dace6ae544180816f89591894e0f417a914cd74d6e49ffff001d323b3a7b0201000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804ffff001d026e04ffffffff0100f2052a0100000043410446ef0102d1ec5240f0d061a4246c1bdef63fc3dbab7733052fbbf0ecd8f41fc26bf049ebb4f9527f374280259e7cfa99c48b0e3f39c51347a19a5819651503a5ac00000000010000000321f75f3139a013f50f315b23b0c9a2b6eac31e2bec98e5891c924664889942260000000049483045022100cb2c6b346a978ab8c61b18b5e9397755cbd17d6eb2fe0083ef32e067fa6c785a02206ce44e613f31d9a6b0517e46f3db1576e9812cc98d159bfdaf759a5014081b5c01ffffffff79cda0945903627c3da1f85fc95d0b8ee3e76ae0cfdc9a65d09744b1f8fc85430000000049483045022047957cdd957cfd0becd642f6b84d82f49b6cb4c51a91f49246908af7c3cfdf4a022100e96b46621f1bffcf5ea5982f88cef651e9354f5791602369bf5a82a6cd61a62501fffffffffe09f5fe3ffbf5ee97a54eb5e5069e9da6b4856ee86fc52938c2f979b0f38e82000000004847304402204165be9a4cbab8049e1af9723b96199bfd3e85f44c6b4c0177e3962686b26073022028f638da23fc003760861ad481ead4099312c60030d4cb57820ce4d33812a5ce01ffffffff01009d966b01000000434104ea1feff861b51fe3f5f8a3b12d0f4712db80e919548a80839fc47c6a21e66d957e9c5d8cd108c7a2d2324bad71f9904ac0ae7336507d785b17a2c115e427a32fac00000000".from_hex().unwrap()).unwrap(); - // Make sure we can't add the block directly, since we are missing the inputs - assert!(empty_set.update(&new_block, 1, ValidationLevel::Inputs).is_err()); - assert_eq!(empty_set.n_utxos(), 0); - // Add the block manually so that we'll have some UTXOs for the rest of the test - for tx in new_block.txdata.iter() { - empty_set.add_utxos(tx, 1); + // Make sure we can't add the block directly, since we are missing the inputs + assert!(empty_set.update(&new_block, 1, ValidationLevel::Inputs).is_err()); + assert_eq!(empty_set.n_utxos(), 0); + // Add the block manually so that we'll have some UTXOs for the rest of the test + for tx in new_block.txdata.iter() { + empty_set.add_utxos(tx, 1); + } + empty_set.last_hash = new_block.header.bitcoin_hash(); + + // Check that all the UTXOs were added + assert_eq!(empty_set.n_utxos(), 2); + for tx in new_block.txdata.iter() { + let hash = tx.bitcoin_hash(); + for (n, out) in tx.output.iter().enumerate() { + let n = n as u32; + assert_eq!(empty_set.get_utxo(hash, n), Some((1, &out.clone()))); + } + } + + // Check again that we can't add the block, and that this doesn't mess up the + // existing UTXOs + assert!(empty_set.update(&new_block, 2, ValidationLevel::Inputs).is_err()); + assert_eq!(empty_set.n_utxos(), 2); + for tx in new_block.txdata.iter() { + let hash = tx.bitcoin_hash(); + for (n, out) in tx.output.iter().enumerate() { + let n = n as u32; + assert_eq!(empty_set.get_utxo(hash, n), Some((1, &out.clone()))); + } + } + + // Serialize/deserialize the resulting UTXO set + let serial = serialize(&empty_set).unwrap(); + + let deserial: io::Result = deserialize(serial.clone()); + assert!(deserial.is_ok()); + + // Check that all outputs are there + let mut read_set = deserial.unwrap(); + for tx in new_block.txdata.iter() { + let hash = tx.bitcoin_hash(); + + for (n, out) in tx.output.iter().enumerate() { + let n = n as u32; + // Try taking non-existent UTXO + assert_eq!(read_set.take_utxo(hash, 100 + n), None); + // Check take of real UTXO + let ret = read_set.take_utxo(hash, n); + assert_eq!(ret, Some((1, out.clone()))); + // Try double-take + assert_eq!(read_set.take_utxo(hash, n), None); + } + } + + let deserial_again: io::Result = 
deserialize(serial); + let mut read_again = deserial_again.unwrap(); + assert!(read_again.rewind(&new_block)); + assert_eq!(read_again.n_utxos(), 0); + for tx in new_block.txdata.iter() { + let hash = tx.bitcoin_hash(); + + for n in 0..tx.output.len() { + let n = n as u32; + let ret = read_again.take_utxo(hash, n); + assert_eq!(ret, None); + } + } } - empty_set.last_hash = new_block.header.bitcoin_hash(); - - // Check that all the UTXOs were added - assert_eq!(empty_set.n_utxos(), 2); - for tx in new_block.txdata.iter() { - let hash = tx.bitcoin_hash(); - for (n, out) in tx.output.iter().enumerate() { - let n = n as u32; - assert_eq!(empty_set.get_utxo(hash, n), Some((1, &out.clone()))); - } - } - - // Check again that we can't add the block, and that this doesn't mess up the - // existing UTXOs - assert!(empty_set.update(&new_block, 2, ValidationLevel::Inputs).is_err()); - assert_eq!(empty_set.n_utxos(), 2); - for tx in new_block.txdata.iter() { - let hash = tx.bitcoin_hash(); - for (n, out) in tx.output.iter().enumerate() { - let n = n as u32; - assert_eq!(empty_set.get_utxo(hash, n), Some((1, &out.clone()))); - } - } - - // Serialize/deserialize the resulting UTXO set - let serial = serialize(&empty_set).unwrap(); - - let deserial: io::Result = deserialize(serial.clone()); - assert!(deserial.is_ok()); - - // Check that all outputs are there - let mut read_set = deserial.unwrap(); - for tx in new_block.txdata.iter() { - let hash = tx.bitcoin_hash(); - - for (n, out) in tx.output.iter().enumerate() { - let n = n as u32; - // Try taking non-existent UTXO - assert_eq!(read_set.take_utxo(hash, 100 + n), None); - // Check take of real UTXO - let ret = read_set.take_utxo(hash, n); - assert_eq!(ret, Some((1, out.clone()))); - // Try double-take - assert_eq!(read_set.take_utxo(hash, n), None); - } - } - - let deserial_again: io::Result = deserialize(serial); - let mut read_again = deserial_again.unwrap(); - assert!(read_again.rewind(&new_block)); - assert_eq!(read_again.n_utxos(), 0); - for tx in new_block.txdata.iter() { - let hash = tx.bitcoin_hash(); - - for n in 0..tx.output.len() { - let n = n as u32; - let ret = read_again.take_utxo(hash, n); - assert_eq!(ret, None); - } - } - } } diff --git a/src/internal_macros.rs b/src/internal_macros.rs index a6b89a35..c5748e5d 100644 --- a/src/internal_macros.rs +++ b/src/internal_macros.rs @@ -99,45 +99,7 @@ macro_rules! impl_array_newtype { } } - impl ::std::ops::Index<::std::ops::Range> for $thing { - type Output = [$ty]; - - #[inline] - fn index(&self, index: ::std::ops::Range) -> &[$ty] { - let &$thing(ref dat) = self; - &dat[index] - } - } - - impl ::std::ops::Index<::std::ops::RangeTo> for $thing { - type Output = [$ty]; - - #[inline] - fn index(&self, index: ::std::ops::RangeTo) -> &[$ty] { - let &$thing(ref dat) = self; - &dat[index] - } - } - - impl ::std::ops::Index<::std::ops::RangeFrom> for $thing { - type Output = [$ty]; - - #[inline] - fn index(&self, index: ::std::ops::RangeFrom) -> &[$ty] { - let &$thing(ref dat) = self; - &dat[index] - } - } - - impl ::std::ops::Index<::std::ops::RangeFull> for $thing { - type Output = [$ty]; - - #[inline] - fn index(&self, _: ::std::ops::RangeFull) -> &[$ty] { - let &$thing(ref dat) = self; - &dat[..] - } - } + impl_index_newtype!($thing, $ty); impl PartialEq for $thing { #[inline] @@ -234,6 +196,47 @@ macro_rules! impl_array_newtype_show { } } +macro_rules! 
impl_index_newtype { + ($thing:ident, $ty:ty) => { + impl ::std::ops::Index<::std::ops::Range> for $thing { + type Output = [$ty]; + + #[inline] + fn index(&self, index: ::std::ops::Range) -> &[$ty] { + &self.0[index] + } + } + + impl ::std::ops::Index<::std::ops::RangeTo> for $thing { + type Output = [$ty]; + + #[inline] + fn index(&self, index: ::std::ops::RangeTo) -> &[$ty] { + &self.0[index] + } + } + + impl ::std::ops::Index<::std::ops::RangeFrom> for $thing { + type Output = [$ty]; + + #[inline] + fn index(&self, index: ::std::ops::RangeFrom) -> &[$ty] { + &self.0[index] + } + } + + impl ::std::ops::Index<::std::ops::RangeFull> for $thing { + type Output = [$ty]; + + #[inline] + fn index(&self, _: ::std::ops::RangeFull) -> &[$ty] { + &self.0[..] + } + } + + } +} + macro_rules! display_from_debug { ($thing:ident) => { impl ::std::fmt::Display for $thing { diff --git a/src/lib.rs b/src/lib.rs index 9a52a3fa..881549c6 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -47,6 +47,7 @@ extern crate alloc; extern crate byteorder; extern crate collections; +extern crate eventual; extern crate num_cpus; extern crate rand; extern crate rustc_serialize as serialize; diff --git a/src/macros.rs b/src/macros.rs index 87f86414..74c5ee79 100644 --- a/src/macros.rs +++ b/src/macros.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to @@ -18,128 +18,127 @@ #[macro_export] macro_rules! nu_select { - ($($name:pat = $rx:expr => $code:expr),+) => ({ - nu_select!{ $($name = $rx, recv => $code),+ } - }); - ($($name:pat = $rx:expr, $meth:ident => $code:expr),+) => ({ - use rustrt::local::Local; - use rustrt::task::Task; - use sync::comm::Packet; + ($($name:pat = $rx:expr => $code:expr),+) => ({ + nu_select!{ $($name = $rx, recv => $code),+ } + }); + ($($name:pat = $rx:expr, $meth:ident => $code:expr),+) => ({ + use rustrt::local::Local; + use rustrt::task::Task; + use sync::comm::Packet; - // Is anything already ready to receive? Grab it without waiting. - $( - if (&$rx as &Packet).can_recv() { - let $name = $rx.$meth(); - $code - } - )else+ - else { - - // Start selecting on as many as we need to before getting a bite. - // Keep count of how many, since we need to abort every selection - // that we started. - let mut started_count = 0; - // Restrict lifetime of borrows in `packets` - { - let packets = [ $( &$rx as &Packet, )+ ]; - - let task: Box = Local::take(); - task.deschedule(packets.len(), |task| { - match packets[started_count].start_selection(task) { - Ok(()) => { - started_count += 1; - Ok(()) + // Is anything already ready to receive? Grab it without waiting. + $( + if (&$rx as &Packet).can_recv() { + let $name = $rx.$meth(); + $code } - Err(task) => Err(task) - } - }); - } - - let mut i = 0; - let ret = $( - // Abort the receivers, stopping at the first ready one to get its data. - if { i += 1; i <= started_count } && - // If start_selection() failed, abort_selection() will fail too, - // but it still counts as "data available". - ($rx.abort_selection() || i == started_count) { - // React to the first - let $name = $rx.$meth(); - $code - })else+ + )else+ else { - fail!("we didn't find the ready receiver, but we should have had one"); - }; - // At this point, the first i receivers have been aborted. 
We need to abort the rest: - $(if i > 0 { - i -= 1; - } else { - $rx.abort_selection(); - })+ - let _ = i; // Shut up `i -= 1 but i is never read` warning - // Return - ret - } - }) + // Start selecting on as many as we need to before getting a bite. + // Keep count of how many, since we need to abort every selection + // that we started. + let mut started_count = 0; + // Restrict lifetime of borrows in `packets` + { + let packets = [ $( &$rx as &Packet, )+ ]; + + let task: Box = Local::take(); + task.deschedule(packets.len(), |task| { + match packets[started_count].start_selection(task) { + Ok(()) => { + started_count += 1; + Ok(()) + } + Err(task) => Err(task) + } + }); + } + + let mut i = 0; + let ret = $( + // Abort the receivers, stopping at the first ready one to get its data. + if { i += 1; i <= started_count } && + // If start_selection() failed, abort_selection() will fail too, + // but it still counts as "data available". + ($rx.abort_selection() || i == started_count) { + // React to the first + let $name = $rx.$meth(); + $code + })else+ + else { + fail!("we didn't find the ready receiver, but we should have had one"); + }; + // At this point, the first i receivers have been aborted. We need to abort the rest: + $(if i > 0 { + i -= 1; + } else { + $rx.abort_selection(); + })+ + let _ = i; // Shut up `i -= 1 but i is never read` warning + // Return + ret + } + }) } #[macro_export] macro_rules! user_enum { - ($(#[$attr:meta])* pub enum $name:ident { $(#[$doc:meta] $elem:ident <-> $txt:expr),* }) => ( - $(#[$attr])* - pub enum $name { - $(#[$doc] $elem),* - } - - impl ::std::fmt::Debug for $name { - fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { - f.pad(match *self { - $($elem => $txt),* - }) - } - } - - impl ::std::fmt::Display for $name { - fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { - f.pad(match *self { - $($elem => $txt),* - }) - } - } - - impl ::serde::Deserialize for $name { - #[inline] - fn deserialize(d: &mut D) -> Result<$name, D::Error> - where D: ::serde::Deserializer - { - struct Visitor; - impl ::serde::de::Visitor for Visitor { - type Value = $name; - - fn visit_string(&mut self, v: String) -> Result<$name, E> - where E: ::serde::de::Error - { - self.visit_str(&v) - } - - fn visit_str(&mut self, s: &str) -> Result<$name, E> - where E: ::serde::de::Error - { - $( if s == $txt { Ok($name::$elem) } )else* - else { Err(::serde::de::Error::syntax_error()) } - } + ($(#[$attr:meta])* pub enum $name:ident { $(#[$doc:meta] $elem:ident <-> $txt:expr),* }) => ( + $(#[$attr])* + pub enum $name { + $(#[$doc] $elem),* } - d.visit(Visitor) - } - } + impl ::std::fmt::Debug for $name { + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { + f.pad(match *self { + $($elem => $txt),* + }) + } + } - impl ::serde::Serialize for $name { - fn serialize(&self, s: &mut S) -> Result<(), S::Error> - where S: ::serde::Serializer - { - s.visit_str(&self.to_string()) - } - } - ); + impl ::std::fmt::Display for $name { + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { + f.pad(match *self { + $($elem => $txt),* + }) + } + } + + impl ::serde::Deserialize for $name { + #[inline] + fn deserialize(d: &mut D) -> Result<$name, D::Error> + where D: ::serde::Deserializer + { + struct Visitor; + impl ::serde::de::Visitor for Visitor { + type Value = $name; + + fn visit_string(&mut self, v: String) -> Result<$name, E> + where E: ::serde::de::Error + { + self.visit_str(&v) + } + + fn visit_str(&mut self, s: &str) -> 
Result<$name, E> + where E: ::serde::de::Error + { + $( if s == $txt { Ok($name::$elem) } )else* + else { Err(::serde::de::Error::syntax_error()) } + } + } + + d.visit(Visitor) + } + } + + impl ::serde::Serialize for $name { + fn serialize(&self, s: &mut S) -> Result<(), S::Error> + where S: ::serde::Serializer + { + s.visit_str(&self.to_string()) + } + } + ); } diff --git a/src/network/address.rs b/src/network/address.rs index bbe601c0..e5339e8d 100644 --- a/src/network/address.rs +++ b/src/network/address.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to @@ -25,107 +25,107 @@ use network::encodable::{ConsensusDecodable, ConsensusEncodable}; /// A message which can be sent on the Bitcoin network pub struct Address { - /// Services provided by the peer whose address this is - pub services: u64, - /// Network byte-order ipv6 address, or ipv4-mapped ipv6 address - pub address: [u16; 8], - /// Network port - pub port: u16 + /// Services provided by the peer whose address this is + pub services: u64, + /// Network byte-order ipv6 address, or ipv4-mapped ipv6 address + pub address: [u16; 8], + /// Network port + pub port: u16 } impl ConsensusEncodable for Address { - #[inline] - fn consensus_encode(&self, s: &mut S) -> Result<(), S::Error> { - try!(self.services.consensus_encode(s)); - try!(self.address.consensus_encode(s)); - // Explicitly code the port since it needs to be big-endian - try!(((self.port / 0x100) as u8).consensus_encode(s)); - try!(((self.port % 0x100) as u8).consensus_encode(s)); - Ok(()) - } + #[inline] + fn consensus_encode(&self, s: &mut S) -> Result<(), S::Error> { + try!(self.services.consensus_encode(s)); + try!(self.address.consensus_encode(s)); + // Explicitly code the port since it needs to be big-endian + try!(((self.port / 0x100) as u8).consensus_encode(s)); + try!(((self.port % 0x100) as u8).consensus_encode(s)); + Ok(()) + } } impl ConsensusDecodable for Address { - #[inline] - fn consensus_decode(d: &mut D) -> Result { - Ok(Address { - services: try!(ConsensusDecodable::consensus_decode(d)), - address: try!(ConsensusDecodable::consensus_decode(d)), - // Explicitly code the port since it needs to be big-endian - port: { - let b1: u8 = try!(ConsensusDecodable::consensus_decode(d)); - let b2: u8 = try!(ConsensusDecodable::consensus_decode(d)); - (b1 as u16 * 0x100) + (b2 as u16) - } - }) - } + #[inline] + fn consensus_decode(d: &mut D) -> Result { + Ok(Address { + services: try!(ConsensusDecodable::consensus_decode(d)), + address: try!(ConsensusDecodable::consensus_decode(d)), + // Explicitly code the port since it needs to be big-endian + port: { + let b1: u8 = try!(ConsensusDecodable::consensus_decode(d)); + let b2: u8 = try!(ConsensusDecodable::consensus_decode(d)); + (b1 as u16 * 0x100) + (b2 as u16) + } + }) + } } impl fmt::Debug for Address { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - // TODO: render services and hex-ize address - write!(f, "Address {{services: {:?}, address: {:?}, port: {:?}}}", - self.services, &self.address[..], self.port) - } + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + // TODO: render services and hex-ize address + write!(f, "Address {{services: {:?}, address: {:?}, port: {:?}}}", + self.services, &self.address[..], self.port) + } } impl Clone for Address { - fn clone(&self) -> Address { - unsafe { - use 
std::intrinsics::copy_nonoverlapping;
- use std::mem;
- let mut ret = mem::uninitialized();
- copy_nonoverlapping(self,
- &mut ret,
- mem::size_of::<Address>
());
- ret
+ fn clone(&self) -> Address {
+ unsafe {
+ use std::intrinsics::copy_nonoverlapping;
+ use std::mem;
+ let mut ret = mem::uninitialized();
+ copy_nonoverlapping(self,
+ &mut ret,
+ mem::size_of::<Address>
()); + ret + } } - } } impl PartialEq for Address { - fn eq(&self, other: &Address) -> bool { - self.services == other.services && - &self.address[..] == &other.address[..] && - self.port == other.port - } + fn eq(&self, other: &Address) -> bool { + self.services == other.services && + &self.address[..] == &other.address[..] && + self.port == other.port + } } impl Eq for Address {} #[cfg(test)] mod test { - use super::Address; + use super::Address; - use std::io; + use std::io; - use network::serialize::{deserialize, serialize}; + use network::serialize::{deserialize, serialize}; - #[test] - fn serialize_address_test() { - assert_eq!(serialize(&Address { - services: 1, - address: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 0x0a, 0, 0, 1], - port: 8333 - }), - Ok(vec![1u8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0xff, 0xff, 0x0a, 0, 0, 1, 0x20, 0x8d])); - } + #[test] + fn serialize_address_test() { + assert_eq!(serialize(&Address { + services: 1, + address: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 0x0a, 0, 0, 1], + port: 8333 + }), + Ok(vec![1u8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0xff, 0xff, 0x0a, 0, 0, 1, 0x20, 0x8d])); + } - #[test] - fn deserialize_address_test() { - let mut addr: io::Result
<Address> = deserialize(vec![1u8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0xff, 0xff, 0x0a, 0,
- 0, 1, 0x20, 0x8d]);
- assert!(addr.is_ok());
- let full = addr.unwrap();
- assert!(full.services == 1);
- assert!(full.address == [0u8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 0x0a, 0, 0, 1]);
- assert!(full.port == 8333);
+ #[test]
+ fn deserialize_address_test() {
+ let mut addr: io::Result<Address>
= deserialize(vec![1u8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0xff, 0xff, 0x0a, 0, + 0, 1, 0x20, 0x8d]); + assert!(addr.is_ok()); + let full = addr.unwrap(); + assert!(full.services == 1); + assert!(full.address == [0u8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 0x0a, 0, 0, 1]); + assert!(full.port == 8333); - addr = deserialize(vec![1u8, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 0x0a, 0, 0, 1]); - assert!(addr.is_err()); - } + addr = deserialize(vec![1u8, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 0x0a, 0, 0, 1]); + assert!(addr.is_err()); + } } diff --git a/src/network/message_blockdata.rs b/src/network/message_blockdata.rs index a426ed9e..72cd939c 100644 --- a/src/network/message_blockdata.rs +++ b/src/network/message_blockdata.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to @@ -26,12 +26,12 @@ use util::hash::Sha256dHash; #[derive(PartialEq, Eq, Clone, Debug)] /// The type of an inventory object pub enum InvType { - /// Error --- these inventories can be ignored - Error, - /// Transaction - Transaction, - /// Block - Block + /// Error --- these inventories can be ignored + Error, + /// Transaction + Transaction, + /// Block + Block } // Some simple messages @@ -39,133 +39,133 @@ pub enum InvType { /// The `getblocks` message #[derive(PartialEq, Eq, Clone, Debug)] pub struct GetBlocksMessage { - /// The protocol version - pub version: u32, - /// Locator hashes --- ordered newest to oldest. The remote peer will - /// reply with its longest known chain, starting from a locator hash - /// if possible and block 1 otherwise. - pub locator_hashes: Vec, - /// References the block to stop at, or zero to just fetch the maximum 500 blocks - pub stop_hash: Sha256dHash + /// The protocol version + pub version: u32, + /// Locator hashes --- ordered newest to oldest. The remote peer will + /// reply with its longest known chain, starting from a locator hash + /// if possible and block 1 otherwise. + pub locator_hashes: Vec, + /// References the block to stop at, or zero to just fetch the maximum 500 blocks + pub stop_hash: Sha256dHash } /// The `getheaders` message #[derive(PartialEq, Eq, Clone, Debug)] pub struct GetHeadersMessage { - /// The protocol version - pub version: u32, - /// Locator hashes --- ordered newest to oldest. The remote peer will - /// reply with its longest known chain, starting from a locator hash - /// if possible and block 1 otherwise. - pub locator_hashes: Vec, - /// References the header to stop at, or zero to just fetch the maximum 2000 headers - pub stop_hash: Sha256dHash + /// The protocol version + pub version: u32, + /// Locator hashes --- ordered newest to oldest. The remote peer will + /// reply with its longest known chain, starting from a locator hash + /// if possible and block 1 otherwise. 
+ pub locator_hashes: Vec, + /// References the header to stop at, or zero to just fetch the maximum 2000 headers + pub stop_hash: Sha256dHash } /// An inventory object --- a reference to a Bitcoin object #[derive(PartialEq, Eq, Clone, Debug)] pub struct Inventory { - /// The type of object that is referenced - pub inv_type: InvType, - /// The object's hash - pub hash: Sha256dHash + /// The type of object that is referenced + pub inv_type: InvType, + /// The object's hash + pub hash: Sha256dHash } impl GetBlocksMessage { - /// Construct a new `getblocks` message - pub fn new(locator_hashes: Vec, stop_hash: Sha256dHash) -> GetBlocksMessage { - GetBlocksMessage { - version: constants::PROTOCOL_VERSION, - locator_hashes: locator_hashes.clone(), - stop_hash: stop_hash + /// Construct a new `getblocks` message + pub fn new(locator_hashes: Vec, stop_hash: Sha256dHash) -> GetBlocksMessage { + GetBlocksMessage { + version: constants::PROTOCOL_VERSION, + locator_hashes: locator_hashes.clone(), + stop_hash: stop_hash + } } - } } impl_consensus_encoding!(GetBlocksMessage, version, locator_hashes, stop_hash); impl GetHeadersMessage { - /// Construct a new `getheaders` message - pub fn new(locator_hashes: Vec, stop_hash: Sha256dHash) -> GetHeadersMessage { - GetHeadersMessage { - version: constants::PROTOCOL_VERSION, - locator_hashes: locator_hashes, - stop_hash: stop_hash + /// Construct a new `getheaders` message + pub fn new(locator_hashes: Vec, stop_hash: Sha256dHash) -> GetHeadersMessage { + GetHeadersMessage { + version: constants::PROTOCOL_VERSION, + locator_hashes: locator_hashes, + stop_hash: stop_hash + } } - } } impl_consensus_encoding!(GetHeadersMessage, version, locator_hashes, stop_hash); impl ConsensusEncodable for Inventory { - #[inline] - fn consensus_encode(&self, s: &mut S) -> Result<(), S::Error> { - try!(match self.inv_type { - InvType::Error => 0u32, - InvType::Transaction => 1, - InvType::Block => 2 - }.consensus_encode(s)); - self.hash.consensus_encode(s) - } + #[inline] + fn consensus_encode(&self, s: &mut S) -> Result<(), S::Error> { + try!(match self.inv_type { + InvType::Error => 0u32, + InvType::Transaction => 1, + InvType::Block => 2 + }.consensus_encode(s)); + self.hash.consensus_encode(s) + } } impl ConsensusDecodable for Inventory { - #[inline] - fn consensus_decode(d: &mut D) -> Result { - let int_type: u32 = try!(ConsensusDecodable::consensus_decode(d)); - Ok(Inventory { - inv_type: match int_type { - 0 => InvType::Error, - 1 => InvType::Transaction, - 2 => InvType::Block, - // TODO do not fail here - _ => { panic!("bad inventory type field") } - }, - hash: try!(ConsensusDecodable::consensus_decode(d)) - }) - } + #[inline] + fn consensus_decode(d: &mut D) -> Result { + let int_type: u32 = try!(ConsensusDecodable::consensus_decode(d)); + Ok(Inventory { + inv_type: match int_type { + 0 => InvType::Error, + 1 => InvType::Transaction, + 2 => InvType::Block, + // TODO do not fail here + _ => { panic!("bad inventory type field") } + }, + hash: try!(ConsensusDecodable::consensus_decode(d)) + }) + } } #[cfg(test)] mod tests { - use super::{GetHeadersMessage, GetBlocksMessage}; + use super::{GetHeadersMessage, GetBlocksMessage}; - use std::io; - use serialize::hex::FromHex; + use std::io; + use serialize::hex::FromHex; - use network::serialize::{deserialize, serialize}; - use std::default::Default; + use network::serialize::{deserialize, serialize}; + use std::default::Default; - #[test] - fn getblocks_message_test() { - let from_sat = 
"72110100014a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b0000000000000000000000000000000000000000000000000000000000000000".from_hex().unwrap(); - let genhash = "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b".from_hex().unwrap(); + #[test] + fn getblocks_message_test() { + let from_sat = "72110100014a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b0000000000000000000000000000000000000000000000000000000000000000".from_hex().unwrap(); + let genhash = "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b".from_hex().unwrap(); - let decode: io::Result = deserialize(from_sat.clone()); - assert!(decode.is_ok()); - let real_decode = decode.unwrap(); - assert_eq!(real_decode.version, 70002); - assert_eq!(real_decode.locator_hashes.len(), 1); - assert_eq!(serialize(&real_decode.locator_hashes[0]), Ok(genhash)); - assert_eq!(real_decode.stop_hash, Default::default()); + let decode: io::Result = deserialize(from_sat.clone()); + assert!(decode.is_ok()); + let real_decode = decode.unwrap(); + assert_eq!(real_decode.version, 70002); + assert_eq!(real_decode.locator_hashes.len(), 1); + assert_eq!(serialize(&real_decode.locator_hashes[0]), Ok(genhash)); + assert_eq!(real_decode.stop_hash, Default::default()); - assert_eq!(serialize(&real_decode), Ok(from_sat)); - } + assert_eq!(serialize(&real_decode), Ok(from_sat)); + } - #[test] - fn getheaders_message_test() { - let from_sat = "72110100014a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b0000000000000000000000000000000000000000000000000000000000000000".from_hex().unwrap(); - let genhash = "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b".from_hex().unwrap(); + #[test] + fn getheaders_message_test() { + let from_sat = "72110100014a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b0000000000000000000000000000000000000000000000000000000000000000".from_hex().unwrap(); + let genhash = "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b".from_hex().unwrap(); - let decode: io::Result = deserialize(from_sat.clone()); - assert!(decode.is_ok()); - let real_decode = decode.unwrap(); - assert_eq!(real_decode.version, 70002); - assert_eq!(real_decode.locator_hashes.len(), 1); - assert_eq!(serialize(&real_decode.locator_hashes[0]), Ok(genhash)); - assert_eq!(real_decode.stop_hash, Default::default()); + let decode: io::Result = deserialize(from_sat.clone()); + assert!(decode.is_ok()); + let real_decode = decode.unwrap(); + assert_eq!(real_decode.version, 70002); + assert_eq!(real_decode.locator_hashes.len(), 1); + assert_eq!(serialize(&real_decode.locator_hashes[0]), Ok(genhash)); + assert_eq!(real_decode.stop_hash, Default::default()); - assert_eq!(serialize(&real_decode), Ok(from_sat)); - } + assert_eq!(serialize(&real_decode), Ok(from_sat)); + } } diff --git a/src/network/socket.rs b/src/network/socket.rs index 13e874ae..a3667ca8 100644 --- a/src/network/socket.rs +++ b/src/network/socket.rs @@ -34,24 +34,24 @@ use util::{self, propagate_err}; /// Format an IP address in the 16-byte bitcoin protocol serialization fn ipaddr_to_bitcoin_addr(ipaddr: &ip::IpAddr) -> [u16; 8] { - match *ipaddr { - ip::IpAddr::V4(ref addr) => &addr.to_ipv6_mapped(), - ip::IpAddr::V6(ref addr) => addr - }.segments() + match *ipaddr { + ip::IpAddr::V4(ref addr) => &addr.to_ipv6_mapped(), + ip::IpAddr::V6(ref addr) => addr + }.segments() } /// A network socket along with information about the peer #[derive(Clone)] pub struct Socket { - /// The underlying TCP socket - 
socket: Arc>>, - /// Services supported by us - pub services: u64, - /// Our user agent - pub user_agent: String, - /// Nonce to identify our `version` messages - pub version_nonce: u64, - /// Network magic + /// The underlying TCP socket + socket: Arc>>, + /// Services supported by us + pub services: u64, + /// Our user agent + pub user_agent: String, + /// Nonce to identify our `version` messages + pub version_nonce: u64, + /// Network magic pub magic: u32 } diff --git a/src/util/base58.rs b/src/util/base58.rs index 8a8ecf08..f50ddca1 100644 --- a/src/util/base58.rs +++ b/src/util/base58.rs @@ -23,204 +23,204 @@ use util::hash::Sha256dHash; /// An error that might occur during base58 decoding #[derive(Debug, PartialEq, Eq, Clone)] pub enum Error { - /// Invalid character encountered - BadByte(u8), - /// Checksum was not correct (expected, actual) - BadChecksum(u32, u32), - /// The length (in bytes) of the object was not correct - InvalidLength(usize), - /// Version byte(s) were not recognized - InvalidVersion(Vec), - /// Checked data was less than 4 bytes - TooShort(usize), - /// Any other error - Other(String) + /// Invalid character encountered + BadByte(u8), + /// Checksum was not correct (expected, actual) + BadChecksum(u32, u32), + /// The length (in bytes) of the object was not correct + InvalidLength(usize), + /// Version byte(s) were not recognized + InvalidVersion(Vec), + /// Checked data was less than 4 bytes + TooShort(usize), + /// Any other error + Other(String) } static BASE58_CHARS: &'static [u8] = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; static BASE58_DIGITS: [Option; 128] = [ - None, None, None, None, None, None, None, None, // 0-7 - None, None, None, None, None, None, None, None, // 8-15 - None, None, None, None, None, None, None, None, // 16-23 - None, None, None, None, None, None, None, None, // 24-31 - None, None, None, None, None, None, None, None, // 32-39 - None, None, None, None, None, None, None, None, // 40-47 - None, Some(0), Some(1), Some(2), Some(3), Some(4), Some(5), Some(6), // 48-55 - Some(7), Some(8), None, None, None, None, None, None, // 56-63 - None, Some(9), Some(10), Some(11), Some(12), Some(13), Some(14), Some(15), // 64-71 - Some(16), None, Some(17), Some(18), Some(19), Some(20), Some(21), None, // 72-79 - Some(22), Some(23), Some(24), Some(25), Some(26), Some(27), Some(28), Some(29), // 80-87 - Some(30), Some(31), Some(32), None, None, None, None, None, // 88-95 - None, Some(33), Some(34), Some(35), Some(36), Some(37), Some(38), Some(39), // 96-103 - Some(40), Some(41), Some(42), Some(43), None, Some(44), Some(45), Some(46), // 104-111 - Some(47), Some(48), Some(49), Some(50), Some(51), Some(52), Some(53), Some(54), // 112-119 - Some(55), Some(56), Some(57), None, None, None, None, None, // 120-127 + None, None, None, None, None, None, None, None, // 0-7 + None, None, None, None, None, None, None, None, // 8-15 + None, None, None, None, None, None, None, None, // 16-23 + None, None, None, None, None, None, None, None, // 24-31 + None, None, None, None, None, None, None, None, // 32-39 + None, None, None, None, None, None, None, None, // 40-47 + None, Some(0), Some(1), Some(2), Some(3), Some(4), Some(5), Some(6), // 48-55 + Some(7), Some(8), None, None, None, None, None, None, // 56-63 + None, Some(9), Some(10), Some(11), Some(12), Some(13), Some(14), Some(15), // 64-71 + Some(16), None, Some(17), Some(18), Some(19), Some(20), Some(21), None, // 72-79 + Some(22), Some(23), Some(24), Some(25), Some(26), Some(27), Some(28), 
Some(29), // 80-87 + Some(30), Some(31), Some(32), None, None, None, None, None, // 88-95 + None, Some(33), Some(34), Some(35), Some(36), Some(37), Some(38), Some(39), // 96-103 + Some(40), Some(41), Some(42), Some(43), None, Some(44), Some(45), Some(46), // 104-111 + Some(47), Some(48), Some(49), Some(50), Some(51), Some(52), Some(53), Some(54), // 112-119 + Some(55), Some(56), Some(57), None, None, None, None, None, // 120-127 ]; /// Trait for objects which can be read as base58 -pub trait FromBase58 { - /// Constructs an object flrom the byte-encoding (base 256) - /// representation of its base58 format - fn from_base58_layout(data: Vec) -> Result; +pub trait FromBase58: Sized { + /// Constructs an object flrom the byte-encoding (base 256) + /// representation of its base58 format + fn from_base58_layout(data: Vec) -> Result; - /// Obtain an object from its base58 encoding - fn from_base58(data: &str) -> Result { - // 11/15 is just over log_256(58) - let mut scratch = Vec::from_elem(1 + data.len() * 11 / 15, 0u8); - // Build in base 256 - for d58 in data.bytes() { - // Compute "X = X * 58 + next_digit" in base 256 - if d58 as usize > BASE58_DIGITS.len() { - return Err(Error::BadByte(d58)); - } - let mut carry = match BASE58_DIGITS[d58 as usize] { - Some(d58) => d58 as u32, - None => { return Err(Error::BadByte(d58)); } - }; - for d256 in scratch.iter_mut().rev() { - carry += *d256 as u32 * 58; - *d256 = carry as u8; - carry /= 256; - } - assert_eq!(carry, 0); + /// Obtain an object from its base58 encoding + fn from_base58(data: &str) -> Result { + // 11/15 is just over log_256(58) + let mut scratch = vec![0u8; 1 + data.len() * 11 / 15]; + // Build in base 256 + for d58 in data.bytes() { + // Compute "X = X * 58 + next_digit" in base 256 + if d58 as usize > BASE58_DIGITS.len() { + return Err(Error::BadByte(d58)); + } + let mut carry = match BASE58_DIGITS[d58 as usize] { + Some(d58) => d58 as u32, + None => { return Err(Error::BadByte(d58)); } + }; + for d256 in scratch.iter_mut().rev() { + carry += *d256 as u32 * 58; + *d256 = carry as u8; + carry /= 256; + } + assert_eq!(carry, 0); + } + + // Copy leading zeroes directly + let mut ret: Vec = data.bytes().take_while(|&x| x == BASE58_CHARS[0]) + .map(|_| 0) + .collect(); + // Copy rest of string + ret.extend(scratch.into_iter().skip_while(|&x| x == 0)); + FromBase58::from_base58_layout(ret) } - // Copy leading zeroes directly - let mut ret: Vec = data.bytes().take_while(|&x| x == BASE58_CHARS[0]) - .map(|_| 0) - .collect(); - // Copy rest of string - ret.extend(scratch.into_iter().skip_while(|&x| x == 0)); - FromBase58::from_base58_layout(ret) - } - - /// Obtain an object from its base58check encoding - fn from_base58check(data: &str) -> Result { - let mut ret: Vec = try!(FromBase58::from_base58(data)); - if ret.len() < 4 { - return Err(Error::TooShort(ret.len())); + /// Obtain an object from its base58check encoding + fn from_base58check(data: &str) -> Result { + let mut ret: Vec = try!(FromBase58::from_base58(data)); + if ret.len() < 4 { + return Err(Error::TooShort(ret.len())); + } + let ck_start = ret.len() - 4; + let expected = Sha256dHash::from_data(&ret[..ck_start]).into_le().low_u32(); + let actual = LittleEndian::read_u32(&ret[ck_start..(ck_start + 4)]); + if expected != actual { + return Err(Error::BadChecksum(expected, actual)); + } + + ret.truncate(ck_start); + FromBase58::from_base58_layout(ret) } - let ck_start = ret.len() - 4; - let expected = Sha256dHash::from_data(&ret[..ck_start]).into_le().low_u32(); - let actual = 
LittleEndian::read_u32(&ret[ck_start..(ck_start + 4)]); - if expected != actual { - return Err(Error::BadChecksum(expected, actual)); - } - - ret.truncate(ck_start); - FromBase58::from_base58_layout(ret) - } } /// Directly encode a slice as base58 pub fn base58_encode_slice(data: &[u8]) -> String { - // 7/5 is just over log_58(256) - let mut scratch = vec![0u8; 1 + data.len() * 7 / 5]; - // Build in base 58 - for &d256 in data.base58_layout().iter() { - // Compute "X = X * 256 + next_digit" in base 58 - let mut carry = d256 as u32; - for d58 in scratch.iter_mut().rev() { - carry += (*d58 as u32) << 8; - *d58 = (carry % 58) as u8; - carry /= 58; + // 7/5 is just over log_58(256) + let mut scratch = vec![0u8; 1 + data.len() * 7 / 5]; + // Build in base 58 + for &d256 in data.base58_layout().iter() { + // Compute "X = X * 256 + next_digit" in base 58 + let mut carry = d256 as u32; + for d58 in scratch.iter_mut().rev() { + carry += (*d58 as u32) << 8; + *d58 = (carry % 58) as u8; + carry /= 58; + } + assert_eq!(carry, 0); } - assert_eq!(carry, 0); - } - // Unsafely translate the bytes to a utf8 string - unsafe { - // Copy leading zeroes directly - let mut ret: Vec = str::from_utf8(data.iter().take_while(|&&x| x == 0) - .map(|_| BASE58_CHARS[0]) - .collect()).unwrap(); - // Copy rest of string - ret.as_mut_vec().extend(scratch.into_iter().skip_while(|&x| x == 0) - .map(|x| BASE58_CHARS[x as usize])); - ret - } + // Unsafely translate the bytes to a utf8 string + unsafe { + // Copy leading zeroes directly + let mut ret: Vec = data.iter().take_while(|&&x| x == 0) + .map(|_| BASE58_CHARS[0]) + .collect(); + // Copy rest of string + ret.extend(scratch.into_iter().skip_while(|&x| x == 0) + .map(|x| BASE58_CHARS[x as usize])); + String::from_utf8(ret).unwrap() + } } /// Trait for objects which can be written as base58 pub trait ToBase58 { - /// The serialization to be converted into base58 - fn base58_layout(&self) -> Vec; + /// The serialization to be converted into base58 + fn base58_layout(&self) -> Vec; - /// Obtain a string with the base58 encoding of the object - fn to_base58(&self) -> String { - base58_encode_slice(&self.base58_layout()[..]) - } + /// Obtain a string with the base58 encoding of the object + fn to_base58(&self) -> String { + base58_encode_slice(&self.base58_layout()[..]) + } - /// Obtain a string with the base58check encoding of the object - /// (Tack the first 4 256-digits of the object's Bitcoin hash onto the end.) - fn to_base58check(&self) -> String { - let mut data = self.base58_layout(); - let checksum = Sha256dHash::from_data(&data).into_le().low_u32(); - data.write_u32::(checksum); - base58_encode_slice(&data) - } + /// Obtain a string with the base58check encoding of the object + /// (Tack the first 4 256-digits of the object's Bitcoin hash onto the end.) 
+ fn to_base58check(&self) -> String { + let mut data = self.base58_layout(); + let checksum = Sha256dHash::from_data(&data).into_le().low_u32(); + data.write_u32::(checksum); + base58_encode_slice(&data) + } } // Trivial implementations for slices and vectors impl<'a> ToBase58 for &'a [u8] { - fn base58_layout(&self) -> Vec { self.to_vec() } - fn to_base58(&self) -> String { base58_encode_slice(*self) } + fn base58_layout(&self) -> Vec { self.to_vec() } + fn to_base58(&self) -> String { base58_encode_slice(*self) } } impl FromBase58 for Vec { - fn from_base58_layout(data: Vec) -> Result, Error> { - Ok(data) - } + fn from_base58_layout(data: Vec) -> Result, Error> { + Ok(data) + } } #[cfg(test)] mod tests { - use serialize::hex::FromHex; + use serialize::hex::FromHex; - use super::ToBase58; - use super::FromBase58; + use super::ToBase58; + use super::FromBase58; - #[test] - fn test_base58_encode() { - // Basics - assert_eq!([0].as_slice().to_base58().as_slice(), "1"); - assert_eq!([1].as_slice().to_base58().as_slice(), "2"); - assert_eq!([58].as_slice().to_base58().as_slice(), "21"); - assert_eq!([13, 36].as_slice().to_base58().as_slice(), "211"); + #[test] + fn test_base58_encode() { + // Basics + assert_eq!([0].as_slice().to_base58().as_slice(), "1"); + assert_eq!([1].as_slice().to_base58().as_slice(), "2"); + assert_eq!([58].as_slice().to_base58().as_slice(), "21"); + assert_eq!([13, 36].as_slice().to_base58().as_slice(), "211"); - // Leading zeroes - assert_eq!([0, 13, 36].as_slice().to_base58().as_slice(), "1211"); - assert_eq!([0, 0, 0, 0, 13, 36].as_slice().to_base58().as_slice(), "1111211"); + // Leading zeroes + assert_eq!([0, 13, 36].as_slice().to_base58().as_slice(), "1211"); + assert_eq!([0, 0, 0, 0, 13, 36].as_slice().to_base58().as_slice(), "1111211"); - // Addresses - assert_eq!("00f8917303bfa8ef24f292e8fa1419b20460ba064d".from_hex().unwrap().to_base58check().as_slice(), - "1PfJpZsjreyVrqeoAfabrRwwjQyoSQMmHH"); - } + // Addresses + assert_eq!("00f8917303bfa8ef24f292e8fa1419b20460ba064d".from_hex().unwrap().to_base58check().as_slice(), + "1PfJpZsjreyVrqeoAfabrRwwjQyoSQMmHH"); + } - #[test] - fn test_base58_decode() { - // Basics - assert_eq!(FromBase58::from_base58("1"), Ok(vec![0u8])); - assert_eq!(FromBase58::from_base58("2"), Ok(vec![1u8])); - assert_eq!(FromBase58::from_base58("21"), Ok(vec![58u8])); - assert_eq!(FromBase58::from_base58("211"), Ok(vec![13u8, 36])); + #[test] + fn test_base58_decode() { + // Basics + assert_eq!(FromBase58::from_base58("1"), Ok(vec![0u8])); + assert_eq!(FromBase58::from_base58("2"), Ok(vec![1u8])); + assert_eq!(FromBase58::from_base58("21"), Ok(vec![58u8])); + assert_eq!(FromBase58::from_base58("211"), Ok(vec![13u8, 36])); - // Leading zeroes - assert_eq!(FromBase58::from_base58("1211"), Ok(vec![0u8, 13, 36])); - assert_eq!(FromBase58::from_base58("111211"), Ok(vec![0u8, 0, 0, 13, 36])); + // Leading zeroes + assert_eq!(FromBase58::from_base58("1211"), Ok(vec![0u8, 13, 36])); + assert_eq!(FromBase58::from_base58("111211"), Ok(vec![0u8, 0, 0, 13, 36])); - // Addresses - assert_eq!(FromBase58::from_base58check("1PfJpZsjreyVrqeoAfabrRwwjQyoSQMmHH"), - Ok("00f8917303bfa8ef24f292e8fa1419b20460ba064d".from_hex().unwrap())) - } + // Addresses + assert_eq!(FromBase58::from_base58check("1PfJpZsjreyVrqeoAfabrRwwjQyoSQMmHH"), + Ok("00f8917303bfa8ef24f292e8fa1419b20460ba064d".from_hex().unwrap())) + } - #[test] - fn test_base58_roundtrip() { - let s = 
"xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs"; - let v: Vec = FromBase58::from_base58check(s).unwrap(); - assert_eq!(v.to_base58check().as_slice(), s); - assert_eq!(FromBase58::from_base58check(v.to_base58check().as_slice()), Ok(v)); - } + #[test] + fn test_base58_roundtrip() { + let s = "xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs"; + let v: Vec = FromBase58::from_base58check(s).unwrap(); + assert_eq!(v.to_base58check().as_slice(), s); + assert_eq!(FromBase58::from_base58check(v.to_base58check().as_slice()), Ok(v)); + } } diff --git a/src/util/hash.rs b/src/util/hash.rs index 20ef8e7b..a50e92f2 100644 --- a/src/util/hash.rs +++ b/src/util/hash.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to // the public domain worldwide. This software is distributed without @@ -38,9 +38,9 @@ pub struct Sha256dHash([u8; 32]); impl_array_newtype!(Sha256dHash, u8, 32); impl ::std::fmt::Debug for Sha256dHash { - fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { - write!(f, "{}", self.be_hex_string().as_slice()) - } + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { + write!(f, "{}", self.be_hex_string().as_slice()) + } } /// A RIPEMD-160 hash @@ -60,100 +60,100 @@ pub struct Hash48((u8, u8, u8, u8, u8, u8)); pub struct Hash64((u8, u8, u8, u8, u8, u8, u8, u8)); impl Ripemd160Hash { - /// Create a hash by hashing some data - pub fn from_data(data: &[u8]) -> Ripemd160Hash { - let mut ret = [0; 20]; - let mut rmd = Ripemd160::new(); - rmd.input(data); - rmd.result(&mut ret); - Ripemd160Hash(ret) - } + /// Create a hash by hashing some data + pub fn from_data(data: &[u8]) -> Ripemd160Hash { + let mut ret = [0; 20]; + let mut rmd = Ripemd160::new(); + rmd.input(data); + rmd.result(&mut ret); + Ripemd160Hash(ret) + } } // This doesn't make much sense to me, but is implicit behaviour // in the C++ reference client impl Default for Sha256dHash { - #[inline] - fn default() -> Sha256dHash { Sha256dHash([0u8; 32]) } + #[inline] + fn default() -> Sha256dHash { Sha256dHash([0u8; 32]) } } impl Sha256dHash { - /// Create a hash by hashing some data - pub fn from_data(data: &[u8]) -> Sha256dHash { - let Sha256dHash(mut ret): Sha256dHash = Default::default(); - let mut sha2 = Sha256::new(); - sha2.input(data); - sha2.result(&mut ret); - sha2.reset(); - sha2.input(&ret); - sha2.result(&mut ret); - Sha256dHash(ret) - } - - /// Converts a hash to a little-endian Uint256 - #[inline] - pub fn into_le(self) -> Uint256 { - let Sha256dHash(data) = self; - let mut ret: [u64; 4] = unsafe { transmute(data) }; - for x in (&mut ret).iter_mut() { *x = x.to_le(); } - Uint256(ret) - } - - /// Converts a hash to a big-endian Uint256 - #[inline] - pub fn into_be(self) -> Uint256 { - let Sha256dHash(mut data) = self; - data.reverse(); - let mut ret: [u64; 4] = unsafe { transmute(data) }; - for x in (&mut ret).iter_mut() { *x = x.to_be(); } - Uint256(ret) - } - - /// Converts a hash to a Hash32 by truncation - #[inline] - pub fn into_hash32(self) -> Hash32 { - let Sha256dHash(data) = self; - unsafe { transmute([data[0], data[8], data[16], data[24]]) } - } - - /// Converts a hash to a Hash48 by truncation - #[inline] - pub fn into_hash48(self) -> Hash48 { - let Sha256dHash(data) = 
self; - unsafe { transmute([data[0], data[6], data[12], data[18], data[24], data[30]]) } - } - - /// Human-readable hex output - - /// Converts a hash to a Hash64 by truncation - #[inline] - pub fn into_hash64(self) -> Hash64 { - let Sha256dHash(data) = self; - unsafe { transmute([data[0], data[4], data[8], data[12], - data[16], data[20], data[24], data[28]]) } - } - - /// Human-readable hex output - pub fn le_hex_string(&self) -> String { - let &Sha256dHash(data) = self; - let mut ret = String::with_capacity(64); - for i in 0..32 { - ret.push(from_digit((data[i] / 0x10) as u32, 16).unwrap()); - ret.push(from_digit((data[i] & 0x0f) as u32, 16).unwrap()); + /// Create a hash by hashing some data + pub fn from_data(data: &[u8]) -> Sha256dHash { + let Sha256dHash(mut ret): Sha256dHash = Default::default(); + let mut sha2 = Sha256::new(); + sha2.input(data); + sha2.result(&mut ret); + sha2.reset(); + sha2.input(&ret); + sha2.result(&mut ret); + Sha256dHash(ret) } - ret - } - /// Human-readable hex output - pub fn be_hex_string(&self) -> String { - let &Sha256dHash(data) = self; - let mut ret = String::with_capacity(64); - for i in (0..32).rev() { - ret.push(from_digit((data[i] / 0x10) as u32, 16).unwrap()); - ret.push(from_digit((data[i] & 0x0f) as u32, 16).unwrap()); + /// Converts a hash to a little-endian Uint256 + #[inline] + pub fn into_le(self) -> Uint256 { + let Sha256dHash(data) = self; + let mut ret: [u64; 4] = unsafe { transmute(data) }; + for x in (&mut ret).iter_mut() { *x = x.to_le(); } + Uint256(ret) + } + + /// Converts a hash to a big-endian Uint256 + #[inline] + pub fn into_be(self) -> Uint256 { + let Sha256dHash(mut data) = self; + data.reverse(); + let mut ret: [u64; 4] = unsafe { transmute(data) }; + for x in (&mut ret).iter_mut() { *x = x.to_be(); } + Uint256(ret) + } + + /// Converts a hash to a Hash32 by truncation + #[inline] + pub fn into_hash32(self) -> Hash32 { + let Sha256dHash(data) = self; + unsafe { transmute([data[0], data[8], data[16], data[24]]) } + } + + /// Converts a hash to a Hash48 by truncation + #[inline] + pub fn into_hash48(self) -> Hash48 { + let Sha256dHash(data) = self; + unsafe { transmute([data[0], data[6], data[12], data[18], data[24], data[30]]) } + } + + /// Human-readable hex output + + /// Converts a hash to a Hash64 by truncation + #[inline] + pub fn into_hash64(self) -> Hash64 { + let Sha256dHash(data) = self; + unsafe { transmute([data[0], data[4], data[8], data[12], + data[16], data[20], data[24], data[28]]) } + } + + /// Human-readable hex output + pub fn le_hex_string(&self) -> String { + let &Sha256dHash(data) = self; + let mut ret = String::with_capacity(64); + for i in 0..32 { + ret.push(from_digit((data[i] / 0x10) as u32, 16).unwrap()); + ret.push(from_digit((data[i] & 0x0f) as u32, 16).unwrap()); + } + ret + } + + /// Human-readable hex output + pub fn be_hex_string(&self) -> String { + let &Sha256dHash(data) = self; + let mut ret = String::with_capacity(64); + for i in (0..32).rev() { + ret.push(from_digit((data[i] / 0x10) as u32, 16).unwrap()); + ret.push(from_digit((data[i] & 0x0f) as u32, 16).unwrap()); + } + ret } - ret - } } // Note that this outputs hashes as big endian hex numbers, so this should be @@ -161,48 +161,48 @@ impl Sha256dHash { // little-endian and should be done using the consensus `encodable::ConsensusEncodable` // interface. 
impl serde::Serialize for Sha256dHash { - fn serialize(&self, serializer: &mut S) -> Result<(), S::Error> - where S: serde::Serializer, - { - serializer.visit_str(&self.be_hex_string()) - } + fn serialize(&self, serializer: &mut S) -> Result<(), S::Error> + where S: serde::Serializer, + { + serializer.visit_str(&self.be_hex_string()) + } } impl serde::Deserialize for Sha256dHash { - #[inline] - fn deserialize(d: &mut D) -> Result - where D: serde::Deserializer - { - use serialize::hex::FromHex; + #[inline] + fn deserialize(d: &mut D) -> Result + where D: serde::Deserializer + { + use serialize::hex::FromHex; - struct Sha256dHashVisitor; - impl serde::de::Visitor for Sha256dHashVisitor { - type Value = Sha256dHash; + struct Sha256dHashVisitor; + impl serde::de::Visitor for Sha256dHashVisitor { + type Value = Sha256dHash; - fn visit_string(&mut self, v: String) -> Result - where E: serde::de::Error - { - self.visit_str(&v) - } + fn visit_string(&mut self, v: String) -> Result + where E: serde::de::Error + { + self.visit_str(&v) + } - fn visit_str(&mut self, hex_str: &str) -> Result - where E: serde::de::Error - { - if hex_str.len() != 64 { - return Err(serde::de::Error::syntax_error()); + fn visit_str(&mut self, hex_str: &str) -> Result + where E: serde::de::Error + { + if hex_str.len() != 64 { + return Err(serde::de::Error::syntax_error()); + } + let raw_str = try!(hex_str.from_hex() + .map_err(|_| serde::de::Error::syntax_error())); + let mut ret = [0u8; 32]; + for i in 0..32 { + ret[i] = raw_str[31 - i]; + } + Ok(Sha256dHash(ret)) + } } - let raw_str = try!(hex_str.from_hex() - .map_err(|_| serde::de::Error::syntax_error())); - let mut ret = [0u8; 32]; - for i in 0..32 { - ret[i] = raw_str[31 - i]; - } - Ok(Sha256dHash(ret)) - } + + d.visit(Sha256dHashVisitor) } - - d.visit(Sha256dHashVisitor) - } } // Consensus encoding (little-endian) @@ -212,108 +212,108 @@ impl_newtype_consensus_encoding!(Hash64); impl_newtype_consensus_encoding!(Sha256dHash); impl fmt::LowerHex for Sha256dHash { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let &Sha256dHash(data) = self; - let mut rv = [0; 64]; - for ch in data.iter().rev() { - try!(write!(f, "{:02x}", ch)); + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let &Sha256dHash(data) = self; + let mut rv = [0; 64]; + for ch in data.iter().rev() { + try!(write!(f, "{:02x}", ch)); + } + Ok(()) } - Ok(()) - } } /// Any collection of objects for which a merkle root makes sense to calculate pub trait MerkleRoot { - /// Construct a merkle tree from a collection, with elements ordered as - /// they were in the original collection, and return the merkle root. - fn merkle_root(&self) -> Sha256dHash; + /// Construct a merkle tree from a collection, with elements ordered as + /// they were in the original collection, and return the merkle root. 
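    /// (Editorial sketch, not part of the original patch: the implementation
    /// below consensus-encodes the hashes two at a time and double-SHA256s
    /// the concatenation, pairing an odd trailing hash with itself. For three
    /// leaves `a`, `b`, `c` this gives roughly
    ///     p1 = H(enc(h(a)) || enc(h(b)));   p2 = H(enc(h(c)) || enc(h(c)));
    ///     root = H(enc(p1) || enc(p2));
    /// where `h` is the element's `bitcoin_hash()`, `enc` is consensus
    /// encoding and `H` is a double SHA256 of the encoded bytes.)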
+ fn merkle_root(&self) -> Sha256dHash; } impl<'a, T: BitcoinHash> MerkleRoot for &'a [T] { - fn merkle_root(&self) -> Sha256dHash { - fn merkle_root(data: Vec) -> Sha256dHash { - // Base case - if data.len() < 1 { - return Default::default(); - } - if data.len() < 2 { - return data[0]; - } - // Recursion - let mut next = vec![]; - for idx in 0..((data.len() + 1) / 2) { - let idx1 = 2 * idx; - let idx2 = min(idx1 + 1, data.len() - 1); - let mut encoder = RawEncoder::new(Cursor::new(vec![])); - data[idx1].consensus_encode(&mut encoder).unwrap(); - data[idx2].consensus_encode(&mut encoder).unwrap(); - next.push(encoder.unwrap().into_inner().bitcoin_hash()); - } - merkle_root(next) + fn merkle_root(&self) -> Sha256dHash { + fn merkle_root(data: Vec) -> Sha256dHash { + // Base case + if data.len() < 1 { + return Default::default(); + } + if data.len() < 2 { + return data[0]; + } + // Recursion + let mut next = vec![]; + for idx in 0..((data.len() + 1) / 2) { + let idx1 = 2 * idx; + let idx2 = min(idx1 + 1, data.len() - 1); + let mut encoder = RawEncoder::new(Cursor::new(vec![])); + data[idx1].consensus_encode(&mut encoder).unwrap(); + data[idx2].consensus_encode(&mut encoder).unwrap(); + next.push(encoder.into_inner().into_inner().bitcoin_hash()); + } + merkle_root(next) + } + merkle_root(self.iter().map(|obj| obj.bitcoin_hash()).collect()) } - merkle_root(self.iter().map(|obj| obj.bitcoin_hash()).collect()) - } } impl MerkleRoot for Vec { - fn merkle_root(&self) -> Sha256dHash { - (&self[..]).merkle_root() - } + fn merkle_root(&self) -> Sha256dHash { + (&self[..]).merkle_root() + } } #[cfg(test)] mod tests { - use std::prelude::*; - use std::io::Cursor; - use std::str::from_utf8; - use serialize::Encodable; - use serialize::json; + use std::prelude::*; + use std::io::Cursor; + use std::str::from_utf8; + use serialize::Encodable; + use serialize::json; - use network::serialize::{serialize, deserialize}; - use util::hash::Sha256dHash; + use network::serialize::{serialize, deserialize}; + use util::hash::Sha256dHash; - #[test] - fn test_sha256d() { - // nb the 5df6... output is the one you get from sha256sum. this is the - // "little-endian" hex string since it matches the in-memory representation - // of a Uint256 (which is little-endian) after transmutation - assert_eq!(Sha256dHash::from_data(&[]).le_hex_string(), - "5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456".to_string()); - assert_eq!(Sha256dHash::from_data(&[]).be_hex_string(), - "56944c5d3f98413ef45cf54545538103cc9f298e0575820ad3591376e2e0f65d".to_string()); - } - - #[test] - fn test_consenus_encode_roundtrip() { - let hash = Sha256dHash::from_data(&[]); - let serial = serialize(&hash).unwrap(); - let deserial = deserialize(serial).unwrap(); - assert_eq!(hash, deserial); - } - - #[test] - fn test_hash_encode_decode() { - let hash = Sha256dHash::from_data(&[]); - let mut writer = Cursor::new(vec![]); - { - let mut encoder = json::Encoder::new(&mut writer); - assert!(hash.encode(&mut encoder).is_ok()); + #[test] + fn test_sha256d() { + // nb the 5df6... output is the one you get from sha256sum. 
this is the + // "little-endian" hex string since it matches the in-memory representation + // of a Uint256 (which is little-endian) after transmutation + assert_eq!(Sha256dHash::from_data(&[]).le_hex_string(), + "5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456".to_string()); + assert_eq!(Sha256dHash::from_data(&[]).be_hex_string(), + "56944c5d3f98413ef45cf54545538103cc9f298e0575820ad3591376e2e0f65d".to_string()); } - let res = writer.unwrap(); - assert_eq!(&res.as_slice(), - "\"56944c5d3f98413ef45cf54545538103cc9f298e0575820ad3591376e2e0f65d\"".as_bytes()); - assert_eq!(json::decode(from_utf8(res.as_slice()).unwrap()), Ok(hash)); - } - #[test] - fn test_sighash_single_vec() { - let one = Sha256dHash([1, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0]); - assert_eq!(Some(one.into_le()), FromPrimitive::from_u64(1)); - assert_eq!(Some(one.into_le().low_128()), FromPrimitive::from_u64(1)); - } + #[test] + fn test_consenus_encode_roundtrip() { + let hash = Sha256dHash::from_data(&[]); + let serial = serialize(&hash).unwrap(); + let deserial = deserialize(serial).unwrap(); + assert_eq!(hash, deserial); + } + + #[test] + fn test_hash_encode_decode() { + let hash = Sha256dHash::from_data(&[]); + let mut writer = Cursor::new(vec![]); + { + let mut encoder = json::Encoder::new(&mut writer); + assert!(hash.encode(&mut encoder).is_ok()); + } + let res = writer.unwrap(); + assert_eq!(&res.as_slice(), + "\"56944c5d3f98413ef45cf54545538103cc9f298e0575820ad3591376e2e0f65d\"".as_bytes()); + assert_eq!(json::decode(from_utf8(res.as_slice()).unwrap()), Ok(hash)); + } + + #[test] + fn test_sighash_single_vec() { + let one = Sha256dHash([1, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0]); + assert_eq!(Some(one.into_le()), FromPrimitive::from_u64(1)); + assert_eq!(Some(one.into_le().low_128()), FromPrimitive::from_u64(1)); + } } diff --git a/src/util/iter.rs b/src/util/iter.rs index 69726136..4be55a4f 100644 --- a/src/util/iter.rs +++ b/src/util/iter.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to @@ -19,62 +19,62 @@ /// An iterator that returns pairs of elements pub struct Pair - where I: Iterator + where I: Iterator { - iter: I, - last_elem: Option + iter: I, + last_elem: Option } impl Iterator for Pair { - type Item = (I::Item, I::Item); + type Item = (I::Item, I::Item); - #[inline] - fn next(&mut self) -> Option<(I::Item, I::Item)> { - let elem1 = self.iter.next(); - if elem1.is_none() { - None - } else { - let elem2 = self.iter.next(); - if elem2.is_none() { - self.last_elem = elem1; - None - } else { - Some((elem1.unwrap(), elem2.unwrap())) - } + #[inline] + fn next(&mut self) -> Option<(I::Item, I::Item)> { + let elem1 = self.iter.next(); + if elem1.is_none() { + None + } else { + let elem2 = self.iter.next(); + if elem2.is_none() { + self.last_elem = elem1; + None + } else { + Some((elem1.unwrap(), elem2.unwrap())) + } + } } - } - #[inline] - fn size_hint(&self) -> (usize, Option) { - match self.iter.size_hint() { - (n, None) => (n/2, None), - (n, Some(m)) => (n/2, Some(m/2)) + #[inline] + fn size_hint(&self) -> (usize, Option) { + match self.iter.size_hint() { + (n, None) => (n/2, None), + (n, Some(m)) => (n/2, Some(m/2)) + } } - } } impl Pair { - /// Returns the last element of the 
iterator if there were an odd - /// number of elements remaining before it was Pair-ified. - #[inline] - pub fn remainder(self) -> Option { - self.last_elem - } + /// Returns the last element of the iterator if there were an odd + /// number of elements remaining before it was Pair-ified. + #[inline] + pub fn remainder(self) -> Option { + self.last_elem + } } /// Returns an iterator that returns elements of the original iterator 2 at a time pub trait Pairable { - /// Returns an iterator that returns elements of the original iterator 2 at a time - fn pair(self) -> Pair; + /// Returns an iterator that returns elements of the original iterator 2 at a time + fn pair(self) -> Pair; } impl Pairable for I { - /// Creates an iterator that yields pairs ef elements from the underlying - /// iterator, yielding `None` when there are fewer than two elements to - /// return. - #[inline] - fn pair(self) -> Pair { - Pair {iter: self, last_elem: None } - } + /// Creates an iterator that yields pairs ef elements from the underlying + /// iterator, yielding `None` when there are fewer than two elements to + /// return. + #[inline] + fn pair(self) -> Pair { + Pair {iter: self, last_elem: None } + } } diff --git a/src/util/misc.rs b/src/util/misc.rs index 59f9d414..035b82b3 100644 --- a/src/util/misc.rs +++ b/src/util/misc.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to @@ -22,132 +22,132 @@ use util::iter::Pairable; /// Convert a hexadecimal-encoded string to its corresponding bytes pub fn hex_bytes(s: &str) -> Result, Error> { - let mut v = vec![]; - let mut iter = s.chars().pair(); - // Do the parsing - try!(iter.fold(Ok(()), |e, (f, s)| - if e.is_err() { e } - else { - match (f.to_digit(16), s.to_digit(16)) { - (None, _) => Err(Error::Detail( - format!("expected hex, got {:}", f), - Box::new(Error::ParseFailed) + let mut v = vec![]; + let mut iter = s.chars().pair(); + // Do the parsing + try!(iter.fold(Ok(()), |e, (f, s)| + if e.is_err() { e } + else { + match (f.to_digit(16), s.to_digit(16)) { + (None, _) => Err(Error::Detail( + format!("expected hex, got {:}", f), + Box::new(Error::ParseFailed) + )), + (_, None) => Err(Error::Detail( + format!("expected hex, got {:}", s), + Box::new(Error::ParseFailed) + )), + (Some(f), Some(s)) => { v.push((f * 0x10 + s) as u8); Ok(()) } + } + } + )); + // Check that there was no remainder + match iter.remainder() { + Some(_) => Err(Error::Detail( + format!("hexstring of odd length"), + Box::new(Error::ParseFailed) )), - (_, None) => Err(Error::Detail( - format!("expected hex, got {:}", s), - Box::new(Error::ParseFailed) - )), - (Some(f), Some(s)) => { v.push((f * 0x10 + s) as u8); Ok(()) } - } + None => Ok(v) } - )); - // Check that there was no remainder - match iter.remainder() { - Some(_) => Err(Error::Detail( - format!("hexstring of odd length"), - Box::new(Error::ParseFailed) - )), - None => Ok(v) - } } /// Dump an error message to the screen /// TODO all uses of this should be replaced with some sort of logging infrastructure pub fn consume_err(s: &str, res: Result) { - match res { - Ok(_) => {}, - Err(e) => { println!("{}: {:?}", s, e); } - }; + match res { + Ok(_) => {}, + Err(e) => { println!("{}: {:?}", s, e); } + }; } /// Search for `needle` in the vector `haystack` and remove every /// instance of it, returning the number of instances removed. 
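/// (Editorial aside, not part of the original patch: concretely, a direct
/// push of `n` bytes advances the scan cursor by `n + 1`, and
/// OP_PUSHDATA1/2/4 advance by 2, 3 and 5 bytes respectively (the opcode
/// plus its length field), so a byte sequence that only occurs inside a
/// direct push is not removed.)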
/// Loops through the vector opcode by opcode, skipping pushed data. pub fn script_find_and_remove(haystack: &mut Vec, needle: &[u8]) -> usize { - if needle.len() > haystack.len() { return 0; } - if needle.len() == 0 { return 0; } + if needle.len() > haystack.len() { return 0; } + if needle.len() == 0 { return 0; } - let mut top = haystack.len() - needle.len(); - let mut n_deleted = 0; + let mut top = haystack.len() - needle.len(); + let mut n_deleted = 0; - let mut i = 0; - while i <= top { - if &haystack[i..(i + needle.len())] == needle { - let v = &mut haystack; - for j in i..top { - v.swap(j + needle.len(), j); - } - n_deleted += 1; - // This is ugly but prevents infinite loop in case of overflow - let overflow = top < needle.len(); - top -= needle.len(); - if overflow { break; } - } else { - i += match opcodes::All::from_u8((*haystack)[i]).classify() { - opcodes::Class::PushBytes(n) => n + 1, - opcodes::Class::Ordinary(opcodes::Ordinary::OP_PUSHDATA1) => 2, - opcodes::Class::Ordinary(opcodes::Ordinary::OP_PUSHDATA2) => 3, - opcodes::Class::Ordinary(opcodes::Ordinary::OP_PUSHDATA4) => 5, - _ => 1 - }; + let mut i = 0; + while i <= top { + if &haystack[i..(i + needle.len())] == needle { + let v = &mut haystack; + for j in i..top { + v.swap(j + needle.len(), j); + } + n_deleted += 1; + // This is ugly but prevents infinite loop in case of overflow + let overflow = top < needle.len(); + top -= needle.len(); + if overflow { break; } + } else { + i += match opcodes::All::from_u8((*haystack)[i]).classify() { + opcodes::Class::PushBytes(n) => n as usize + 1, + opcodes::Class::Ordinary(opcodes::Ordinary::OP_PUSHDATA1) => 2, + opcodes::Class::Ordinary(opcodes::Ordinary::OP_PUSHDATA2) => 3, + opcodes::Class::Ordinary(opcodes::Ordinary::OP_PUSHDATA4) => 5, + _ => 1 + }; + } } - } - haystack.truncate(top + needle.len()); - n_deleted + haystack.truncate(top + needle.len()); + n_deleted } #[cfg(test)] mod tests { - use std::prelude::*; + use std::prelude::*; - use super::script_find_and_remove; - use super::hex_bytes; + use super::script_find_and_remove; + use super::hex_bytes; - #[test] - fn test_script_find_and_remove() { - let mut v = vec![101u8, 102, 103, 104, 102, 103, 104, 102, 103, 104, 105, 106, 107, 108, 109]; + #[test] + fn test_script_find_and_remove() { + let mut v = vec![101u8, 102, 103, 104, 102, 103, 104, 102, 103, 104, 105, 106, 107, 108, 109]; - assert_eq!(script_find_and_remove(&mut v, []), 0); - assert_eq!(script_find_and_remove(&mut v, [105, 105, 105]), 0); - assert_eq!(v, vec![101, 102, 103, 104, 102, 103, 104, 102, 103, 104, 105, 106, 107, 108, 109]); + assert_eq!(script_find_and_remove(&mut v, []), 0); + assert_eq!(script_find_and_remove(&mut v, [105, 105, 105]), 0); + assert_eq!(v, vec![101, 102, 103, 104, 102, 103, 104, 102, 103, 104, 105, 106, 107, 108, 109]); - assert_eq!(script_find_and_remove(&mut v, [105, 106, 107]), 1); - assert_eq!(v, vec![101, 102, 103, 104, 102, 103, 104, 102, 103, 104, 108, 109]); + assert_eq!(script_find_and_remove(&mut v, [105, 106, 107]), 1); + assert_eq!(v, vec![101, 102, 103, 104, 102, 103, 104, 102, 103, 104, 108, 109]); - assert_eq!(script_find_and_remove(&mut v, [104, 108, 109]), 1); - assert_eq!(v, vec![101, 102, 103, 104, 102, 103, 104, 102, 103]); + assert_eq!(script_find_and_remove(&mut v, [104, 108, 109]), 1); + assert_eq!(v, vec![101, 102, 103, 104, 102, 103, 104, 102, 103]); - assert_eq!(script_find_and_remove(&mut v, [101]), 1); - assert_eq!(v, vec![102, 103, 104, 102, 103, 104, 102, 103]); + assert_eq!(script_find_and_remove(&mut 
v, [101]), 1); + assert_eq!(v, vec![102, 103, 104, 102, 103, 104, 102, 103]); - assert_eq!(script_find_and_remove(&mut v, [102]), 3); - assert_eq!(v, vec![103, 104, 103, 104, 103]); + assert_eq!(script_find_and_remove(&mut v, [102]), 3); + assert_eq!(v, vec![103, 104, 103, 104, 103]); - assert_eq!(script_find_and_remove(&mut v, [103, 104]), 2); - assert_eq!(v, vec![103]); + assert_eq!(script_find_and_remove(&mut v, [103, 104]), 2); + assert_eq!(v, vec![103]); - assert_eq!(script_find_and_remove(&mut v, [105, 105, 5]), 0); - assert_eq!(script_find_and_remove(&mut v, [105]), 0); - assert_eq!(script_find_and_remove(&mut v, [103]), 1); - assert_eq!(v, vec![]); + assert_eq!(script_find_and_remove(&mut v, [105, 105, 5]), 0); + assert_eq!(script_find_and_remove(&mut v, [105]), 0); + assert_eq!(script_find_and_remove(&mut v, [103]), 1); + assert_eq!(v, vec![]); - assert_eq!(script_find_and_remove(&mut v, [105, 105, 5]), 0); - assert_eq!(script_find_and_remove(&mut v, [105]), 0); - } + assert_eq!(script_find_and_remove(&mut v, [105, 105, 5]), 0); + assert_eq!(script_find_and_remove(&mut v, [105]), 0); + } - #[test] - fn test_script_codesep_remove() { - let mut s = vec![33u8, 3, 132, 121, 160, 250, 153, 140, 211, 82, 89, 162, 239, 10, 122, 92, 104, 102, 44, 20, 116, 248, 140, 203, 109, 8, 167, 103, 123, 190, 199, 242, 32, 65, 173, 171, 33, 3, 132, 121, 160, 250, 153, 140, 211, 82, 89, 162, 239, 10, 122, 92, 104, 102, 44, 20, 116, 248, 140, 203, 109, 8, 167, 103, 123, 190, 199, 242, 32, 65, 173, 171, 81]; - assert_eq!(script_find_and_remove(&mut s, [171]), 2); - assert_eq!(s, vec![33, 3, 132, 121, 160, 250, 153, 140, 211, 82, 89, 162, 239, 10, 122, 92, 104, 102, 44, 20, 116, 248, 140, 203, 109, 8, 167, 103, 123, 190, 199, 242, 32, 65, 173, 33, 3, 132, 121, 160, 250, 153, 140, 211, 82, 89, 162, 239, 10, 122, 92, 104, 102, 44, 20, 116, 248, 140, 203, 109, 8, 167, 103, 123, 190, 199, 242, 32, 65, 173, 81]); - } + #[test] + fn test_script_codesep_remove() { + let mut s = vec![33u8, 3, 132, 121, 160, 250, 153, 140, 211, 82, 89, 162, 239, 10, 122, 92, 104, 102, 44, 20, 116, 248, 140, 203, 109, 8, 167, 103, 123, 190, 199, 242, 32, 65, 173, 171, 33, 3, 132, 121, 160, 250, 153, 140, 211, 82, 89, 162, 239, 10, 122, 92, 104, 102, 44, 20, 116, 248, 140, 203, 109, 8, 167, 103, 123, 190, 199, 242, 32, 65, 173, 171, 81]; + assert_eq!(script_find_and_remove(&mut s, [171]), 2); + assert_eq!(s, vec![33, 3, 132, 121, 160, 250, 153, 140, 211, 82, 89, 162, 239, 10, 122, 92, 104, 102, 44, 20, 116, 248, 140, 203, 109, 8, 167, 103, 123, 190, 199, 242, 32, 65, 173, 33, 3, 132, 121, 160, 250, 153, 140, 211, 82, 89, 162, 239, 10, 122, 92, 104, 102, 44, 20, 116, 248, 140, 203, 109, 8, 167, 103, 123, 190, 199, 242, 32, 65, 173, 81]); + } - #[test] - fn test_hex_bytes() { - assert_eq!(&hex_bytes("abcd").unwrap(), &[171u8, 205]); - assert!(hex_bytes("abcde").is_err()); - assert!(hex_bytes("aBcDeF").is_ok()); - assert!(hex_bytes("aBcD4eFL").is_err()); - } + #[test] + fn test_hex_bytes() { + assert_eq!(&hex_bytes("abcd").unwrap(), &[171u8, 205]); + assert!(hex_bytes("abcde").is_err()); + assert!(hex_bytes("aBcDeF").is_ok()); + assert!(hex_bytes("aBcD4eFL").is_err()); + } } diff --git a/src/util/patricia_tree.rs b/src/util/patricia_tree.rs index 11ecb5ae..8c83bbe1 100644 --- a/src/util/patricia_tree.rs +++ b/src/util/patricia_tree.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // // To the extent possible under law, the author(s) have dedicated all // copyright and related and 
neighboring rights to this software to @@ -31,691 +31,691 @@ use util::BitArray; /// Patricia troo pub struct PatriciaTree { - data: Option, - child_l: Option>>, - child_r: Option>>, - skip_prefix: K, - skip_len: u8 + data: Option, + child_l: Option>>, + child_r: Option>>, + skip_prefix: K, + skip_len: u8 } impl PatriciaTree - where K: BitArray + cmp::Eq + Zero + One + - ops::BitXor + - ops::Add + - ops::Shr + - ops::Shl + where K: BitArray + cmp::Eq + Zero + One + + ops::BitXor + + ops::Add + + ops::Shr + + ops::Shl { - /// Constructs a new Patricia tree - pub fn new() -> PatriciaTree { - PatriciaTree { - data: None, - child_l: None, - child_r: None, - skip_prefix: Zero::zero(), - skip_len: 0 + /// Constructs a new Patricia tree + pub fn new() -> PatriciaTree { + PatriciaTree { + data: None, + child_l: None, + child_r: None, + skip_prefix: Zero::zero(), + skip_len: 0 + } } - } - /// Lookup a value by exactly matching `key` and return a referenc - pub fn lookup_mut<'a>(&'a mut self, key: &K, key_len: usize) -> Option<&'a mut V> { - // Caution: `lookup_mut` never modifies its self parameter (in fact its - // internal recursion uses a non-mutable self, so we are OK to just - // transmute our self pointer into a mutable self before passing it in. - use std::mem::transmute; - unsafe { transmute(self.lookup(key, key_len)) } - } + /// Lookup a value by exactly matching `key` and return a referenc + pub fn lookup_mut<'a>(&'a mut self, key: &K, key_len: usize) -> Option<&'a mut V> { + // Caution: `lookup_mut` never modifies its self parameter (in fact its + // internal recursion uses a non-mutable self, so we are OK to just + // transmute our self pointer into a mutable self before passing it in. + use std::mem::transmute; + unsafe { transmute(self.lookup(key, key_len)) } + } - /// Lookup a value by exactly matching `key` and return a mutable reference - pub fn lookup<'a>(&'a self, key: &K, key_len: usize) -> Option<&'a V> { - let mut node = self; - let mut key_idx = 0; + /// Lookup a value by exactly matching `key` and return a mutable reference + pub fn lookup<'a>(&'a self, key: &K, key_len: usize) -> Option<&'a V> { + let mut node = self; + let mut key_idx = 0; - loop { - // If the search key is shorter than the node prefix, there is no - // way we can match, so fail. - if key_len - key_idx < node.skip_len as usize { - return None; - } + loop { + // If the search key is shorter than the node prefix, there is no + // way we can match, so fail. + if key_len - key_idx < node.skip_len as usize { + return None; + } - // Key fails to match prefix --- no match - if node.skip_prefix != key.bit_slice(key_idx, key_idx + node.skip_len as usize) { - return None; - } + // Key fails to match prefix --- no match + if node.skip_prefix != key.bit_slice(key_idx, key_idx + node.skip_len as usize) { + return None; + } - // Key matches prefix: if they are an exact match, return the data - if node.skip_len as usize == key_len - key_idx { - return node.data.as_ref(); - } else { - // Key matches prefix: search key longer than node key, recurse - key_idx += 1 + node.skip_len as usize; - let subtree = if key.bit(key_idx - 1) { &node.child_r } else { &node.child_l }; - match subtree { - &Some(ref bx) => { - node = &**bx; // bx is a &Box here, so &**bx gets &U - } - &None => { return None; } - } - } - } // end loop - } - - /// Inserts a value with key `key`, returning true on success. If a value is already - /// stored against `key`, do nothing and return false. 
- #[inline] - pub fn insert(&mut self, key: &K, key_len: usize, value: V) -> bool { - self.real_insert(key, key_len, value, false) - } - - /// Inserts a value with key `key`, returning true on success. If a value is already - /// stored against `key`, overwrite it and return false. - #[inline] - pub fn insert_or_update(&mut self, key: &K, key_len: usize, value: V) -> bool { - self.real_insert(key, key_len, value, true) - } - - fn real_insert(&mut self, key: &K, key_len: usize, value: V, overwrite: bool) -> bool { - let mut node = self; - let mut idx = 0; - loop { - // Mask in case search key is shorter than node key - let slice_len = cmp::min(node.skip_len as usize, key_len - idx); - let masked_prefix = node.skip_prefix.mask(slice_len); - let key_slice = key.bit_slice(idx, idx + slice_len); - - // Prefixes do not match: split key - if masked_prefix != key_slice { - let diff = (masked_prefix ^ key_slice).trailing_zeros(); - - // Remove the old node's children - let child_l = node.child_l.take(); - let child_r = node.child_r.take(); - let value_neighbor = node.data.take(); - let tmp = node; // borrowck hack - let (insert, neighbor) = if key_slice.bit(diff) - { (&mut tmp.child_r, &mut tmp.child_l) } - else { (&mut tmp.child_l, &mut tmp.child_r) }; - *insert = Some(Box::new(PatriciaTree { - data: None, - child_l: None, - child_r: None, - skip_prefix: key.bit_slice(idx + diff + 1, key_len), - skip_len: (key_len - idx - diff - 1) as u8 - })); - *neighbor = Some(Box::new(PatriciaTree { - data: value_neighbor, - child_l: child_l, - child_r: child_r, - skip_prefix: tmp.skip_prefix >> (diff + 1), - skip_len: tmp.skip_len - diff as u8 - 1 - })); - // Chop the prefix down - tmp.skip_len = diff as u8; - tmp.skip_prefix = tmp.skip_prefix.mask(diff); - // Recurse - idx += 1 + diff; - node = &mut **insert.as_mut().unwrap(); - } - // Prefixes match - else { - let slice_len = key_len - idx; - // Search key is shorter than skip prefix: truncate the prefix and attach - // the old data as a child - if node.skip_len as usize > slice_len { - // Remove the old node's children - let child_l = node.child_l.take(); - let child_r = node.child_r.take(); - let value_neighbor = node.data.take(); - // Put the old data in a new child, with the remainder of the prefix - let new_child = if node.skip_prefix.bit(slice_len) - { &mut node.child_r } else { &mut node.child_l }; - *new_child = Some(Box::new(PatriciaTree { - data: value_neighbor, - child_l: child_l, - child_r: child_r, - skip_prefix: node.skip_prefix >> (slice_len + 1), - skip_len: node.skip_len - slice_len as u8 - 1 - })); - // Chop the prefix down and put the new data in place - node.skip_len = slice_len as u8; - node.skip_prefix = key_slice; - node.data = Some(value); - return true; - } - // If we have an exact match, great, insert it - else if node.skip_len as usize == slice_len { - if node.data.is_none() { - node.data = Some(value); - return true; - } - if overwrite { - node.data = Some(value); - } - return false; - } - // Search key longer than node key, recurse - else { - let tmp = node; // hack to appease borrowck - idx += tmp.skip_len as usize + 1; - let subtree = if key.bit(idx - 1) - { &mut tmp.child_r } else { &mut tmp.child_l }; - // Recurse, adding a new node if necessary - if subtree.is_none() { - *subtree = Some(Box::new(PatriciaTree { - data: None, - child_l: None, - child_r: None, - skip_prefix: key.bit_slice(idx, key_len), - skip_len: key_len as u8 - idx as u8 - })); - } - // subtree.get_mut_ref is a &mut Box here, so &mut ** gets a &mut U - node 
= &mut **subtree.as_mut().unwrap(); - } // end search_len vs prefix len - } // end if prefixes match - } // end loop - } - - /// Deletes a value with key `key`, returning it on success. If no value with - /// the given key is found, return None - pub fn delete(&mut self, key: &K, key_len: usize) -> Option { - /// Return value is (deletable, actual return value), where `deletable` is true - /// is true when the entire node can be deleted (i.e. it has no children) - fn recurse(tree: &mut PatriciaTree, key: &K, key_len: usize) -> (bool, Option) - where K: BitArray + cmp::Eq + Zero + One + - ops::Add + - ops::Shr + - ops::Shl - { - // If the search key is shorter than the node prefix, there is no - // way we can match, so fail. - if key_len < tree.skip_len as usize { - return (false, None); - } - - // Key fails to match prefix --- no match - if tree.skip_prefix != key.mask(tree.skip_len as usize) { - return (false, None); - } - - // If we are here, the key matches the prefix - if tree.skip_len as usize == key_len { - // Exact match -- delete and return - let ret = tree.data.take(); - let bit = tree.child_r.is_some(); - // First try to consolidate if there is only one child - if tree.child_l.is_some() && tree.child_r.is_some() { - // Two children means we cannot consolidate or delete - return (false, ret); - } - match (tree.child_l.take(), tree.child_r.take()) { - (Some(_), Some(_)) => unreachable!(), - (Some(box PatriciaTree { data, child_l, child_r, skip_prefix, skip_len }), None) | - (None, Some(box PatriciaTree { data, child_l, child_r, skip_prefix, skip_len })) => { - tree.data = data; - tree.child_l = child_l; - tree.child_r = child_r; - let new_bit = if bit { let ret: K = One::one(); - ret << (tree.skip_len as usize) } - else { Zero::zero() }; - tree.skip_prefix = tree.skip_prefix + - new_bit + - (skip_prefix << (1 + tree.skip_len as usize)); - tree.skip_len += 1 + skip_len; - return (false, ret); - } - // No children means this node is deletable - (None, None) => { return (true, ret); } - } - } - - // Otherwise, the key is longer than the prefix and we need to recurse - let next_bit = key.bit(tree.skip_len as usize); - // Recursively get the return value. This awkward scope is required - // to shorten the time we mutably borrow the node's children -- we - // might want to borrow the sibling later, so the borrow needs to end. - let ret = { - let target = if next_bit { &mut tree.child_r } else { &mut tree.child_l }; - - // If we can't recurse, fail - if target.is_none() { - return (false, None); - } - // Otherwise, do it - let (delete_child, ret) = recurse(&mut **target.as_mut().unwrap(), - &(*key >> (tree.skip_len as usize + 1)), - key_len - tree.skip_len as usize - 1); - if delete_child { - target.take(); + // Key matches prefix: if they are an exact match, return the data + if node.skip_len as usize == key_len - key_idx { + return node.data.as_ref(); + } else { + // Key matches prefix: search key longer than node key, recurse + key_idx += 1 + node.skip_len as usize; + let subtree = if key.bit(key_idx - 1) { &node.child_r } else { &node.child_l }; + match subtree { + &Some(ref bx) => { + node = &**bx; // bx is a &Box here, so &**bx gets &U + } + &None => { return None; } + } + } + } // end loop + } + + /// Inserts a value with key `key`, returning true on success. If a value is already + /// stored against `key`, do nothing and return false. 
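    /// (Editorial example, not part of the original patch: keys are bit
    /// strings with an explicit length, so the same `Uint256` can back
    /// several entries. The tests below do
    ///     tree.insert(&key, 100, 100u32);
    ///     tree.insert(&key, 120, 100u32);
    /// which stores one value under the first 100 bits of `key` and another
    /// under the first 120, while `tree.lookup(&key, 101)` finds neither.)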
+ #[inline] + pub fn insert(&mut self, key: &K, key_len: usize, value: V) -> bool { + self.real_insert(key, key_len, value, false) + } + + /// Inserts a value with key `key`, returning true on success. If a value is already + /// stored against `key`, overwrite it and return false. + #[inline] + pub fn insert_or_update(&mut self, key: &K, key_len: usize, value: V) -> bool { + self.real_insert(key, key_len, value, true) + } + + fn real_insert(&mut self, key: &K, key_len: usize, value: V, overwrite: bool) -> bool { + let mut node = self; + let mut idx = 0; + loop { + // Mask in case search key is shorter than node key + let slice_len = cmp::min(node.skip_len as usize, key_len - idx); + let masked_prefix = node.skip_prefix.mask(slice_len); + let key_slice = key.bit_slice(idx, idx + slice_len); + + // Prefixes do not match: split key + if masked_prefix != key_slice { + let diff = (masked_prefix ^ key_slice).trailing_zeros(); + + // Remove the old node's children + let child_l = node.child_l.take(); + let child_r = node.child_r.take(); + let value_neighbor = node.data.take(); + let tmp = node; // borrowck hack + let (insert, neighbor) = if key_slice.bit(diff) + { (&mut tmp.child_r, &mut tmp.child_l) } + else { (&mut tmp.child_l, &mut tmp.child_r) }; + *insert = Some(Box::new(PatriciaTree { + data: None, + child_l: None, + child_r: None, + skip_prefix: key.bit_slice(idx + diff + 1, key_len), + skip_len: (key_len - idx - diff - 1) as u8 + })); + *neighbor = Some(Box::new(PatriciaTree { + data: value_neighbor, + child_l: child_l, + child_r: child_r, + skip_prefix: tmp.skip_prefix >> (diff + 1), + skip_len: tmp.skip_len - diff as u8 - 1 + })); + // Chop the prefix down + tmp.skip_len = diff as u8; + tmp.skip_prefix = tmp.skip_prefix.mask(diff); + // Recurse + idx += 1 + diff; + node = &mut **insert.as_mut().unwrap(); + } + // Prefixes match + else { + let slice_len = key_len - idx; + // Search key is shorter than skip prefix: truncate the prefix and attach + // the old data as a child + if node.skip_len as usize > slice_len { + // Remove the old node's children + let child_l = node.child_l.take(); + let child_r = node.child_r.take(); + let value_neighbor = node.data.take(); + // Put the old data in a new child, with the remainder of the prefix + let new_child = if node.skip_prefix.bit(slice_len) + { &mut node.child_r } else { &mut node.child_l }; + *new_child = Some(Box::new(PatriciaTree { + data: value_neighbor, + child_l: child_l, + child_r: child_r, + skip_prefix: node.skip_prefix >> (slice_len + 1), + skip_len: node.skip_len - slice_len as u8 - 1 + })); + // Chop the prefix down and put the new data in place + node.skip_len = slice_len as u8; + node.skip_prefix = key_slice; + node.data = Some(value); + return true; + } + // If we have an exact match, great, insert it + else if node.skip_len as usize == slice_len { + if node.data.is_none() { + node.data = Some(value); + return true; + } + if overwrite { + node.data = Some(value); + } + return false; + } + // Search key longer than node key, recurse + else { + let tmp = node; // hack to appease borrowck + idx += tmp.skip_len as usize + 1; + let subtree = if key.bit(idx - 1) + { &mut tmp.child_r } else { &mut tmp.child_l }; + // Recurse, adding a new node if necessary + if subtree.is_none() { + *subtree = Some(Box::new(PatriciaTree { + data: None, + child_l: None, + child_r: None, + skip_prefix: key.bit_slice(idx, key_len), + skip_len: key_len as u8 - idx as u8 + })); + } + // subtree.get_mut_ref is a &mut Box here, so &mut ** gets a &mut U + node 
= &mut **subtree.as_mut().unwrap(); + } // end search_len vs prefix len + } // end if prefixes match + } // end loop + } + + /// Deletes a value with key `key`, returning it on success. If no value with + /// the given key is found, return None + pub fn delete(&mut self, key: &K, key_len: usize) -> Option { + /// Return value is (deletable, actual return value), where `deletable` is true + /// is true when the entire node can be deleted (i.e. it has no children) + fn recurse(tree: &mut PatriciaTree, key: &K, key_len: usize) -> (bool, Option) + where K: BitArray + cmp::Eq + Zero + One + + ops::Add + + ops::Shr + + ops::Shl + { + // If the search key is shorter than the node prefix, there is no + // way we can match, so fail. + if key_len < tree.skip_len as usize { + return (false, None); + } + + // Key fails to match prefix --- no match + if tree.skip_prefix != key.mask(tree.skip_len as usize) { + return (false, None); + } + + // If we are here, the key matches the prefix + if tree.skip_len as usize == key_len { + // Exact match -- delete and return + let ret = tree.data.take(); + let bit = tree.child_r.is_some(); + // First try to consolidate if there is only one child + if tree.child_l.is_some() && tree.child_r.is_some() { + // Two children means we cannot consolidate or delete + return (false, ret); + } + match (tree.child_l.take(), tree.child_r.take()) { + (Some(_), Some(_)) => unreachable!(), + (Some(box PatriciaTree { data, child_l, child_r, skip_prefix, skip_len }), None) | + (None, Some(box PatriciaTree { data, child_l, child_r, skip_prefix, skip_len })) => { + tree.data = data; + tree.child_l = child_l; + tree.child_r = child_r; + let new_bit = if bit { let ret: K = One::one(); + ret << (tree.skip_len as usize) } + else { Zero::zero() }; + tree.skip_prefix = tree.skip_prefix + + new_bit + + (skip_prefix << (1 + tree.skip_len as usize)); + tree.skip_len += 1 + skip_len; + return (false, ret); + } + // No children means this node is deletable + (None, None) => { return (true, ret); } + } + } + + // Otherwise, the key is longer than the prefix and we need to recurse + let next_bit = key.bit(tree.skip_len as usize); + // Recursively get the return value. This awkward scope is required + // to shorten the time we mutably borrow the node's children -- we + // might want to borrow the sibling later, so the borrow needs to end. + let ret = { + let target = if next_bit { &mut tree.child_r } else { &mut tree.child_l }; + + // If we can't recurse, fail + if target.is_none() { + return (false, None); + } + // Otherwise, do it + let (delete_child, ret) = recurse(&mut **target.as_mut().unwrap(), + &(*key >> (tree.skip_len as usize + 1)), + key_len - tree.skip_len as usize - 1); + if delete_child { + target.take(); + } + ret + }; + + // The above block may have deleted the target. If we now have only one + // child, merge it into the parent. (If we have no children, mark this + // node for deletion.) + if tree.data.is_some() { + // First though, if this is a data node, we can neither delete nor + // consolidate it. + return (false, ret); + } + + match (tree.child_r.is_some(), tree.child_l.take(), tree.child_r.take()) { + // Two children? Can't do anything, just sheepishly put them back + (_, Some(child_l), Some(child_r)) => { + tree.child_l = Some(child_l); + tree.child_r = Some(child_r); + return (false, ret); + } + // One child? 
Consolidate + (bit, Some(box PatriciaTree { data, child_l, child_r, skip_prefix, skip_len }), None) | + (bit, None, Some(box PatriciaTree { data, child_l, child_r, skip_prefix, skip_len })) => { + tree.data = data; + tree.child_l = child_l; + tree.child_r = child_r; + let new_bit = if bit { let ret: K = One::one(); + ret << (tree.skip_len as usize) } + else { Zero::zero() }; + tree.skip_prefix = tree.skip_prefix + + new_bit + + (skip_prefix << (1 + tree.skip_len as usize)); + tree.skip_len += 1 + skip_len; + return (false, ret); + } + // No children? Delete + (_, None, None) => { + return (true, ret); + } + } } + let (_, ret) = recurse(self, key, key_len); ret - }; + } - // The above block may have deleted the target. If we now have only one - // child, merge it into the parent. (If we have no children, mark this - // node for deletion.) - if tree.data.is_some() { - // First though, if this is a data node, we can neither delete nor - // consolidate it. - return (false, ret); - } - - match (tree.child_r.is_some(), tree.child_l.take(), tree.child_r.take()) { - // Two children? Can't do anything, just sheepishly put them back - (_, Some(child_l), Some(child_r)) => { - tree.child_l = Some(child_l); - tree.child_r = Some(child_r); - return (false, ret); + /// Count all the nodes + pub fn node_count(&self) -> usize { + fn recurse(node: &Option>>) -> usize { + match node { + &Some(ref node) => { 1 + recurse(&node.child_l) + recurse(&node.child_r) } + &None => 0 + } } - // One child? Consolidate - (bit, Some(box PatriciaTree { data, child_l, child_r, skip_prefix, skip_len }), None) | - (bit, None, Some(box PatriciaTree { data, child_l, child_r, skip_prefix, skip_len })) => { - tree.data = data; - tree.child_l = child_l; - tree.child_r = child_r; - let new_bit = if bit { let ret: K = One::one(); - ret << (tree.skip_len as usize) } - else { Zero::zero() }; - tree.skip_prefix = tree.skip_prefix + - new_bit + - (skip_prefix << (1 + tree.skip_len as usize)); - tree.skip_len += 1 + skip_len; - return (false, ret); + 1 + recurse(&self.child_l) + recurse(&self.child_r) + } + + /// Returns an iterator over all elements in the tree + pub fn iter<'a>(&'a self) -> Items<'a, K, V> { + Items { + node: Some(self), + parents: vec![], + started: false } - // No children? 
Delete - (_, None, None) => { - return (true, ret); + } + + /// Returns a mutable iterator over all elements in the tree + pub fn mut_iter<'a>(&'a mut self) -> MutItems<'a, K, V> { + MutItems { + node: self as *mut _, + parents: vec![], + started: false, + marker: marker::PhantomData } - } } - let (_, ret) = recurse(self, key, key_len); - ret - } - - /// Count all the nodes - pub fn node_count(&self) -> usize { - fn recurse(node: &Option>>) -> usize { - match node { - &Some(ref node) => { 1 + recurse(&node.child_l) + recurse(&node.child_r) } - &None => 0 - } - } - 1 + recurse(&self.child_l) + recurse(&self.child_r) - } - - /// Returns an iterator over all elements in the tree - pub fn iter<'a>(&'a self) -> Items<'a, K, V> { - Items { - node: Some(self), - parents: vec![], - started: false - } - } - - /// Returns a mutable iterator over all elements in the tree - pub fn mut_iter<'a>(&'a mut self) -> MutItems<'a, K, V> { - MutItems { - node: self as *mut _, - parents: vec![], - started: false, - marker: marker::PhantomData - } - } } impl Debug for PatriciaTree { - /// Print the entire tree - pub fn fmt<'a>(&'a self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - fn recurse<'a, K:BitArray, V:Debug>(tree: &'a PatriciaTree, f: &mut fmt::Formatter, depth: usize) -> Result<(), fmt::Error> { - for i in 0..tree.skip_len as usize { - try!(write!(f, "{:}", if tree.skip_prefix.bit(i) { 1 } else { 0 })); - } - try!(writeln!(f, ": {:?}", tree.data)); - // left gets no indentation - match tree.child_l { - Some(ref t) => { - for _ in 0..(depth + tree.skip_len as usize) { - try!(write!(f, "-")); - } - try!(write!(f, "0")); - try!(recurse(&**t, f, depth + tree.skip_len as usize + 1)); + /// Print the entire tree + pub fn fmt<'a>(&'a self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { + fn recurse<'a, K:BitArray, V:Debug>(tree: &'a PatriciaTree, f: &mut fmt::Formatter, depth: usize) -> Result<(), fmt::Error> { + for i in 0..tree.skip_len as usize { + try!(write!(f, "{:}", if tree.skip_prefix.bit(i) { 1 } else { 0 })); + } + try!(writeln!(f, ": {:?}", tree.data)); + // left gets no indentation + match tree.child_l { + Some(ref t) => { + for _ in 0..(depth + tree.skip_len as usize) { + try!(write!(f, "-")); + } + try!(write!(f, "0")); + try!(recurse(&**t, f, depth + tree.skip_len as usize + 1)); + } + None => { } + } + // right one gets indentation + match tree.child_r { + Some(ref t) => { + for _ in 0..(depth + tree.skip_len as usize) { + try!(write!(f, "_")); + } + try!(write!(f, "1")); + try!(recurse(&**t, f, depth + tree.skip_len as usize + 1)); + } + None => { } + } + Ok(()) } - None => { } - } - // right one gets indentation - match tree.child_r { - Some(ref t) => { - for _ in 0..(depth + tree.skip_len as usize) { - try!(write!(f, "_")); - } - try!(write!(f, "1")); - try!(recurse(&**t, f, depth + tree.skip_len as usize + 1)); - } - None => { } - } - Ok(()) + recurse(self, f, 0); } - recurse(self, f, 0); - } } impl ConsensusEncodable for PatriciaTree - where S: SimpleEncoder, - K: ConsensusEncodable, - V: ConsensusEncodable + where S: SimpleEncoder, + K: ConsensusEncodable, + V: ConsensusEncodable { - fn consensus_encode(&self, s: &mut S) -> Result<(), S::Error> { - // Depth-first serialization: serialize self, then children - try!(self.skip_prefix.consensus_encode(s)); - try!(self.skip_len.consensus_encode(s)); - try!(self.data.consensus_encode(s)); - try!(self.child_l.consensus_encode(s)); - try!(self.child_r.consensus_encode(s)); - Ok(()) - } + fn consensus_encode(&self, s: &mut S) -> 
Result<(), S::Error> { + // Depth-first serialization: serialize self, then children + try!(self.skip_prefix.consensus_encode(s)); + try!(self.skip_len.consensus_encode(s)); + try!(self.data.consensus_encode(s)); + try!(self.child_l.consensus_encode(s)); + try!(self.child_r.consensus_encode(s)); + Ok(()) + } } impl ConsensusDecodable for PatriciaTree - where D: SimpleDecoder, - K: ConsensusDecodable, - V: ConsensusDecodable + where D: SimpleDecoder, + K: ConsensusDecodable, + V: ConsensusDecodable { - fn consensus_decode(d: &mut D) -> Result, D::Error> { - Ok(PatriciaTree { - skip_prefix: try!(ConsensusDecodable::consensus_decode(d)), - skip_len: try!(ConsensusDecodable::consensus_decode(d)), - data: try!(ConsensusDecodable::consensus_decode(d)), - child_l: try!(ConsensusDecodable::consensus_decode(d)), - child_r: try!(ConsensusDecodable::consensus_decode(d)) - }) - } + fn consensus_decode(d: &mut D) -> Result, D::Error> { + Ok(PatriciaTree { + skip_prefix: try!(ConsensusDecodable::consensus_decode(d)), + skip_len: try!(ConsensusDecodable::consensus_decode(d)), + data: try!(ConsensusDecodable::consensus_decode(d)), + child_l: try!(ConsensusDecodable::consensus_decode(d)), + child_r: try!(ConsensusDecodable::consensus_decode(d)) + }) + } } /// Iterator pub struct Items<'tree, K: 'tree, V: 'tree> { - started: bool, - node: Option<&'tree PatriciaTree>, - parents: Vec<&'tree PatriciaTree> + started: bool, + node: Option<&'tree PatriciaTree>, + parents: Vec<&'tree PatriciaTree> } /// Mutable iterator pub struct MutItems<'tree, K: 'tree, V: 'tree> { - started: bool, - node: *mut PatriciaTree, - parents: Vec<*mut PatriciaTree>, - marker: marker::PhantomData<&'tree PatriciaTree> + started: bool, + node: *mut PatriciaTree, + parents: Vec<*mut PatriciaTree>, + marker: marker::PhantomData<&'tree PatriciaTree> } impl<'a, K, V> Iterator for Items<'a, K, V> { - type Item = &'a V; + type Item = &'a V; - fn next(&mut self) -> Option<&'a V> { - fn borrow_opt<'a, K, V>(opt_ptr: &'a Option>>) -> Option<&'a PatriciaTree> { - opt_ptr.as_ref().map(|b| &**b) - } - - // If we haven't started, maybe return the "last" return value, - // which will be the root node. - if !self.started { - if self.node.is_some() && (**self.node.as_ref().unwrap()).data.is_some() { - return self.node.unwrap().data.as_ref(); - } - self.started = true; - } - - // Find next data-containing node - while self.node.is_some() { - let mut node = self.node.take(); - // Try to go left - let child_l = borrow_opt(&node.unwrap().child_l); - if child_l.is_some() { - self.parents.push(node.unwrap()); - self.node = child_l; - // Try to go right, going back up the tree if necessary - } else { - while node.is_some() { - let child_r = borrow_opt(&node.unwrap().child_r); - if child_r.is_some() { - self.node = child_r; - break; - } - node = self.parents.pop(); + fn next(&mut self) -> Option<&'a V> { + fn borrow_opt<'a, K, V>(opt_ptr: &'a Option>>) -> Option<&'a PatriciaTree> { + opt_ptr.as_ref().map(|b| &**b) } - } - // Stop if we've found data. - if self.node.is_some() && self.node.unwrap().data.is_some() { - break; - } - } // end loop - // Return data - self.node.and_then(|node| node.data.as_ref()) - } + + // If we haven't started, maybe return the "last" return value, + // which will be the root node. 
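        // (Editorial note, not part of the original patch: after this root
        // check the loop below walks the tree depth first, descending into a
        // left child when one exists and otherwise looking for a right child,
        // first at the current node and then at ancestors popped off
        // `parents`, yielding only nodes whose `data` is `Some`.)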
+ if !self.started { + if self.node.is_some() && (**self.node.as_ref().unwrap()).data.is_some() { + return self.node.unwrap().data.as_ref(); + } + self.started = true; + } + + // Find next data-containing node + while self.node.is_some() { + let mut node = self.node.take(); + // Try to go left + let child_l = borrow_opt(&node.unwrap().child_l); + if child_l.is_some() { + self.parents.push(node.unwrap()); + self.node = child_l; + // Try to go right, going back up the tree if necessary + } else { + while node.is_some() { + let child_r = borrow_opt(&node.unwrap().child_r); + if child_r.is_some() { + self.node = child_r; + break; + } + node = self.parents.pop(); + } + } + // Stop if we've found data. + if self.node.is_some() && self.node.unwrap().data.is_some() { + break; + } + } // end loop + // Return data + self.node.and_then(|node| node.data.as_ref()) + } } impl<'a, K, V> Iterator for MutItems<'a, K, V> { - type Item = &'a mut V; + type Item = &'a mut V; - fn next(&mut self) -> Option<&'a mut V> { - fn borrow_opt<'a, K, V>(opt_ptr: &'a Option>>) -> *mut PatriciaTree { - match *opt_ptr { - Some(ref data) => &**data as *const _ as *mut _, - None => ptr::null_mut() - } - } - - // If we haven't started, maybe return the "last" return value, - // which will be the root node. - if !self.started { - unsafe { - if !self.node.is_null() && (*self.node).data.is_some() { - return (*self.node).data.as_mut(); + fn next(&mut self) -> Option<&'a mut V> { + fn borrow_opt<'a, K, V>(opt_ptr: &'a Option>>) -> *mut PatriciaTree { + match *opt_ptr { + Some(ref data) => &**data as *const _ as *mut _, + None => ptr::null_mut() + } } - } - self.started = true; - } - // Find next data-containing node - while !self.node.is_null() { - // Try to go left - let child_l = unsafe { borrow_opt(&(*self.node).child_l) }; - if !child_l.is_null() { - self.parents.push(self.node); - self.node = child_l; - // Try to go right, going back up the tree if necessary - } else { + // If we haven't started, maybe return the "last" return value, + // which will be the root node. + if !self.started { + unsafe { + if !self.node.is_null() && (*self.node).data.is_some() { + return (*self.node).data.as_mut(); + } + } + self.started = true; + } + + // Find next data-containing node while !self.node.is_null() { - let child_r = unsafe { borrow_opt(&(*self.node).child_r) }; - if !child_r.is_null() { - self.node = child_r; - break; - } - self.node = self.parents.pop().unwrap_or(ptr::null_mut()); + // Try to go left + let child_l = unsafe { borrow_opt(&(*self.node).child_l) }; + if !child_l.is_null() { + self.parents.push(self.node); + self.node = child_l; + // Try to go right, going back up the tree if necessary + } else { + while !self.node.is_null() { + let child_r = unsafe { borrow_opt(&(*self.node).child_r) }; + if !child_r.is_null() { + self.node = child_r; + break; + } + self.node = self.parents.pop().unwrap_or(ptr::null_mut()); + } + } + // Stop if we've found data. + if !self.node.is_null() && unsafe { (*self.node).data.is_some() } { + break; + } + } // end loop + // Return data + if !self.node.is_null() { + unsafe { (*self.node).data.as_mut() } + } else { + None } - } - // Stop if we've found data. 
- if !self.node.is_null() && unsafe { (*self.node).data.is_some() } { - break; - } - } // end loop - // Return data - if !self.node.is_null() { - unsafe { (*self.node).data.as_mut() } - } else { - None } - } } #[cfg(test)] mod tests { - use std::prelude::*; - use std::io; - use std::num::Zero; + use std::prelude::*; + use std::io; + use std::num::Zero; - use network::serialize::{deserialize, serialize}; - use util::hash::Sha256dHash; - use util::uint::Uint128; - use util::uint::Uint256; - use util::patricia_tree::PatriciaTree; + use network::serialize::{deserialize, serialize}; + use util::hash::Sha256dHash; + use util::uint::Uint128; + use util::uint::Uint256; + use util::patricia_tree::PatriciaTree; - #[test] - fn patricia_single_insert_lookup_delete_test() { - let mut key: Uint256 = FromPrimitive::from_u64(0xDEADBEEFDEADBEEF).unwrap(); - key = key + (key << 64); + #[test] + fn patricia_single_insert_lookup_delete_test() { + let mut key: Uint256 = FromPrimitive::from_u64(0xDEADBEEFDEADBEEF).unwrap(); + key = key + (key << 64); - let mut tree = PatriciaTree::new(); - tree.insert(&key, 100, 100u32); - tree.insert(&key, 120, 100u32); + let mut tree = PatriciaTree::new(); + tree.insert(&key, 100, 100u32); + tree.insert(&key, 120, 100u32); - assert_eq!(tree.lookup(&key, 100), Some(&100u32)); - assert_eq!(tree.lookup(&key, 101), None); - assert_eq!(tree.lookup(&key, 99), None); - assert_eq!(tree.delete(&key, 100), Some(100u32)); - } - - #[test] - fn patricia_insert_lookup_delete_test() { - let mut tree = PatriciaTree::new(); - let mut hashes = vec![]; - for i in 0u32..5000 { - let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); - tree.insert(&hash, 250, i); - hashes.push(hash); + assert_eq!(tree.lookup(&key, 100), Some(&100u32)); + assert_eq!(tree.lookup(&key, 101), None); + assert_eq!(tree.lookup(&key, 99), None); + assert_eq!(tree.delete(&key, 100), Some(100u32)); } - // Check that all inserts are correct - for (n, hash) in hashes.iter().enumerate() { - let ii = n as u32; - let ret = tree.lookup(hash, 250); - assert_eq!(ret, Some(&ii)); + #[test] + fn patricia_insert_lookup_delete_test() { + let mut tree = PatriciaTree::new(); + let mut hashes = vec![]; + for i in 0u32..5000 { + let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); + tree.insert(&hash, 250, i); + hashes.push(hash); + } + + // Check that all inserts are correct + for (n, hash) in hashes.iter().enumerate() { + let ii = n as u32; + let ret = tree.lookup(hash, 250); + assert_eq!(ret, Some(&ii)); + } + + // Delete all the odd-numbered nodes + for (n, hash) in hashes.iter().enumerate() { + if n % 2 == 1 { + let ii = n as u32; + let ret = tree.delete(hash, 250); + assert_eq!(ret, Some(ii)); + } + } + + // Confirm all is correct + for (n, hash) in hashes.iter().enumerate() { + let ii = n as u32; + let ret = tree.lookup(hash, 250); + if n % 2 == 0 { + assert_eq!(ret, Some(&ii)); + } else { + assert_eq!(ret, None); + } + } } - // Delete all the odd-numbered nodes - for (n, hash) in hashes.iter().enumerate() { - if n % 2 == 1 { - let ii = n as u32; - let ret = tree.delete(hash, 250); - assert_eq!(ret, Some(ii)); - } + #[test] + fn patricia_insert_substring_keys() { + // This test uses a bunch of keys that are substrings of each other + // to make sure insertion and deletion does not lose data + let mut tree = PatriciaTree::new(); + let mut hashes = vec![]; + // Start by inserting a bunch of chunder + for i in 1u32..500 { + let hash = 
Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); + tree.insert(&hash, 128, i * 1000); + hashes.push(hash); + } + // Do the actual test -- note that we also test insertion and deletion + // at the root here. + for i in 0u32..10 { + tree.insert(&Zero::zero(), i as usize, i); + } + for i in 0u32..10 { + let m = tree.lookup(&Zero::zero(), i as usize); + assert_eq!(m, Some(&i)); + } + for i in 0u32..10 { + let m = tree.delete(&Zero::zero(), i as usize); + assert_eq!(m, Some(i)); + } + // Check that the chunder was unharmed + for (n, hash) in hashes.iter().enumerate() { + let ii = ((n + 1) * 1000) as u32; + let ret = tree.lookup(hash, 128); + assert_eq!(ret, Some(&ii)); + } } - // Confirm all is correct - for (n, hash) in hashes.iter().enumerate() { - let ii = n as u32; - let ret = tree.lookup(hash, 250); - if n % 2 == 0 { - assert_eq!(ret, Some(&ii)); - } else { - assert_eq!(ret, None); - } - } - } + #[test] + fn patricia_iter_test() { + let n_elems = 5000; + let mut tree = PatriciaTree::new(); + let mut data = Vec::from_elem(n_elems, None); + // Start by inserting a bunch of stuff + for i in 0..n_elems { + let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); + tree.insert(&hash, 128, i); + *data.get_mut(i) = Some(()); + } - #[test] - fn patricia_insert_substring_keys() { - // This test uses a bunch of keys that are substrings of each other - // to make sure insertion and deletion does not lose data - let mut tree = PatriciaTree::new(); - let mut hashes = vec![]; - // Start by inserting a bunch of chunder - for i in 1u32..500 { - let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); - tree.insert(&hash, 128, i * 1000); - hashes.push(hash); - } - // Do the actual test -- note that we also test insertion and deletion - // at the root here. 
- for i in 0u32..10 { - tree.insert(&Zero::zero(), i as usize, i); - } - for i in 0u32..10 { - let m = tree.lookup(&Zero::zero(), i as usize); - assert_eq!(m, Some(&i)); - } - for i in 0u32..10 { - let m = tree.delete(&Zero::zero(), i as usize); - assert_eq!(m, Some(i)); - } - // Check that the chunder was unharmed - for (n, hash) in hashes.iter().enumerate() { - let ii = ((n + 1) * 1000) as u32; - let ret = tree.lookup(hash, 128); - assert_eq!(ret, Some(&ii)); - } - } + // Iterate over and try to get everything + for n in tree.iter() { + assert!(data[*n].is_some()); + *data.get_mut(*n) = None; + } - #[test] - fn patricia_iter_test() { - let n_elems = 5000; - let mut tree = PatriciaTree::new(); - let mut data = Vec::from_elem(n_elems, None); - // Start by inserting a bunch of stuff - for i in 0..n_elems { - let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); - tree.insert(&hash, 128, i); - *data.get_mut(i) = Some(()); + // Check that we got everything + assert!(data.iter().all(|opt| opt.is_none())); } - // Iterate over and try to get everything - for n in tree.iter() { - assert!(data[*n].is_some()); - *data.get_mut(*n) = None; + #[test] + fn patricia_mut_iter_test() { + let n_elems = 5000; + let mut tree = PatriciaTree::new(); + let mut data = Vec::from_elem(n_elems, None); + // Start by inserting a bunch of stuff + for i in 0..n_elems { + let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); + tree.insert(&hash, 128, i); + *data.get_mut(i) = Some(()); + } + + // Iterate over and flip all the values + for n in tree.mut_iter() { + *n = n_elems - *n - 1; + } + + // Iterate over and try to get everything + for n in tree.mut_iter() { + assert!(data[*n].is_some()); + *data.get_mut(*n) = None; + } + + // Check that we got everything + assert!(data.iter().all(|opt| opt.is_none())); } - // Check that we got everything - assert!(data.iter().all(|opt| opt.is_none())); - } + #[test] + fn patricia_serialize_test() { + // Build a tree + let mut tree = PatriciaTree::new(); + let mut hashes = vec![]; + for i in 0u32..5000 { + let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); + tree.insert(&hash, 250, i); + hashes.push(hash); + } - #[test] - fn patricia_mut_iter_test() { - let n_elems = 5000; - let mut tree = PatriciaTree::new(); - let mut data = Vec::from_elem(n_elems, None); - // Start by inserting a bunch of stuff - for i in 0..n_elems { - let hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); - tree.insert(&hash, 128, i); - *data.get_mut(i) = Some(()); + // Serialize it + let serialized = serialize(&tree).unwrap(); + // Deserialize it + let deserialized: io::Result> = deserialize(serialized); + assert!(deserialized.is_ok()); + let new_tree = deserialized.unwrap(); + + // Check that all inserts are still there + for (n, hash) in hashes.iter().enumerate() { + let ii = n as u32; + let ret = new_tree.lookup(hash, 250); + assert_eq!(ret, Some(&ii)); + } } - - // Iterate over and flip all the values - for n in tree.mut_iter() { - *n = n_elems - *n - 1; - } - - // Iterate over and try to get everything - for n in tree.mut_iter() { - assert!(data[*n].is_some()); - *data.get_mut(*n) = None; - } - - // Check that we got everything - assert!(data.iter().all(|opt| opt.is_none())); - } - - #[test] - fn patricia_serialize_test() { - // Build a tree - let mut tree = PatriciaTree::new(); - let mut hashes = vec![]; - for i in 0u32..5000 { - let 
hash = Sha256dHash::from_data(&[(i / 0x100) as u8, (i % 0x100) as u8]).into_le().low_128(); - tree.insert(&hash, 250, i); - hashes.push(hash); - } - - // Serialize it - let serialized = serialize(&tree).unwrap(); - // Deserialize it - let deserialized: io::Result> = deserialize(serialized); - assert!(deserialized.is_ok()); - let new_tree = deserialized.unwrap(); - - // Check that all inserts are still there - for (n, hash) in hashes.iter().enumerate() { - let ii = n as u32; - let ret = new_tree.lookup(hash, 250); - assert_eq!(ret, Some(&ii)); - } - } } diff --git a/src/util/uint.rs b/src/util/uint.rs index 789d7376..897b650d 100644 --- a/src/util/uint.rs +++ b/src/util/uint.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to @@ -25,483 +25,483 @@ use network::serialize::RawEncoder; use util::BitArray; macro_rules! construct_uint { - ($name:ident, $n_words:expr) => ( - /// Little-endian large integer type - #[repr(C)] - pub struct $name(pub [u64; $n_words]); - impl_array_newtype!($name, u64, $n_words); + ($name:ident, $n_words:expr) => ( + /// Little-endian large integer type + #[repr(C)] + pub struct $name(pub [u64; $n_words]); + impl_array_newtype!($name, u64, $n_words); - impl $name { - /// Conversion to u32 - #[inline] - pub fn low_u32(&self) -> u32 { - let &$name(ref arr) = self; - arr[0] as u32 - } + impl $name { + /// Conversion to u32 + #[inline] + pub fn low_u32(&self) -> u32 { + let &$name(ref arr) = self; + arr[0] as u32 + } - /// Return the least number of bits needed to represent the number - #[inline] - pub fn bits(&self) -> usize { - let &$name(ref arr) = self; - for i in 1..$n_words { - if arr[$n_words - i] > 0 { return (0x40 * ($n_words - i + 1)) - arr[$n_words - i].leading_zeros() as usize; } - } - 0x40 - arr[0].leading_zeros() as usize - } + /// Return the least number of bits needed to represent the number + #[inline] + pub fn bits(&self) -> usize { + let &$name(ref arr) = self; + for i in 1..$n_words { + if arr[$n_words - i] > 0 { return (0x40 * ($n_words - i + 1)) - arr[$n_words - i].leading_zeros() as usize; } + } + 0x40 - arr[0].leading_zeros() as usize + } - /// Multiplication by u32 - pub fn mul_u32(self, other: u32) -> $name { - let $name(ref arr) = self; - let mut carry = [0u64; $n_words]; - let mut ret = [0u64; $n_words]; - for i in 0..$n_words { - let upper = other as u64 * (arr[i] >> 32); - let lower = other as u64 * (arr[i] & 0xFFFFFFFF); - if i < 3 { - carry[i + 1] += upper >> 32; - } - ret[i] = lower + (upper << 32); - } - $name(ret) + $name(carry) - } - } - - impl ::std::num::FromPrimitive for $name { - #[inline] - fn from_u64(init: u64) -> Option<$name> { - let mut ret = [0; $n_words]; - ret[0] = init; - Some($name(ret)) - } - - #[inline] - fn from_i64(init: i64) -> Option<$name> { - ::std::num::FromPrimitive::from_u64(init as u64) - } - } - - impl ::std::num::Zero for $name { - fn zero() -> $name { $name([0; $n_words]) } - } - - impl ::std::num::One for $name { - fn one() -> $name { - $name({ let mut ret = [0; $n_words]; ret[0] = 1; ret }) - } - } - - impl ::std::ops::Add<$name> for $name { - type Output = $name; - - fn add(self, other: $name) -> $name { - let $name(ref me) = self; - let $name(ref you) = other; - let mut ret = [0u64; $n_words]; - let mut carry = [0u64; $n_words]; - let mut b_carry = false; - for i in 0..$n_words { - ret[i] = me[i] + you[i]; - if i 
< $n_words - 1 && ret[i] < me[i] { - carry[i + 1] = 1; - b_carry = true; - } - } - if b_carry { $name(ret) + $name(carry) } else { $name(ret) } - } - } - - impl ::std::ops::Sub<$name> for $name { - type Output = $name; - - #[inline] - fn sub(self, other: $name) -> $name { - self + !other + One::one() - } - } - - impl ::std::ops::Mul<$name> for $name { - type Output = $name; - - fn mul(self, other: $name) -> $name { - let mut me = self; - // TODO: be more efficient about this - for i in 0..(2 * $n_words) { - me = me + me.mul_u32((other >> (32 * i)).low_u32()) << (32 * i); - } - me - } - } - - impl ::std::ops::Div<$name> for $name { - type Output = $name; - - fn div(self, other: $name) -> $name { - let mut sub_copy = self; - let mut shift_copy = other; - let mut ret = [0u64; $n_words]; - - let my_bits = self.bits(); - let your_bits = other.bits(); - - // Check for division by 0 - assert!(your_bits != 0); - - // Early return in case we are dividing by a larger number than us - if my_bits < your_bits { - return $name(ret); + /// Multiplication by u32 + pub fn mul_u32(self, other: u32) -> $name { + let $name(ref arr) = self; + let mut carry = [0u64; $n_words]; + let mut ret = [0u64; $n_words]; + for i in 0..$n_words { + let upper = other as u64 * (arr[i] >> 32); + let lower = other as u64 * (arr[i] & 0xFFFFFFFF); + if i < 3 { + carry[i + 1] += upper >> 32; + } + ret[i] = lower + (upper << 32); + } + $name(ret) + $name(carry) + } } - // Bitwise long division - let mut shift = my_bits - your_bits; - shift_copy = shift_copy << shift; - loop { - if sub_copy >= shift_copy { - ret[shift / 64] |= 1 << (shift % 64); - sub_copy = sub_copy - shift_copy; - } - shift_copy = shift_copy >> 1; - if shift == 0 { break; } - shift -= 1; + impl ::std::num::FromPrimitive for $name { + #[inline] + fn from_u64(init: u64) -> Option<$name> { + let mut ret = [0; $n_words]; + ret[0] = init; + Some($name(ret)) + } + + #[inline] + fn from_i64(init: i64) -> Option<$name> { + ::std::num::FromPrimitive::from_u64(init as u64) + } } - $name(ret) - } - } - - impl BitArray for $name { - #[inline] - fn bit(&self, index: usize) -> bool { - let &$name(ref arr) = self; - arr[index / 64] & (1 << (index % 64)) != 0 - } - - #[inline] - fn bit_slice(&self, start: usize, end: usize) -> $name { - (*self >> start).mask(end - start) - } - - #[inline] - fn mask(&self, n: usize) -> $name { - let &$name(ref arr) = self; - let mut ret = [0; $n_words]; - for i in 0..$n_words { - if n >= 0x40 * (i + 1) { - ret[i] = arr[i]; - } else { - ret[i] = arr[i] & ((1 << (n - 0x40 * i)) - 1); - break; - } + impl ::std::num::Zero for $name { + fn zero() -> $name { $name([0; $n_words]) } } - $name(ret) - } - #[inline] - fn trailing_zeros(&self) -> usize { - let &$name(ref arr) = self; - for i in 0..($n_words - 1) { - if arr[i] > 0 { return (0x40 * i) + arr[i].trailing_zeros() as usize; } + impl ::std::num::One for $name { + fn one() -> $name { + $name({ let mut ret = [0; $n_words]; ret[0] = 1; ret }) + } } - (0x40 * ($n_words - 1)) + arr[3].trailing_zeros() as usize - } - } - impl ::std::ops::BitAnd<$name> for $name { - type Output = $name; + impl ::std::ops::Add<$name> for $name { + type Output = $name; - #[inline] - fn bitand(self, other: $name) -> $name { - let $name(ref arr1) = self; - let $name(ref arr2) = other; - let mut ret = [0u64; $n_words]; - for i in 0..$n_words { - ret[i] = arr1[i] & arr2[i]; + fn add(self, other: $name) -> $name { + let $name(ref me) = self; + let $name(ref you) = other; + let mut ret = [0u64; $n_words]; + let mut carry = [0u64; 
$n_words]; + let mut b_carry = false; + for i in 0..$n_words { + ret[i] = me[i] + you[i]; + if i < $n_words - 1 && ret[i] < me[i] { + carry[i + 1] = 1; + b_carry = true; + } + } + if b_carry { $name(ret) + $name(carry) } else { $name(ret) } + } } - $name(ret) - } - } - impl ::std::ops::BitXor<$name> for $name { - type Output = $name; + impl ::std::ops::Sub<$name> for $name { + type Output = $name; - #[inline] - fn bitxor(self, other: $name) -> $name { - let $name(ref arr1) = self; - let $name(ref arr2) = other; - let mut ret = [0u64; $n_words]; - for i in 0..$n_words { - ret[i] = arr1[i] ^ arr2[i]; + #[inline] + fn sub(self, other: $name) -> $name { + self + !other + One::one() + } } - $name(ret) - } - } - impl ::std::ops::BitOr<$name> for $name { - type Output = $name; + impl ::std::ops::Mul<$name> for $name { + type Output = $name; - #[inline] - fn bitor(self, other: $name) -> $name { - let $name(ref arr1) = self; - let $name(ref arr2) = other; - let mut ret = [0u64; $n_words]; - for i in 0..$n_words { - ret[i] = arr1[i] | arr2[i]; + fn mul(self, other: $name) -> $name { + let mut me = self; + // TODO: be more efficient about this + for i in 0..(2 * $n_words) { + me = me + me.mul_u32((other >> (32 * i)).low_u32()) << (32 * i); + } + me + } } - $name(ret) - } - } - impl ::std::ops::Not for $name { - type Output = $name; + impl ::std::ops::Div<$name> for $name { + type Output = $name; - #[inline] - fn not(self) -> $name { - let $name(ref arr) = self; - let mut ret = [0u64; $n_words]; - for i in 0..$n_words { - ret[i] = !arr[i]; + fn div(self, other: $name) -> $name { + let mut sub_copy = self; + let mut shift_copy = other; + let mut ret = [0u64; $n_words]; + + let my_bits = self.bits(); + let your_bits = other.bits(); + + // Check for division by 0 + assert!(your_bits != 0); + + // Early return in case we are dividing by a larger number than us + if my_bits < your_bits { + return $name(ret); + } + + // Bitwise long division + let mut shift = my_bits - your_bits; + shift_copy = shift_copy << shift; + loop { + if sub_copy >= shift_copy { + ret[shift / 64] |= 1 << (shift % 64); + sub_copy = sub_copy - shift_copy; + } + shift_copy = shift_copy >> 1; + if shift == 0 { break; } + shift -= 1; + } + + $name(ret) + } } - $name(ret) - } - } - impl ::std::ops::Shl for $name { - type Output = $name; + impl BitArray for $name { + #[inline] + fn bit(&self, index: usize) -> bool { + let &$name(ref arr) = self; + arr[index / 64] & (1 << (index % 64)) != 0 + } - fn shl(self, shift: usize) -> $name { - let $name(ref original) = self; - let mut ret = [0u64; $n_words]; - let word_shift = shift / 64; - let bit_shift = shift % 64; - for i in 0..$n_words { - // Shift - if bit_shift < 64 && i + word_shift < $n_words { - ret[i + word_shift] += original[i] << bit_shift; - } - // Carry - if bit_shift > 0 && i + word_shift + 1 < $n_words { - ret[i + word_shift + 1] += original[i] >> (64 - bit_shift); - } + #[inline] + fn bit_slice(&self, start: usize, end: usize) -> $name { + (*self >> start).mask(end - start) + } + + #[inline] + fn mask(&self, n: usize) -> $name { + let &$name(ref arr) = self; + let mut ret = [0; $n_words]; + for i in 0..$n_words { + if n >= 0x40 * (i + 1) { + ret[i] = arr[i]; + } else { + ret[i] = arr[i] & ((1 << (n - 0x40 * i)) - 1); + break; + } + } + $name(ret) + } + + #[inline] + fn trailing_zeros(&self) -> usize { + let &$name(ref arr) = self; + for i in 0..($n_words - 1) { + if arr[i] > 0 { return (0x40 * i) + arr[i].trailing_zeros() as usize; } + } + (0x40 * ($n_words - 1)) + 
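// The Add and Sub impls above rest on two facts about wrapping (modular)
// arithmetic; a minimal sketch of both on a single u64 word, which is what the
// multi-word code applies limb by limb:
fn wrapping_arithmetic_sketch() {
    let (a, b) = (0xDEAD_BEEFu64, 0x1234u64);
    // Subtraction is addition of the two's complement: a - b == a + !b + 1.
    assert_eq!(a.wrapping_sub(b), a.wrapping_add(!b).wrapping_add(1));
    // An addition that wraps yields a sum smaller than either operand, which
    // is exactly the `ret[i] < me[i]` test used to set the next carry word.
    let wrapped = ::std::u64::MAX.wrapping_add(1);
    assert!(wrapped < ::std::u64::MAX);
}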
arr[3].trailing_zeros() as usize + } } - $name(ret) - } - } - impl ::std::ops::Shr for $name { - type Output = $name; + impl ::std::ops::BitAnd<$name> for $name { + type Output = $name; - #[allow(unsigned_negate)] - fn shr(self, shift: usize) -> $name { - let $name(ref original) = self; - let mut ret = [0u64; $n_words]; - let word_shift = shift / 64; - let bit_shift = shift % 64; - for i in 0..$n_words { - // Shift - if bit_shift < 64 && i - word_shift < $n_words { - ret[i - word_shift] += original[i] >> bit_shift; - } - // Carry - if bit_shift > 0 && i - word_shift - 1 < $n_words { - ret[i - word_shift - 1] += original[i] << (64 - bit_shift); - } + #[inline] + fn bitand(self, other: $name) -> $name { + let $name(ref arr1) = self; + let $name(ref arr2) = other; + let mut ret = [0u64; $n_words]; + for i in 0..$n_words { + ret[i] = arr1[i] & arr2[i]; + } + $name(ret) + } } - $name(ret) - } - } - impl ::std::cmp::Ord for $name { - fn cmp(&self, other: &$name) -> ::std::cmp::Ordering { - let &$name(ref me) = self; - let &$name(ref you) = other; - for i in 0..$n_words { - if me[$n_words - 1 - i] < you[$n_words - 1 - i] { return ::std::cmp::Ordering::Less; } - if me[$n_words - 1 - i] > you[$n_words - 1 - i] { return ::std::cmp::Ordering::Greater; } + impl ::std::ops::BitXor<$name> for $name { + type Output = $name; + + #[inline] + fn bitxor(self, other: $name) -> $name { + let $name(ref arr1) = self; + let $name(ref arr2) = other; + let mut ret = [0u64; $n_words]; + for i in 0..$n_words { + ret[i] = arr1[i] ^ arr2[i]; + } + $name(ret) + } } - return ::std::cmp::Ordering::Equal; - } - } - impl ::std::cmp::PartialOrd for $name { - fn partial_cmp(&self, other: &$name) -> Option<::std::cmp::Ordering> { - Some(self.cmp(other)) - } - } + impl ::std::ops::BitOr<$name> for $name { + type Output = $name; - impl fmt::Debug for $name { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let &$name(ref data) = self; - try!(write!(f, "0x")); - for ch in data.iter().rev() { - try!(write!(f, "{:02x}", ch)); + #[inline] + fn bitor(self, other: $name) -> $name { + let $name(ref arr1) = self; + let $name(ref arr2) = other; + let mut ret = [0u64; $n_words]; + for i in 0..$n_words { + ret[i] = arr1[i] | arr2[i]; + } + $name(ret) + } } - Ok(()) - } - } - impl ::network::encodable::ConsensusEncodable for $name { - #[inline] - fn consensus_encode(&self, s: &mut S) -> Result<(), S::Error> { - use network::encodable::ConsensusEncodable; - let &$name(ref data) = self; - for word in data.iter() { try!(word.consensus_encode(s)); } - Ok(()) - } - } + impl ::std::ops::Not for $name { + type Output = $name; - impl ::network::encodable::ConsensusDecodable for $name { - fn consensus_decode(d: &mut D) -> Result<$name, D::Error> { - use network::encodable::ConsensusDecodable; - let ret: [u64; $n_words] = try!(ConsensusDecodable::consensus_decode(d)); - Ok($name(ret)) - } - } - ); + #[inline] + fn not(self) -> $name { + let $name(ref arr) = self; + let mut ret = [0u64; $n_words]; + for i in 0..$n_words { + ret[i] = !arr[i]; + } + $name(ret) + } + } + + impl ::std::ops::Shl for $name { + type Output = $name; + + fn shl(self, shift: usize) -> $name { + let $name(ref original) = self; + let mut ret = [0u64; $n_words]; + let word_shift = shift / 64; + let bit_shift = shift % 64; + for i in 0..$n_words { + // Shift + if bit_shift < 64 && i + word_shift < $n_words { + ret[i + word_shift] += original[i] << bit_shift; + } + // Carry + if bit_shift > 0 && i + word_shift + 1 < $n_words { + ret[i + word_shift + 1] += original[i] >> 
(64 - bit_shift); + } + } + $name(ret) + } + } + + impl ::std::ops::Shr for $name { + type Output = $name; + + #[allow(unsigned_negate)] + fn shr(self, shift: usize) -> $name { + let $name(ref original) = self; + let mut ret = [0u64; $n_words]; + let word_shift = shift / 64; + let bit_shift = shift % 64; + for i in 0..$n_words { + // Shift + if bit_shift < 64 && i - word_shift < $n_words { + ret[i - word_shift] += original[i] >> bit_shift; + } + // Carry + if bit_shift > 0 && i - word_shift - 1 < $n_words { + ret[i - word_shift - 1] += original[i] << (64 - bit_shift); + } + } + $name(ret) + } + } + + impl ::std::cmp::Ord for $name { + fn cmp(&self, other: &$name) -> ::std::cmp::Ordering { + let &$name(ref me) = self; + let &$name(ref you) = other; + for i in 0..$n_words { + if me[$n_words - 1 - i] < you[$n_words - 1 - i] { return ::std::cmp::Ordering::Less; } + if me[$n_words - 1 - i] > you[$n_words - 1 - i] { return ::std::cmp::Ordering::Greater; } + } + return ::std::cmp::Ordering::Equal; + } + } + + impl ::std::cmp::PartialOrd for $name { + fn partial_cmp(&self, other: &$name) -> Option<::std::cmp::Ordering> { + Some(self.cmp(other)) + } + } + + impl fmt::Debug for $name { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let &$name(ref data) = self; + try!(write!(f, "0x")); + for ch in data.iter().rev() { + try!(write!(f, "{:02x}", ch)); + } + Ok(()) + } + } + + impl ::network::encodable::ConsensusEncodable for $name { + #[inline] + fn consensus_encode(&self, s: &mut S) -> Result<(), S::Error> { + use network::encodable::ConsensusEncodable; + let &$name(ref data) = self; + for word in data.iter() { try!(word.consensus_encode(s)); } + Ok(()) + } + } + + impl ::network::encodable::ConsensusDecodable for $name { + fn consensus_decode(d: &mut D) -> Result<$name, D::Error> { + use network::encodable::ConsensusDecodable; + let ret: [u64; $n_words] = try!(ConsensusDecodable::consensus_decode(d)); + Ok($name(ret)) + } + } + ); } construct_uint!(Uint256, 4); construct_uint!(Uint128, 2); impl Uint256 { - /// Increment by 1 - #[inline] - pub fn increment(&mut self) { - let &mut Uint256(ref mut arr) = self; - arr[0] += 1; - if arr[0] == 0 { - arr[1] += 1; - if arr[1] == 0 { - arr[2] += 1; - if arr[2] == 0 { - arr[3] += 1; + /// Increment by 1 + #[inline] + pub fn increment(&mut self) { + let &mut Uint256(ref mut arr) = self; + arr[0] += 1; + if arr[0] == 0 { + arr[1] += 1; + if arr[1] == 0 { + arr[2] += 1; + if arr[2] == 0 { + arr[3] += 1; + } + } } - } } - } - /// Decay to a uint128 - #[inline] - pub fn low_128(&self) -> Uint128 { - let &Uint256(data) = self; - Uint128([data[0], data[1]]) - } + /// Decay to a uint128 + #[inline] + pub fn low_128(&self) -> Uint128 { + let &Uint256(data) = self; + Uint128([data[0], data[1]]) + } } #[cfg(test)] mod tests { - use std::io; - use std::num::from_u64; + use std::io; + use std::num::from_u64; - use network::serialize::{deserialize, serialize}; - use util::uint::Uint256; - use util::BitArray; + use network::serialize::{deserialize, serialize}; + use util::uint::Uint256; + use util::BitArray; - #[test] - pub fn uint256_bits_test() { - assert_eq!(from_u64::(255).unwrap().bits(), 8); - assert_eq!(from_u64::(256).unwrap().bits(), 9); - assert_eq!(from_u64::(300).unwrap().bits(), 9); - assert_eq!(from_u64::(60000).unwrap().bits(), 16); - assert_eq!(from_u64::(70000).unwrap().bits(), 17); + #[test] + pub fn uint256_bits_test() { + assert_eq!(from_u64::(255).unwrap().bits(), 8); + assert_eq!(from_u64::(256).unwrap().bits(), 9); + 
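// The Shl/Shr impls above split a shift into a whole-word part and an in-word
// part, moving the spill-over between adjacent limbs. A sketch with two u64
// limbs (little-endian, lowest limb first), shifting left by 68 = 64 + 4 bits:
fn shift_decomposition_sketch() {
    let original = [0xFFu64, 0u64];
    let (word_shift, bit_shift) = (68 / 64, 68 % 64);
    let mut ret = [0u64; 2];
    // "Shift" step: limb i lands in limb i + word_shift.
    ret[word_shift] += original[0] << bit_shift;
    // The "Carry" step would spill into limb word_shift + 1, which does not
    // exist here, so those bits are dropped, just as in the impl above.
    assert_eq!(ret, [0, 0xFF0]);
}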
assert_eq!(from_u64::(300).unwrap().bits(), 9); + assert_eq!(from_u64::(60000).unwrap().bits(), 16); + assert_eq!(from_u64::(70000).unwrap().bits(), 17); - // Try to read the following lines out loud quickly - let mut shl: Uint256 = from_u64(70000).unwrap(); - shl = shl << 100; - assert_eq!(shl.bits(), 117); - shl = shl << 100; - assert_eq!(shl.bits(), 217); - shl = shl << 100; - assert_eq!(shl.bits(), 0); + // Try to read the following lines out loud quickly + let mut shl: Uint256 = from_u64(70000).unwrap(); + shl = shl << 100; + assert_eq!(shl.bits(), 117); + shl = shl << 100; + assert_eq!(shl.bits(), 217); + shl = shl << 100; + assert_eq!(shl.bits(), 0); - // Bit set check - assert!(!from_u64::(10).unwrap().bit(0)); - assert!(from_u64::(10).unwrap().bit(1)); - assert!(!from_u64::(10).unwrap().bit(2)); - assert!(from_u64::(10).unwrap().bit(3)); - assert!(!from_u64::(10).unwrap().bit(4)); - } + // Bit set check + assert!(!from_u64::(10).unwrap().bit(0)); + assert!(from_u64::(10).unwrap().bit(1)); + assert!(!from_u64::(10).unwrap().bit(2)); + assert!(from_u64::(10).unwrap().bit(3)); + assert!(!from_u64::(10).unwrap().bit(4)); + } - #[test] - pub fn uint256_comp_test() { - let small = Uint256([10u64, 0, 0, 0]); - let big = Uint256([0x8C8C3EE70C644118u64, 0x0209E7378231E632, 0, 0]); - let bigger = Uint256([0x9C8C3EE70C644118u64, 0x0209E7378231E632, 0, 0]); - let biggest = Uint256([0x5C8C3EE70C644118u64, 0x0209E7378231E632, 0, 1]); + #[test] + pub fn uint256_comp_test() { + let small = Uint256([10u64, 0, 0, 0]); + let big = Uint256([0x8C8C3EE70C644118u64, 0x0209E7378231E632, 0, 0]); + let bigger = Uint256([0x9C8C3EE70C644118u64, 0x0209E7378231E632, 0, 0]); + let biggest = Uint256([0x5C8C3EE70C644118u64, 0x0209E7378231E632, 0, 1]); - assert!(small < big); - assert!(big < bigger); - assert!(bigger < biggest); - assert!(bigger <= biggest); - assert!(biggest <= biggest); - assert!(bigger >= big); - assert!(bigger >= small); - assert!(small <= small); - } + assert!(small < big); + assert!(big < bigger); + assert!(bigger < biggest); + assert!(bigger <= biggest); + assert!(biggest <= biggest); + assert!(bigger >= big); + assert!(bigger >= small); + assert!(small <= small); + } - #[test] - pub fn uint256_arithmetic_test() { - let init: Uint256 = from_u64(0xDEADBEEFDEADBEEF).unwrap(); - let copy = init; + #[test] + pub fn uint256_arithmetic_test() { + let init: Uint256 = from_u64(0xDEADBEEFDEADBEEF).unwrap(); + let copy = init; - let add = init + copy; - assert_eq!(add, Uint256([0xBD5B7DDFBD5B7DDEu64, 1, 0, 0])); - // Bitshifts - let shl = add << 88; - assert_eq!(shl, Uint256([0u64, 0xDFBD5B7DDE000000, 0x1BD5B7D, 0])); - let shr = shl >> 40; - assert_eq!(shr, Uint256([0x7DDE000000000000u64, 0x0001BD5B7DDFBD5B, 0, 0])); - // Increment - let mut incr = shr; - incr.increment(); - assert_eq!(incr, Uint256([0x7DDE000000000001u64, 0x0001BD5B7DDFBD5B, 0, 0])); - // Subtraction - let sub = incr - init; - assert_eq!(sub, Uint256([0x9F30411021524112u64, 0x0001BD5B7DDFBD5A, 0, 0])); - // Multiplication - let mult = sub.mul_u32(300); - assert_eq!(mult, Uint256([0x8C8C3EE70C644118u64, 0x0209E7378231E632, 0, 0])); - // Division - assert_eq!(from_u64::(105).unwrap() / - from_u64::(5).unwrap(), - from_u64::(21).unwrap()); - let div = mult / from_u64::(300).unwrap(); - assert_eq!(div, Uint256([0x9F30411021524112u64, 0x0001BD5B7DDFBD5A, 0, 0])); - // TODO: bit inversion - } + let add = init + copy; + assert_eq!(add, Uint256([0xBD5B7DDFBD5B7DDEu64, 1, 0, 0])); + // Bitshifts + let shl = add << 88; + assert_eq!(shl, 
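// The Div impl above is plain shift-and-subtract long division; the quotient
// bit for each shift position is set whenever the shifted divisor still fits
// under the remainder. The same loop on u64, dividing 105 by 5 (the case the
// arithmetic test below checks on Uint256):
fn long_division_sketch() {
    let (mut rem, d) = (105u64, 5u64);
    let mut q = 0u64;
    // my_bits - your_bits, as computed by bits() in the impl
    let mut shift = (64 - rem.leading_zeros()) - (64 - d.leading_zeros());
    let mut div = d << shift;
    loop {
        if rem >= div {
            q |= 1 << shift;
            rem -= div;
        }
        div >>= 1;
        if shift == 0 { break; }
        shift -= 1;
    }
    assert_eq!((q, rem), (21, 0));
}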
Uint256([0u64, 0xDFBD5B7DDE000000, 0x1BD5B7D, 0])); + let shr = shl >> 40; + assert_eq!(shr, Uint256([0x7DDE000000000000u64, 0x0001BD5B7DDFBD5B, 0, 0])); + // Increment + let mut incr = shr; + incr.increment(); + assert_eq!(incr, Uint256([0x7DDE000000000001u64, 0x0001BD5B7DDFBD5B, 0, 0])); + // Subtraction + let sub = incr - init; + assert_eq!(sub, Uint256([0x9F30411021524112u64, 0x0001BD5B7DDFBD5A, 0, 0])); + // Multiplication + let mult = sub.mul_u32(300); + assert_eq!(mult, Uint256([0x8C8C3EE70C644118u64, 0x0209E7378231E632, 0, 0])); + // Division + assert_eq!(from_u64::(105).unwrap() / + from_u64::(5).unwrap(), + from_u64::(21).unwrap()); + let div = mult / from_u64::(300).unwrap(); + assert_eq!(div, Uint256([0x9F30411021524112u64, 0x0001BD5B7DDFBD5A, 0, 0])); + // TODO: bit inversion + } - #[test] - pub fn uint256_bitslice_test() { - let init = from_u64::(0xDEADBEEFDEADBEEF).unwrap(); - let add = init + (init << 64); - assert_eq!(add.bit_slice(64, 128), init); - assert_eq!(add.mask(64), init); - } + #[test] + pub fn uint256_bitslice_test() { + let init = from_u64::(0xDEADBEEFDEADBEEF).unwrap(); + let add = init + (init << 64); + assert_eq!(add.bit_slice(64, 128), init); + assert_eq!(add.mask(64), init); + } - #[test] - pub fn uint256_extreme_bitshift_test() { - // Shifting a u64 by 64 bits gives an undefined value, so make sure that - // we're doing the Right Thing here - let init = from_u64::(0xDEADBEEFDEADBEEF).unwrap(); + #[test] + pub fn uint256_extreme_bitshift_test() { + // Shifting a u64 by 64 bits gives an undefined value, so make sure that + // we're doing the Right Thing here + let init = from_u64::(0xDEADBEEFDEADBEEF).unwrap(); - assert_eq!(init << 64, Uint256([0, 0xDEADBEEFDEADBEEF, 0, 0])); - let add = (init << 64) + init; - assert_eq!(add, Uint256([0xDEADBEEFDEADBEEF, 0xDEADBEEFDEADBEEF, 0, 0])); - assert_eq!(add >> 0, Uint256([0xDEADBEEFDEADBEEF, 0xDEADBEEFDEADBEEF, 0, 0])); - assert_eq!(add << 0, Uint256([0xDEADBEEFDEADBEEF, 0xDEADBEEFDEADBEEF, 0, 0])); - assert_eq!(add >> 64, Uint256([0xDEADBEEFDEADBEEF, 0, 0, 0])); - assert_eq!(add << 64, Uint256([0, 0xDEADBEEFDEADBEEF, 0xDEADBEEFDEADBEEF, 0])); - } + assert_eq!(init << 64, Uint256([0, 0xDEADBEEFDEADBEEF, 0, 0])); + let add = (init << 64) + init; + assert_eq!(add, Uint256([0xDEADBEEFDEADBEEF, 0xDEADBEEFDEADBEEF, 0, 0])); + assert_eq!(add >> 0, Uint256([0xDEADBEEFDEADBEEF, 0xDEADBEEFDEADBEEF, 0, 0])); + assert_eq!(add << 0, Uint256([0xDEADBEEFDEADBEEF, 0xDEADBEEFDEADBEEF, 0, 0])); + assert_eq!(add >> 64, Uint256([0xDEADBEEFDEADBEEF, 0, 0, 0])); + assert_eq!(add << 64, Uint256([0, 0xDEADBEEFDEADBEEF, 0xDEADBEEFDEADBEEF, 0])); + } - #[test] - pub fn uint256_serialize_test() { - let start1 = Uint256([0x8C8C3EE70C644118u64, 0x0209E7378231E632, 0, 0]); - let start2 = Uint256([0x8C8C3EE70C644118u64, 0x0209E7378231E632, 0xABCD, 0xFFFF]); - let serial1 = serialize(&start1).unwrap(); - let serial2 = serialize(&start2).unwrap(); - let end1: io::Result = deserialize(serial1); - let end2: io::Result = deserialize(serial2); + #[test] + pub fn uint256_serialize_test() { + let start1 = Uint256([0x8C8C3EE70C644118u64, 0x0209E7378231E632, 0, 0]); + let start2 = Uint256([0x8C8C3EE70C644118u64, 0x0209E7378231E632, 0xABCD, 0xFFFF]); + let serial1 = serialize(&start1).unwrap(); + let serial2 = serialize(&start2).unwrap(); + let end1: io::Result = deserialize(serial1); + let end2: io::Result = deserialize(serial2); - assert_eq!(end1, Ok(start1)); - assert_eq!(end2, Ok(start2)); - } + assert_eq!(end1, Ok(start1)); + assert_eq!(end2, 
Ok(start2)); + } } diff --git a/src/wallet/address.rs b/src/wallet/address.rs index 1842b011..9343ddd6 100644 --- a/src/wallet/address.rs +++ b/src/wallet/address.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to // the public domain worldwide. This software is distributed without @@ -21,7 +21,7 @@ use crypto::digest::Digest; use crypto::sha2::Sha256; use std::ops; -use blockdata::script::Script; +use blockdata::script::{Script, ScriptBuilder}; use blockdata::opcodes; use network::constants::Network; use util::hash::Ripemd160Hash; @@ -30,189 +30,189 @@ use util::base58::{self, FromBase58, ToBase58}; #[derive(Clone, PartialEq, Eq)] /// A Bitcoin address pub struct Address { - /// The network on which this address is usable - pub network: Network, - /// The pubkeyhash that this address encodes - pub hash: Ripemd160Hash + /// The network on which this address is usable + pub network: Network, + /// The pubkeyhash that this address encodes + pub hash: Ripemd160Hash } impl Address { - /// Creates an address from a public key - #[inline] - pub fn from_key(network: Network, pk: &PublicKey) -> Address { - let mut sha = Sha256::new(); - let mut out = [0;32]; - sha.input(&pk[..]); - sha.result(&mut out); - Address { - network: network, - hash: Ripemd160Hash::from_data(&out) + /// Creates an address from a public key + #[inline] + pub fn from_key(network: Network, pk: &PublicKey) -> Address { + let mut sha = Sha256::new(); + let mut out = [0;32]; + sha.input(&pk[..]); + sha.result(&mut out); + Address { + network: network, + hash: Ripemd160Hash::from_data(&out) + } } - } - /// Generates a script pubkey spending to this address - #[inline] - pub fn script_pubkey(&self) -> Script { - let mut script = Script::new(); - script.push_opcode(opcodes::All::OP_DUP); - script.push_opcode(opcodes::All::OP_HASH160); - script.push_slice(&self.hash[..]); - script.push_opcode(opcodes::All::OP_EQUALVERIFY); - script.push_opcode(opcodes::All::OP_CHECKSIG); - script - } + /// Generates a script pubkey spending to this address + #[inline] + pub fn script_pubkey(&self) -> Script { + let mut script = ScriptBuilder::new(); + script.push_opcode(opcodes::All::OP_DUP); + script.push_opcode(opcodes::All::OP_HASH160); + script.push_slice(&self.hash[..]); + script.push_opcode(opcodes::All::OP_EQUALVERIFY); + script.push_opcode(opcodes::All::OP_CHECKSIG); + script.into_script() + } } impl ops::Index for Address { - type Output = u8; - #[inline] - fn index(&self, index: usize) -> &u8 { - &self.hash[index] - } + type Output = u8; + #[inline] + fn index(&self, index: usize) -> &u8 { + &self.hash[index] + } } impl ops::Index> for Address { - type Output = [u8]; - #[inline] - fn index(&self, index: ops::Range) -> &[u8] { - &self.hash[index] - } + type Output = [u8]; + #[inline] + fn index(&self, index: ops::Range) -> &[u8] { + &self.hash[index] + } } impl ops::Index> for Address { - type Output = [u8]; - #[inline] - fn index(&self, index: ops::RangeTo) -> &[u8] { - &self.hash[index] - } + type Output = [u8]; + #[inline] + fn index(&self, index: ops::RangeTo) -> &[u8] { + &self.hash[index] + } } impl ops::Index> for Address { - type Output = [u8]; - #[inline] - fn index(&self, index: ops::RangeFrom) -> &[u8] { - &self.hash[index] - } + type Output = [u8]; + #[inline] + fn index(&self, index: ops::RangeFrom) -> &[u8] { + &self.hash[index] + } } 
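// The script_pubkey method above assembles the standard pay-to-pubkey-hash
// script through ScriptBuilder. For reference, the same 25 bytes built by
// hand; the opcode values are the standard Bitcoin ones (OP_DUP = 0x76,
// OP_HASH160 = 0xa9, a 20-byte push = 0x14, OP_EQUALVERIFY = 0x88,
// OP_CHECKSIG = 0xac). This is a sketch for illustration, not a replacement
// for the builder API:
fn p2pkh_script_bytes(pubkey_hash: &[u8; 20]) -> Vec<u8> {
    let mut script = vec![0x76u8, 0xa9, 0x14];
    script.extend(pubkey_hash.iter().cloned());
    script.push(0x88);
    script.push(0xac);
    assert_eq!(script.len(), 25);
    script
}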
impl ops::Index for Address { - type Output = [u8]; - #[inline] - fn index(&self, _: ops::RangeFull) -> &[u8] { - &self.hash[..] - } + type Output = [u8]; + #[inline] + fn index(&self, _: ops::RangeFull) -> &[u8] { + &self.hash[..] + } } /// Conversion from other types into an address pub trait ToAddress { - /// Copies `self` into a new `Address` - fn to_address(&self, network: Network) -> Address; + /// Copies `self` into a new `Address` + fn to_address(&self, network: Network) -> Address; } impl<'a> ToAddress for &'a [u8] { - #[inline] - fn to_address(&self, network: Network) -> Address { - Address { - network: network, - hash: Ripemd160Hash::from_slice(*self) + #[inline] + fn to_address(&self, network: Network) -> Address { + Address { + network: network, + hash: Ripemd160Hash::from_slice(*self) + } } - } } impl ToBase58 for Address { - fn base58_layout(&self) -> Vec { - let mut ret = vec![ - match self.network { - Network::Bitcoin => 0, - Network::Testnet => 111 - } - ]; - ret.push_all(&self.hash[..]); - ret - } + fn base58_layout(&self) -> Vec { + let mut ret = vec![ + match self.network { + Network::Bitcoin => 0, + Network::Testnet => 111 + } + ]; + ret.push_all(&self.hash[..]); + ret + } } impl FromBase58 for Address { - fn from_base58_layout(data: Vec) -> Result { - if data.len() != 21 { - return Err(base58::Error::InvalidLength(data.len())); - } + fn from_base58_layout(data: Vec) -> Result { + if data.len() != 21 { + return Err(base58::Error::InvalidLength(data.len())); + } - Ok(Address { - network: match data[0] { - 0 => Network::Bitcoin, - 111 => Network::Testnet, - x => { return Err(base58::Error::InvalidVersion(vec![x])); } - }, - hash: Ripemd160Hash::from_slice(&data[1..]) - }) - } + Ok(Address { + network: match data[0] { + 0 => Network::Bitcoin, + 111 => Network::Testnet, + x => { return Err(base58::Error::InvalidVersion(vec![x])); } + }, + hash: Ripemd160Hash::from_slice(&data[1..]) + }) + } } impl ::std::fmt::Debug for Address { - fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { - write!(f, "{}", self.to_base58check()) - } + fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { + write!(f, "{}", self.to_base58check()) + } } #[cfg(test)] mod tests { - use serialize::hex::FromHex; - use test::{Bencher, black_box}; + use serialize::hex::FromHex; + use test::{Bencher, black_box}; - use secp256k1::Secp256k1; + use secp256k1::Secp256k1; - use network::constants::Network::Bitcoin; - use util::hash::Ripemd160Hash; - use util::base58::{FromBase58, ToBase58}; - use super::Address; + use network::constants::Network::Bitcoin; + use util::hash::Ripemd160Hash; + use util::base58::{FromBase58, ToBase58}; + use super::Address; - #[test] - fn test_address_58() { - let addr = Address { - network: Bitcoin, - hash: Ripemd160Hash::from_slice(&"162c5ea71c0b23f5b9022ef047c4a86470a5b070".from_hex().unwrap()) - }; + #[test] + fn test_address_58() { + let addr = Address { + network: Bitcoin, + hash: Ripemd160Hash::from_slice(&"162c5ea71c0b23f5b9022ef047c4a86470a5b070".from_hex().unwrap()) + }; - assert_eq!(&addr.to_base58check(), "132F25rTsvBdp9JzLLBHP5mvGY66i1xdiM"); - assert_eq!(FromBase58::from_base58check("132F25rTsvBdp9JzLLBHP5mvGY66i1xdiM"), Ok(addr)); - } + assert_eq!(&addr.to_base58check(), "132F25rTsvBdp9JzLLBHP5mvGY66i1xdiM"); + assert_eq!(FromBase58::from_base58check("132F25rTsvBdp9JzLLBHP5mvGY66i1xdiM"), Ok(addr)); + } - #[bench] - pub fn generate_address(bh: &mut Bencher) { - let mut s = Secp256k1::new().unwrap(); - bh.iter( || { - let (sk, pk) = 
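// The base58 layout above is a single version byte (0x00 for mainnet, 111 =
// 0x6f for testnet) followed by the 20-byte pubkey hash, 21 bytes in total,
// which is exactly the length from_base58_layout checks. A sketch of that
// payload; the 4-byte double-SHA256 checksum is added by the base58check
// layer and is not repeated here:
fn address_payload_sketch(testnet: bool, pubkey_hash: &[u8; 20]) -> Vec<u8> {
    let mut payload = vec![if testnet { 111u8 } else { 0 }];
    payload.extend(pubkey_hash.iter().cloned());
    assert_eq!(payload.len(), 21);
    payload
}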
s.generate_keypair(true); - black_box(sk); - black_box(pk); - let addr = Address::from_key(Bitcoin, &pk); - black_box(addr); - }); - } + #[bench] + pub fn generate_address(bh: &mut Bencher) { + let mut s = Secp256k1::new().unwrap(); + bh.iter( || { + let (sk, pk) = s.generate_keypair(true); + black_box(sk); + black_box(pk); + let addr = Address::from_key(Bitcoin, &pk); + black_box(addr); + }); + } - #[bench] - pub fn generate_uncompressed_address(bh: &mut Bencher) { - let mut s = Secp256k1::new().unwrap(); - bh.iter( || { - let (sk, pk) = s.generate_keypair(false); - black_box(sk); - black_box(pk); - let addr = Address::from_key(Bitcoin, &pk); - black_box(addr); - }); - } + #[bench] + pub fn generate_uncompressed_address(bh: &mut Bencher) { + let mut s = Secp256k1::new().unwrap(); + bh.iter( || { + let (sk, pk) = s.generate_keypair(false); + black_box(sk); + black_box(pk); + let addr = Address::from_key(Bitcoin, &pk); + black_box(addr); + }); + } - #[bench] - pub fn generate_sequential_address(bh: &mut Bencher) { - let mut s = Secp256k1::new().unwrap(); - let (sk, _) = s.generate_keypair(true); - let mut iter = sk.sequence(true); - bh.iter( || { - let (sk, pk) = iter.next().unwrap(); - black_box(sk); - let addr = Address::from_key(Bitcoin, &pk); - black_box(addr); - }); - } + #[bench] + pub fn generate_sequential_address(bh: &mut Bencher) { + let mut s = Secp256k1::new().unwrap(); + let (sk, _) = s.generate_keypair(true); + let mut iter = sk.sequence(true); + bh.iter( || { + let (sk, pk) = iter.next().unwrap(); + black_box(sk); + let addr = Address::from_key(Bitcoin, &pk); + black_box(addr); + }); + } } diff --git a/src/wallet/address_index.rs b/src/wallet/address_index.rs index 5deef405..0d1d9a84 100644 --- a/src/wallet/address_index.rs +++ b/src/wallet/address_index.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to // the public domain worldwide. 
This software is distributed without @@ -35,100 +35,100 @@ use util::hash::Sha256dHash; /// The type of a wallet-spendable txout #[derive(Clone, PartialEq, Eq, Debug)] pub enum WalletTxOutType { - /// Pay-to-address transaction redeemable using an ECDSA key - PayToAddress(SecretKey), - /// Undetermined - Unknown + /// Pay-to-address transaction redeemable using an ECDSA key + PayToAddress(SecretKey), + /// Undetermined + Unknown } /// A txout that is spendable by the wallet #[derive(Clone, PartialEq, Eq, Debug)] pub struct WalletTxOut { - /// The TXID of the transaction this output is part of - pub txid: Sha256dHash, - /// The index of the output in its transaction - pub vout: u32, - /// The blockheight at which this output appeared in the blockchain - pub height: u32, - /// The actual output - pub txo: TxOut, - /// A classification of the output - pub kind: WalletTxOutType + /// The TXID of the transaction this output is part of + pub txid: Sha256dHash, + /// The index of the output in its transaction + pub vout: u32, + /// The blockheight at which this output appeared in the blockchain + pub height: u32, + /// The actual output + pub txo: TxOut, + /// A classification of the output + pub kind: WalletTxOutType } /// An address index #[derive(Clone, PartialEq, Eq, Debug)] pub struct AddressIndex { - tentative_index: HashMap>, - index: HashMap<(Sha256dHash, u32), Vec>, - network: Network, - k1: u64, - k2: u64 + tentative_index: HashMap>, + index: HashMap<(Sha256dHash, u32), Vec>, + network: Network, + k1: u64, + k2: u64 } impl AddressIndex { - /// Creates a new address index from a wallet (which provides an authenticated - /// hash function for prefix filtering) and UTXO set (which is what gets filtered). - pub fn new(utxo_set: &UtxoSet, wallet: &Wallet) -> AddressIndex { - let (k1, k2) = wallet.siphash_key(); - let mut ret = AddressIndex { - tentative_index: HashMap::with_capacity(utxo_set.n_utxos() / 256), - index: HashMap::new(), - network: wallet.network(), - k1: k1, - k2: k2 - }; - for (key, idx, txo, height) in utxo_set.iter() { - if ret.admissible_txo(txo) { - let new = WalletTxOut { - txid: key, - vout: idx, - height: height, - txo: txo.clone(), - kind: WalletTxOutType::Unknown + /// Creates a new address index from a wallet (which provides an authenticated + /// hash function for prefix filtering) and UTXO set (which is what gets filtered). 
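// The admissible_address filter below keeps roughly one script in 256: it
// hashes the address with a SipHash keyed by the wallet's own (k1, k2) pair,
// so which addresses get indexed is not predictable from the outside. That is
// also why tentative_index is pre-sized at n_utxos() / 256. A standalone
// sketch of the same test:
fn admissible_sketch(k1: u64, k2: u64, addr_bytes: &[u8]) -> bool {
    use std::hash::{Hash, Hasher, SipHasher};
    let mut hasher = SipHasher::new_with_keys(k1, k2);
    addr_bytes.hash(&mut hasher);
    hasher.finish() & 0xFF == 0
}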
+ pub fn new(utxo_set: &UtxoSet, wallet: &Wallet) -> AddressIndex { + let (k1, k2) = wallet.siphash_key(); + let mut ret = AddressIndex { + tentative_index: HashMap::with_capacity(utxo_set.n_utxos() / 256), + index: HashMap::new(), + network: wallet.network(), + k1: k1, + k2: k2 }; - let mut entry = ret.tentative_index.entry(txo.script_pubkey.clone()); + for (key, idx, txo, height) in utxo_set.iter() { + if ret.admissible_txo(txo) { + let new = WalletTxOut { + txid: key, + vout: idx, + height: height, + txo: txo.clone(), + kind: WalletTxOutType::Unknown + }; + let mut entry = ret.tentative_index.entry(txo.script_pubkey.clone()); + let txos = entry.or_insert(vec![]); + txos.push(new); + } + } + ret + } + + /// + #[inline] + pub fn index_wallet_txo(&mut self, wtx: &WalletTxOut, kind: WalletTxOutType) { + let mut new = wtx.clone(); + new.kind = kind; + let mut entry = self.index.entry((wtx.txid, wtx.vout)); let txos = entry.or_insert(vec![]); txos.push(new); - } } - ret - } - /// - #[inline] - pub fn index_wallet_txo(&mut self, wtx: &WalletTxOut, kind: WalletTxOutType) { - let mut new = wtx.clone(); - new.kind = kind; - let mut entry = self.index.entry((wtx.txid, wtx.vout)); - let txos = entry.or_insert(vec![]); - txos.push(new); - } - - /// A filtering function used for creating a small address index. - #[inline] - pub fn admissible_address(&self, addr: &Address) -> bool { - let mut hasher = SipHasher::new_with_keys(self.k1, self.k2); - (&addr[..]).hash(&mut hasher); - hasher.finish() & 0xFF == 0 - } - - /// A filtering function used for creating a small address index. - #[inline] - pub fn admissible_txo(&self, out: &TxOut) -> bool { - match out.classify(self.network) { - PayToPubkeyHash(addr) => self.admissible_address(&addr), - _ => false + /// A filtering function used for creating a small address index. + #[inline] + pub fn admissible_address(&self, addr: &Address) -> bool { + let mut hasher = SipHasher::new_with_keys(self.k1, self.k2); + (&addr[..]).hash(&mut hasher); + hasher.finish() & 0xFF == 0 } - } - /// Lookup a txout by its scriptpubkey. Returns a slice because there - /// may be more than one for any given scriptpubkey. - #[inline] - pub fn find_by_script<'a>(&'a self, pubkey: &Script) -> &'a [WalletTxOut] { - self.tentative_index.get(pubkey).map(|v| &v[..]).unwrap_or(&[]) - } + /// A filtering function used for creating a small address index. + #[inline] + pub fn admissible_txo(&self, out: &TxOut) -> bool { + match out.classify(self.network) { + PayToPubkeyHash(addr) => self.admissible_address(&addr), + _ => false + } + } + + /// Lookup a txout by its scriptpubkey. Returns a slice because there + /// may be more than one for any given scriptpubkey. + #[inline] + pub fn find_by_script<'a>(&'a self, pubkey: &Script) -> &'a [WalletTxOut] { + self.tentative_index.get(pubkey).map(|v| &v[..]).unwrap_or(&[]) + } } diff --git a/src/wallet/bip32.rs b/src/wallet/bip32.rs index b2845363..b1aeb293 100644 --- a/src/wallet/bip32.rs +++ b/src/wallet/bip32.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to // the public domain worldwide. 
This software is distributed without @@ -46,535 +46,535 @@ impl_array_newtype_show!(Fingerprint); impl_array_newtype_encodable!(Fingerprint, u8, 4); impl Default for Fingerprint { - fn default() -> Fingerprint { Fingerprint([0, 0, 0, 0]) } + fn default() -> Fingerprint { Fingerprint([0, 0, 0, 0]) } } /// Extended private key #[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] pub struct ExtendedPrivKey { - /// The network this key is to be used on - pub network: Network, - /// How many derivations this key is from the master (which is 0) - pub depth: u8, - /// Fingerprint of the parent key (0 for master) - pub parent_fingerprint: Fingerprint, - /// Child number of the key used to derive from parent (0 for master) - pub child_number: ChildNumber, - /// Secret key - pub secret_key: SecretKey, - /// Chain code - pub chain_code: ChainCode + /// The network this key is to be used on + pub network: Network, + /// How many derivations this key is from the master (which is 0) + pub depth: u8, + /// Fingerprint of the parent key (0 for master) + pub parent_fingerprint: Fingerprint, + /// Child number of the key used to derive from parent (0 for master) + pub child_number: ChildNumber, + /// Secret key + pub secret_key: SecretKey, + /// Chain code + pub chain_code: ChainCode } /// Extended public key #[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] pub struct ExtendedPubKey { - /// The network this key is to be used on - pub network: Network, - /// How many derivations this key is from the master (which is 0) - pub depth: u8, - /// Fingerprint of the parent key - pub parent_fingerprint: Fingerprint, - /// Child number of the key used to derive from parent (0 for master) - pub child_number: ChildNumber, - /// Public key - pub public_key: PublicKey, - /// Chain code - pub chain_code: ChainCode + /// The network this key is to be used on + pub network: Network, + /// How many derivations this key is from the master (which is 0) + pub depth: u8, + /// Fingerprint of the parent key + pub parent_fingerprint: Fingerprint, + /// Child number of the key used to derive from parent (0 for master) + pub child_number: ChildNumber, + /// Public key + pub public_key: PublicKey, + /// Chain code + pub chain_code: ChainCode } /// A child number for a derived key #[derive(Clone, PartialEq, Eq, Debug)] pub enum ChildNumber { - /// Hardened key index, within [0, 2^31 - 1] - Hardened(u32), - /// Non-hardened key, within [0, 2^31 - 1] - Normal(u32), + /// Hardened key index, within [0, 2^31 - 1] + Hardened(u32), + /// Non-hardened key, within [0, 2^31 - 1] + Normal(u32), } impl Serialize for ChildNumber { - fn serialize(&self, s: &mut S) -> Result<(), S::Error> - where S: Serializer { - match *self { - ChildNumber::Hardened(n) => (n + (1 << 31)).serialize(s), - ChildNumber::Normal(n) => n.serialize(s) + fn serialize(&self, s: &mut S) -> Result<(), S::Error> + where S: Serializer { + match *self { + ChildNumber::Hardened(n) => (n + (1 << 31)).serialize(s), + ChildNumber::Normal(n) => n.serialize(s) + } } - } } impl Deserialize for ChildNumber { - fn deserialize(d: &mut D) -> Result - where D: Deserializer { - let n: u32 = try!(Deserialize::deserialize(d)); - if n < (1 << 31) { - Ok(ChildNumber::Normal(n)) - } else { - Ok(ChildNumber::Hardened(n - (1 << 31))) + fn deserialize(d: &mut D) -> Result + where D: Deserializer { + let n: u32 = try!(Deserialize::deserialize(d)); + if n < (1 << 31) { + Ok(ChildNumber::Normal(n)) + } else { + Ok(ChildNumber::Hardened(n - (1 << 31))) + } } - } } /// A BIP32 
error #[derive(Clone, PartialEq, Eq, Debug)] pub enum Error { - /// A pk->pk derivation was attempted on a hardened key - CannotDeriveFromHardenedKey, - /// A secp256k1 error occured - Ecdsa(secp256k1::Error), - /// A child number was provided that was out of range - InvalidChildNumber(ChildNumber), - /// Error creating a master seed --- for application use - RngError(String) + /// A pk->pk derivation was attempted on a hardened key + CannotDeriveFromHardenedKey, + /// A secp256k1 error occured + Ecdsa(secp256k1::Error), + /// A child number was provided that was out of range + InvalidChildNumber(ChildNumber), + /// Error creating a master seed --- for application use + RngError(String) } impl ExtendedPrivKey { - /// Construct a new master key from a seed value - pub fn new_master(network: Network, seed: &[u8]) -> Result { - let mut result = [0; 64]; - let mut hmac = Hmac::new(Sha512::new(), b"Bitcoin seed"); - hmac.input(seed); - hmac.raw_result(&mut result); + /// Construct a new master key from a seed value + pub fn new_master(network: Network, seed: &[u8]) -> Result { + let mut result = [0; 64]; + let mut hmac = Hmac::new(Sha512::new(), b"Bitcoin seed"); + hmac.input(seed); + hmac.raw_result(&mut result); - Ok(ExtendedPrivKey { - network: network, - depth: 0, - parent_fingerprint: Default::default(), - child_number: ChildNumber::Normal(0), - secret_key: try!(SecretKey::from_slice(&result[..32]).map_err(Error::Ecdsa)), - chain_code: ChainCode::from_slice(&result[32..]) - }) - } - - /// Creates a privkey from a path - pub fn from_path(master: &ExtendedPrivKey, path: &[ChildNumber]) - -> Result { - let mut sk = *master; - for &num in path.iter() { - sk = try!(sk.ckd_priv(num)); + Ok(ExtendedPrivKey { + network: network, + depth: 0, + parent_fingerprint: Default::default(), + child_number: ChildNumber::Normal(0), + secret_key: try!(SecretKey::from_slice(&result[..32]).map_err(Error::Ecdsa)), + chain_code: ChainCode::from_slice(&result[32..]) + }) } - Ok(sk) - } - /// Private->Private child key derivation - pub fn ckd_priv(&self, i: ChildNumber) -> Result { - let mut result = [0; 64]; - let mut hmac = Hmac::new(Sha512::new(), &self.chain_code[..]); - let mut be_n = [0; 32]; - match i { - ChildNumber::Normal(n) => { - if n >= (1 << 31) { return Err(Error::InvalidChildNumber(i)) } - // Non-hardened key: compute public data and use that - secp256k1::init(); - // Note the unwrap: this is fine, we checked the SK when we created it - hmac.input(&PublicKey::from_secret_key(&self.secret_key, true)[..]); - write_u32_be(&mut be_n, n); - } - ChildNumber::Hardened(n) => { - if n >= (1 << 31) { return Err(Error::InvalidChildNumber(i)) } - // Hardened key: use only secret data to prevent public derivation - hmac.input(&[0u8]); - hmac.input(&self.secret_key[..]); - write_u32_be(&mut be_n, n + (1 << 31)); - } + /// Creates a privkey from a path + pub fn from_path(master: &ExtendedPrivKey, path: &[ChildNumber]) + -> Result { + let mut sk = *master; + for &num in path.iter() { + sk = try!(sk.ckd_priv(num)); + } + Ok(sk) } - hmac.input(&be_n); - hmac.raw_result(&mut result); - let mut sk = try!(SecretKey::from_slice(&result[..32]).map_err(Error::Ecdsa)); - try!(sk.add_assign(&self.secret_key).map_err(Error::Ecdsa)); - Ok(ExtendedPrivKey { - network: self.network, - depth: self.depth + 1, - parent_fingerprint: self.fingerprint(), - child_number: i, - secret_key: sk, - chain_code: ChainCode::from_slice(&result[32..]) - }) - } + /// Private->Private child key derivation + pub fn ckd_priv(&self, i: ChildNumber) 
-> Result { + let mut result = [0; 64]; + let mut hmac = Hmac::new(Sha512::new(), &self.chain_code[..]); + let mut be_n = [0; 32]; + match i { + ChildNumber::Normal(n) => { + if n >= (1 << 31) { return Err(Error::InvalidChildNumber(i)) } + // Non-hardened key: compute public data and use that + secp256k1::init(); + // Note the unwrap: this is fine, we checked the SK when we created it + hmac.input(&PublicKey::from_secret_key(&self.secret_key, true)[..]); + write_u32_be(&mut be_n, n); + } + ChildNumber::Hardened(n) => { + if n >= (1 << 31) { return Err(Error::InvalidChildNumber(i)) } + // Hardened key: use only secret data to prevent public derivation + hmac.input(&[0u8]); + hmac.input(&self.secret_key[..]); + write_u32_be(&mut be_n, n + (1 << 31)); + } + } + hmac.input(&be_n); + hmac.raw_result(&mut result); + let mut sk = try!(SecretKey::from_slice(&result[..32]).map_err(Error::Ecdsa)); + try!(sk.add_assign(&self.secret_key).map_err(Error::Ecdsa)); - /// Returns the HASH160 of the chaincode - pub fn identifier(&self) -> [u8; 20] { - let mut sha2_res = [0; 32]; - let mut ripemd_res = [0; 20]; - // Compute extended public key - let pk = ExtendedPubKey::from_private(self); - // Do SHA256 of just the ECDSA pubkey - let mut sha2 = Sha256::new(); - sha2.input(&pk.public_key[..]); - sha2.result(&mut sha2_res); - // do RIPEMD160 - let mut ripemd = Ripemd160::new(); - ripemd.input(&sha2_res); - ripemd.result(&mut ripemd_res); - // Return - ripemd_res - } + Ok(ExtendedPrivKey { + network: self.network, + depth: self.depth + 1, + parent_fingerprint: self.fingerprint(), + child_number: i, + secret_key: sk, + chain_code: ChainCode::from_slice(&result[32..]) + }) + } - /// Returns the first four bytes of the identifier - pub fn fingerprint(&self) -> Fingerprint { - Fingerprint::from_slice(&self.identifier()[0..4]) - } + /// Returns the HASH160 of the chaincode + pub fn identifier(&self) -> [u8; 20] { + let mut sha2_res = [0; 32]; + let mut ripemd_res = [0; 20]; + // Compute extended public key + let pk = ExtendedPubKey::from_private(self); + // Do SHA256 of just the ECDSA pubkey + let mut sha2 = Sha256::new(); + sha2.input(&pk.public_key[..]); + sha2.result(&mut sha2_res); + // do RIPEMD160 + let mut ripemd = Ripemd160::new(); + ripemd.input(&sha2_res); + ripemd.result(&mut ripemd_res); + // Return + ripemd_res + } + + /// Returns the first four bytes of the identifier + pub fn fingerprint(&self) -> Fingerprint { + Fingerprint::from_slice(&self.identifier()[0..4]) + } } impl ExtendedPubKey { - /// Derives a public key from a private key - pub fn from_private(sk: &ExtendedPrivKey) -> ExtendedPubKey { - secp256k1::init(); - ExtendedPubKey { - network: sk.network, - depth: sk.depth, - parent_fingerprint: sk.parent_fingerprint, - child_number: sk.child_number, - public_key: PublicKey::from_secret_key(&sk.secret_key, true), - chain_code: sk.chain_code - } - } - - /// Public->Public child key derivation - pub fn ckd_pub(&self, i: ChildNumber) -> Result { - match i { - ChildNumber::Hardened(n) => { - if n >= (1 << 31) { - Err(Error::InvalidChildNumber(i)) - } else { - Err(Error::CannotDeriveFromHardenedKey) + /// Derives a public key from a private key + pub fn from_private(sk: &ExtendedPrivKey) -> ExtendedPubKey { + secp256k1::init(); + ExtendedPubKey { + network: sk.network, + depth: sk.depth, + parent_fingerprint: sk.parent_fingerprint, + child_number: sk.child_number, + public_key: PublicKey::from_secret_key(&sk.secret_key, true), + chain_code: sk.chain_code } - } - ChildNumber::Normal(n) => { - let mut 
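// Both child-key derivations follow the same shape: I = HMAC-SHA512(key =
// parent chain code, data = parent key material || child number); the left 32
// bytes of I tweak the parent key (add_assign / add_exp_assign above) and the
// right 32 bytes become the child chain code. A sketch of just that split,
// using the same rust-crypto calls as the code here; the crate paths are
// assumed and the byte strings are placeholders, not real key material:
fn hmac_split_sketch() {
    use crypto::hmac::Hmac;
    use crypto::mac::Mac;
    use crypto::sha2::Sha512;

    let mut result = [0u8; 64];
    let mut hmac = Hmac::new(Sha512::new(), b"parent chain code");
    hmac.input(b"parent key material and child number");
    hmac.raw_result(&mut result);
    let (tweak, child_chain_code) = (&result[..32], &result[32..]);
    assert_eq!((tweak.len(), child_chain_code.len()), (32, 32));
}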
hmac = Hmac::new(Sha512::new(), &self.chain_code[..]); - hmac.input(&self.public_key[..]); - let mut be_n = [0; 32]; - write_u32_be(&mut be_n, n); - hmac.input(&be_n); - - let mut result = [0; 64]; - hmac.raw_result(&mut result); - - let sk = try!(SecretKey::from_slice(&result[..32]).map_err(Error::Ecdsa)); - let mut pk = self.public_key.clone(); - try!(pk.add_exp_assign(&sk).map_err(Error::Ecdsa)); - - Ok(ExtendedPubKey { - network: self.network, - depth: self.depth + 1, - parent_fingerprint: self.fingerprint(), - child_number: i, - public_key: pk, - chain_code: ChainCode::from_slice(&result[32..]) - }) - } } - } - /// Returns the HASH160 of the chaincode - pub fn identifier(&self) -> [u8; 20] { - let mut sha2_res = [0; 32]; - let mut ripemd_res = [0; 20]; - // Do SHA256 of just the ECDSA pubkey - let mut sha2 = Sha256::new(); - sha2.input(&self.public_key[..]); - sha2.result(&mut sha2_res); - // do RIPEMD160 - let mut ripemd = Ripemd160::new(); - ripemd.input(&sha2_res); - ripemd.result(&mut ripemd_res); - // Return - ripemd_res - } + /// Public->Public child key derivation + pub fn ckd_pub(&self, i: ChildNumber) -> Result { + match i { + ChildNumber::Hardened(n) => { + if n >= (1 << 31) { + Err(Error::InvalidChildNumber(i)) + } else { + Err(Error::CannotDeriveFromHardenedKey) + } + } + ChildNumber::Normal(n) => { + let mut hmac = Hmac::new(Sha512::new(), &self.chain_code[..]); + hmac.input(&self.public_key[..]); + let mut be_n = [0; 32]; + write_u32_be(&mut be_n, n); + hmac.input(&be_n); - /// Returns the first four bytes of the identifier - pub fn fingerprint(&self) -> Fingerprint { - Fingerprint::from_slice(&self.identifier()[0..4]) - } + let mut result = [0; 64]; + hmac.raw_result(&mut result); + + let sk = try!(SecretKey::from_slice(&result[..32]).map_err(Error::Ecdsa)); + let mut pk = self.public_key.clone(); + try!(pk.add_exp_assign(&sk).map_err(Error::Ecdsa)); + + Ok(ExtendedPubKey { + network: self.network, + depth: self.depth + 1, + parent_fingerprint: self.fingerprint(), + child_number: i, + public_key: pk, + chain_code: ChainCode::from_slice(&result[32..]) + }) + } + } + } + + /// Returns the HASH160 of the chaincode + pub fn identifier(&self) -> [u8; 20] { + let mut sha2_res = [0; 32]; + let mut ripemd_res = [0; 20]; + // Do SHA256 of just the ECDSA pubkey + let mut sha2 = Sha256::new(); + sha2.input(&self.public_key[..]); + sha2.result(&mut sha2_res); + // do RIPEMD160 + let mut ripemd = Ripemd160::new(); + ripemd.input(&sha2_res); + ripemd.result(&mut ripemd_res); + // Return + ripemd_res + } + + /// Returns the first four bytes of the identifier + pub fn fingerprint(&self) -> Fingerprint { + Fingerprint::from_slice(&self.identifier()[0..4]) + } } impl ToBase58 for ExtendedPrivKey { - fn base58_layout(&self) -> Vec { - let mut ret = Vec::with_capacity(78); - ret.push_all(match self.network { - Network::Bitcoin => &[0x04, 0x88, 0xAD, 0xE4], - Network::Testnet => &[0x04, 0x35, 0x83, 0x94] - }); - ret.push(self.depth as u8); - ret.push_all(&self.parent_fingerprint[..]); - let mut be_n = [0; 32]; - match self.child_number { - ChildNumber::Hardened(n) => { - write_u32_be(&mut be_n, n + (1 << 31)); - } - ChildNumber::Normal(n) => { - write_u32_be(&mut be_n, n); - } + fn base58_layout(&self) -> Vec { + let mut ret = Vec::with_capacity(78); + ret.push_all(match self.network { + Network::Bitcoin => &[0x04, 0x88, 0xAD, 0xE4], + Network::Testnet => &[0x04, 0x35, 0x83, 0x94] + }); + ret.push(self.depth as u8); + ret.push_all(&self.parent_fingerprint[..]); + let mut be_n = [0; 32]; + 
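// The layout assembled here (and its public-key counterpart below) is the
// standard 78-byte BIP32 serialization: 4 version bytes (0x0488ADE4 /
// 0x0488B21E on mainnet, which after base58check encoding give the familiar
// "xprv" / "xpub" prefixes), 1 depth byte, a 4-byte parent fingerprint, a
// 4-byte big-endian child number (with 2^31 added for hardened children), the
// 32-byte chain code, and 33 bytes of key data (0x00 plus the secret key, or
// the compressed public key). A sketch of the arithmetic behind the length
// check in from_base58_layout:
fn bip32_layout_sketch() {
    assert_eq!(4 + 1 + 4 + 4 + 32 + 33, 78);
    // Hardened child numbers are stored with the high bit set, e.g. index 1:
    assert_eq!(1u32 + (1 << 31), 0x80000001);
}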
match self.child_number { + ChildNumber::Hardened(n) => { + write_u32_be(&mut be_n, n + (1 << 31)); + } + ChildNumber::Normal(n) => { + write_u32_be(&mut be_n, n); + } + } + ret.push_all(&be_n); + ret.push_all(&self.chain_code[..]); + ret.push(0); + ret.push_all(&self.secret_key[..]); + ret } - ret.push_all(&be_n); - ret.push_all(&self.chain_code[..]); - ret.push(0); - ret.push_all(&self.secret_key[..]); - ret - } } impl FromBase58 for ExtendedPrivKey { - fn from_base58_layout(data: Vec) -> Result { - if data.len() != 78 { - return Err(base58::Error::InvalidLength(data.len())); + fn from_base58_layout(data: Vec) -> Result { + if data.len() != 78 { + return Err(base58::Error::InvalidLength(data.len())); + } + + let cn_int = read_u32_be(&data[9..13]); + let child_number = if cn_int < (1 << 31) { ChildNumber::Normal(cn_int) } + else { ChildNumber::Hardened(cn_int - (1 << 31)) }; + + Ok(ExtendedPrivKey { + network: match &data[0..4] { + [0x04u8, 0x88, 0xAD, 0xE4] => Network::Bitcoin, + [0x04u8, 0x35, 0x83, 0x94] => Network::Testnet, + _ => { return Err(base58::Error::InvalidVersion((&data[0..4]).to_vec())); } + }, + depth: data[4], + parent_fingerprint: Fingerprint::from_slice(&data[5..9]), + child_number: child_number, + chain_code: ChainCode::from_slice(&data[13..45]), + secret_key: try!(SecretKey::from_slice( + &data[46..78]).map_err(|e| + base58::Error::Other(e.to_string()))) + }) } - - let cn_int = read_u32_be(&data[9..13]); - let child_number = if cn_int < (1 << 31) { ChildNumber::Normal(cn_int) } - else { ChildNumber::Hardened(cn_int - (1 << 31)) }; - - Ok(ExtendedPrivKey { - network: match &data[0..4] { - [0x04u8, 0x88, 0xAD, 0xE4] => Network::Bitcoin, - [0x04u8, 0x35, 0x83, 0x94] => Network::Testnet, - _ => { return Err(base58::Error::InvalidVersion((&data[0..4]).to_vec())); } - }, - depth: data[4], - parent_fingerprint: Fingerprint::from_slice(&data[5..9]), - child_number: child_number, - chain_code: ChainCode::from_slice(&data[13..45]), - secret_key: try!(SecretKey::from_slice( - &data[46..78]).map_err(|e| - base58::Error::Other(e.to_string()))) - }) - } } impl ToBase58 for ExtendedPubKey { - fn base58_layout(&self) -> Vec { - assert!(self.public_key.is_compressed()); - let mut ret = Vec::with_capacity(78); - ret.push_all(match self.network { - Network::Bitcoin => &[0x04u8, 0x88, 0xB2, 0x1E], - Network::Testnet => &[0x04u8, 0x35, 0x87, 0xCF] - }); - ret.push(self.depth as u8); - ret.push_all(&self.parent_fingerprint[..]); - let mut be_n = [0; 32]; - match self.child_number { - ChildNumber::Hardened(n) => { - write_u32_be(&mut be_n, n + (1 << 31)); - } - ChildNumber::Normal(n) => { - write_u32_be(&mut be_n, n); - } + fn base58_layout(&self) -> Vec { + assert!(self.public_key.is_compressed()); + let mut ret = Vec::with_capacity(78); + ret.push_all(match self.network { + Network::Bitcoin => &[0x04u8, 0x88, 0xB2, 0x1E], + Network::Testnet => &[0x04u8, 0x35, 0x87, 0xCF] + }); + ret.push(self.depth as u8); + ret.push_all(&self.parent_fingerprint[..]); + let mut be_n = [0; 32]; + match self.child_number { + ChildNumber::Hardened(n) => { + write_u32_be(&mut be_n, n + (1 << 31)); + } + ChildNumber::Normal(n) => { + write_u32_be(&mut be_n, n); + } + } + ret.push_all(&be_n); + ret.push_all(&self.chain_code[..]); + ret.push_all(&self.public_key[..]); + ret } - ret.push_all(&be_n); - ret.push_all(&self.chain_code[..]); - ret.push_all(&self.public_key[..]); - ret - } } impl FromBase58 for ExtendedPubKey { - fn from_base58_layout(data: Vec) -> Result { - if data.len() != 78 { - return 
Err(base58::Error::InvalidLength(data.len())); + fn from_base58_layout(data: Vec) -> Result { + if data.len() != 78 { + return Err(base58::Error::InvalidLength(data.len())); + } + + let cn_int = read_u32_be(&data[9..13]); + let child_number = if cn_int < (1 << 31) { ChildNumber::Normal(cn_int) } + else { ChildNumber::Hardened(cn_int - (1 << 31)) }; + + Ok(ExtendedPubKey { + network: match &data[0..4] { + [0x04, 0x88, 0xB2, 0x1E] => Network::Bitcoin, + [0x04, 0x35, 0x87, 0xCF] => Network::Testnet, + _ => { return Err(base58::Error::InvalidVersion((&data[0..4]).to_vec())); } + }, + depth: data[4], + parent_fingerprint: Fingerprint::from_slice(&data[5..9]), + child_number: child_number, + chain_code: ChainCode::from_slice(&data[13..45]), + public_key: try!(PublicKey::from_slice( + &data[45..78]).map_err(|e| + base58::Error::Other(e.to_string()))) + }) } - - let cn_int = read_u32_be(&data[9..13]); - let child_number = if cn_int < (1 << 31) { ChildNumber::Normal(cn_int) } - else { ChildNumber::Hardened(cn_int - (1 << 31)) }; - - Ok(ExtendedPubKey { - network: match &data[0..4] { - [0x04, 0x88, 0xB2, 0x1E] => Network::Bitcoin, - [0x04, 0x35, 0x87, 0xCF] => Network::Testnet, - _ => { return Err(base58::Error::InvalidVersion((&data[0..4]).to_vec())); } - }, - depth: data[4], - parent_fingerprint: Fingerprint::from_slice(&data[5..9]), - child_number: child_number, - chain_code: ChainCode::from_slice(&data[13..45]), - public_key: try!(PublicKey::from_slice( - &data[45..78]).map_err(|e| - base58::Error::Other(e.to_string()))) - }) - } } #[cfg(test)] mod tests { - use serialize::hex::FromHex; - use test::{Bencher, black_box}; + use serialize::hex::FromHex; + use test::{Bencher, black_box}; - use network::constants::Network::{self, Bitcoin}; - use util::base58::{FromBase58, ToBase58}; + use network::constants::Network::{self, Bitcoin}; + use util::base58::{FromBase58, ToBase58}; - use super::{ChildNumber, ExtendedPrivKey, ExtendedPubKey}; - use super::ChildNumber::{Hardened, Normal}; + use super::{ChildNumber, ExtendedPrivKey, ExtendedPubKey}; + use super::ChildNumber::{Hardened, Normal}; - fn test_path(network: Network, - seed: &[u8], - path: &[ChildNumber], - expected_sk: &str, - expected_pk: &str) { + fn test_path(network: Network, + seed: &[u8], + path: &[ChildNumber], + expected_sk: &str, + expected_pk: &str) { - let mut sk = ExtendedPrivKey::new_master(network, seed).unwrap(); - let mut pk = ExtendedPubKey::from_private(&sk); - // Derive keys, checking hardened and non-hardened derivation - for &num in path.iter() { - sk = sk.ckd_priv(num).unwrap(); - match num { - Normal(_) => { - let pk2 = pk.ckd_pub(num).unwrap(); - pk = ExtendedPubKey::from_private(&sk); - assert_eq!(pk, pk2); + let mut sk = ExtendedPrivKey::new_master(network, seed).unwrap(); + let mut pk = ExtendedPubKey::from_private(&sk); + // Derive keys, checking hardened and non-hardened derivation + for &num in path.iter() { + sk = sk.ckd_priv(num).unwrap(); + match num { + Normal(_) => { + let pk2 = pk.ckd_pub(num).unwrap(); + pk = ExtendedPubKey::from_private(&sk); + assert_eq!(pk, pk2); + } + Hardened(_) => { + pk = ExtendedPubKey::from_private(&sk); + } + } } - Hardened(_) => { - pk = ExtendedPubKey::from_private(&sk); - } - } + + // Check result against expected base58 + assert_eq!(&sk.to_base58check()[..], expected_sk); + assert_eq!(&pk.to_base58check()[..], expected_pk); + // Check decoded base58 against result + let decoded_sk = FromBase58::from_base58check(expected_sk); + let decoded_pk = 
FromBase58::from_base58check(expected_pk); + assert_eq!(Ok(sk), decoded_sk); + assert_eq!(Ok(pk), decoded_pk); } - // Check result against expected base58 - assert_eq!(&sk.to_base58check()[..], expected_sk); - assert_eq!(&pk.to_base58check()[..], expected_pk); - // Check decoded base58 against result - let decoded_sk = FromBase58::from_base58check(expected_sk); - let decoded_pk = FromBase58::from_base58check(expected_pk); - assert_eq!(Ok(sk), decoded_sk); - assert_eq!(Ok(pk), decoded_pk); - } + #[test] + fn test_vector_1() { + let seed = "000102030405060708090a0b0c0d0e0f".from_hex().unwrap(); + // m + test_path(Bitcoin, &seed, [], + "xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi", + "xpub661MyMwAqRbcFtXgS5sYJABqqG9YLmC4Q1Rdap9gSE8NqtwybGhePY2gZ29ESFjqJoCu1Rupje8YtGqsefD265TMg7usUDFdp6W1EGMcet8"); - #[test] - fn test_vector_1() { - let seed = "000102030405060708090a0b0c0d0e0f".from_hex().unwrap(); - // m - test_path(Bitcoin, &seed, [], - "xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi", - "xpub661MyMwAqRbcFtXgS5sYJABqqG9YLmC4Q1Rdap9gSE8NqtwybGhePY2gZ29ESFjqJoCu1Rupje8YtGqsefD265TMg7usUDFdp6W1EGMcet8"); - - // m/0h - test_path(Bitcoin, &seed, [Hardened(0)], - "xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7", - "xpub68Gmy5EdvgibQVfPdqkBBCHxA5htiqg55crXYuXoQRKfDBFA1WEjWgP6LHhwBZeNK1VTsfTFUHCdrfp1bgwQ9xv5ski8PX9rL2dZXvgGDnw"); + // m/0h + test_path(Bitcoin, &seed, [Hardened(0)], + "xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7", + "xpub68Gmy5EdvgibQVfPdqkBBCHxA5htiqg55crXYuXoQRKfDBFA1WEjWgP6LHhwBZeNK1VTsfTFUHCdrfp1bgwQ9xv5ski8PX9rL2dZXvgGDnw"); - // m/0h/1 - test_path(Bitcoin, &seed, [Hardened(0), Normal(1)], - "xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs", - "xpub6ASuArnXKPbfEwhqN6e3mwBcDTgzisQN1wXN9BJcM47sSikHjJf3UFHKkNAWbWMiGj7Wf5uMash7SyYq527Hqck2AxYysAA7xmALppuCkwQ"); + // m/0h/1 + test_path(Bitcoin, &seed, [Hardened(0), Normal(1)], + "xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs", + "xpub6ASuArnXKPbfEwhqN6e3mwBcDTgzisQN1wXN9BJcM47sSikHjJf3UFHKkNAWbWMiGj7Wf5uMash7SyYq527Hqck2AxYysAA7xmALppuCkwQ"); - // m/0h/1/2h - test_path(Bitcoin, &seed, [Hardened(0), Normal(1), Hardened(2)], - "xprv9z4pot5VBttmtdRTWfWQmoH1taj2axGVzFqSb8C9xaxKymcFzXBDptWmT7FwuEzG3ryjH4ktypQSAewRiNMjANTtpgP4mLTj34bhnZX7UiM", - "xpub6D4BDPcP2GT577Vvch3R8wDkScZWzQzMMUm3PWbmWvVJrZwQY4VUNgqFJPMM3No2dFDFGTsxxpG5uJh7n7epu4trkrX7x7DogT5Uv6fcLW5"); + // m/0h/1/2h + test_path(Bitcoin, &seed, [Hardened(0), Normal(1), Hardened(2)], + "xprv9z4pot5VBttmtdRTWfWQmoH1taj2axGVzFqSb8C9xaxKymcFzXBDptWmT7FwuEzG3ryjH4ktypQSAewRiNMjANTtpgP4mLTj34bhnZX7UiM", + "xpub6D4BDPcP2GT577Vvch3R8wDkScZWzQzMMUm3PWbmWvVJrZwQY4VUNgqFJPMM3No2dFDFGTsxxpG5uJh7n7epu4trkrX7x7DogT5Uv6fcLW5"); - // m/0h/1/2h/2 - test_path(Bitcoin, &seed, [Hardened(0), Normal(1), Hardened(2), Normal(2)], - "xprvA2JDeKCSNNZky6uBCviVfJSKyQ1mDYahRjijr5idH2WwLsEd4Hsb2Tyh8RfQMuPh7f7RtyzTtdrbdqqsunu5Mm3wDvUAKRHSC34sJ7in334", - "xpub6FHa3pjLCk84BayeJxFW2SP4XRrFd1JYnxeLeU8EqN3vDfZmbqBqaGJAyiLjTAwm6ZLRQUMv1ZACTj37sR62cfN7fe5JnJ7dh8zL4fiyLHV"); + // m/0h/1/2h/2 + test_path(Bitcoin, &seed, [Hardened(0), Normal(1), Hardened(2), Normal(2)], + 
"xprvA2JDeKCSNNZky6uBCviVfJSKyQ1mDYahRjijr5idH2WwLsEd4Hsb2Tyh8RfQMuPh7f7RtyzTtdrbdqqsunu5Mm3wDvUAKRHSC34sJ7in334", + "xpub6FHa3pjLCk84BayeJxFW2SP4XRrFd1JYnxeLeU8EqN3vDfZmbqBqaGJAyiLjTAwm6ZLRQUMv1ZACTj37sR62cfN7fe5JnJ7dh8zL4fiyLHV"); - // m/0h/1/2h/2/1000000000 - test_path(Bitcoin, &seed, [Hardened(0), Normal(1), Hardened(2), Normal(2), Normal(1000000000)], - "xprvA41z7zogVVwxVSgdKUHDy1SKmdb533PjDz7J6N6mV6uS3ze1ai8FHa8kmHScGpWmj4WggLyQjgPie1rFSruoUihUZREPSL39UNdE3BBDu76", - "xpub6H1LXWLaKsWFhvm6RVpEL9P4KfRZSW7abD2ttkWP3SSQvnyA8FSVqNTEcYFgJS2UaFcxupHiYkro49S8yGasTvXEYBVPamhGW6cFJodrTHy"); - } + // m/0h/1/2h/2/1000000000 + test_path(Bitcoin, &seed, [Hardened(0), Normal(1), Hardened(2), Normal(2), Normal(1000000000)], + "xprvA41z7zogVVwxVSgdKUHDy1SKmdb533PjDz7J6N6mV6uS3ze1ai8FHa8kmHScGpWmj4WggLyQjgPie1rFSruoUihUZREPSL39UNdE3BBDu76", + "xpub6H1LXWLaKsWFhvm6RVpEL9P4KfRZSW7abD2ttkWP3SSQvnyA8FSVqNTEcYFgJS2UaFcxupHiYkro49S8yGasTvXEYBVPamhGW6cFJodrTHy"); + } - #[test] - fn test_vector_2() { - let seed = "fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9c999693908d8a8784817e7b7875726f6c696663605d5a5754514e4b484542".from_hex().unwrap(); + #[test] + fn test_vector_2() { + let seed = "fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9c999693908d8a8784817e7b7875726f6c696663605d5a5754514e4b484542".from_hex().unwrap(); - // m - test_path(Bitcoin, &seed, [], - "xprv9s21ZrQH143K31xYSDQpPDxsXRTUcvj2iNHm5NUtrGiGG5e2DtALGdso3pGz6ssrdK4PFmM8NSpSBHNqPqm55Qn3LqFtT2emdEXVYsCzC2U", - "xpub661MyMwAqRbcFW31YEwpkMuc5THy2PSt5bDMsktWQcFF8syAmRUapSCGu8ED9W6oDMSgv6Zz8idoc4a6mr8BDzTJY47LJhkJ8UB7WEGuduB"); + // m + test_path(Bitcoin, &seed, [], + "xprv9s21ZrQH143K31xYSDQpPDxsXRTUcvj2iNHm5NUtrGiGG5e2DtALGdso3pGz6ssrdK4PFmM8NSpSBHNqPqm55Qn3LqFtT2emdEXVYsCzC2U", + "xpub661MyMwAqRbcFW31YEwpkMuc5THy2PSt5bDMsktWQcFF8syAmRUapSCGu8ED9W6oDMSgv6Zz8idoc4a6mr8BDzTJY47LJhkJ8UB7WEGuduB"); - // m/0 - test_path(Bitcoin, &seed, [Normal(0)], - "xprv9vHkqa6EV4sPZHYqZznhT2NPtPCjKuDKGY38FBWLvgaDx45zo9WQRUT3dKYnjwih2yJD9mkrocEZXo1ex8G81dwSM1fwqWpWkeS3v86pgKt", - "xpub69H7F5d8KSRgmmdJg2KhpAK8SR3DjMwAdkxj3ZuxV27CprR9LgpeyGmXUbC6wb7ERfvrnKZjXoUmmDznezpbZb7ap6r1D3tgFxHmwMkQTPH"); + // m/0 + test_path(Bitcoin, &seed, [Normal(0)], + "xprv9vHkqa6EV4sPZHYqZznhT2NPtPCjKuDKGY38FBWLvgaDx45zo9WQRUT3dKYnjwih2yJD9mkrocEZXo1ex8G81dwSM1fwqWpWkeS3v86pgKt", + "xpub69H7F5d8KSRgmmdJg2KhpAK8SR3DjMwAdkxj3ZuxV27CprR9LgpeyGmXUbC6wb7ERfvrnKZjXoUmmDznezpbZb7ap6r1D3tgFxHmwMkQTPH"); - // m/0/2147483647h - test_path(Bitcoin, &seed, [Normal(0), Hardened(2147483647)], - "xprv9wSp6B7kry3Vj9m1zSnLvN3xH8RdsPP1Mh7fAaR7aRLcQMKTR2vidYEeEg2mUCTAwCd6vnxVrcjfy2kRgVsFawNzmjuHc2YmYRmagcEPdU9", - "xpub6ASAVgeehLbnwdqV6UKMHVzgqAG8Gr6riv3Fxxpj8ksbH9ebxaEyBLZ85ySDhKiLDBrQSARLq1uNRts8RuJiHjaDMBU4Zn9h8LZNnBC5y4a"); + // m/0/2147483647h + test_path(Bitcoin, &seed, [Normal(0), Hardened(2147483647)], + "xprv9wSp6B7kry3Vj9m1zSnLvN3xH8RdsPP1Mh7fAaR7aRLcQMKTR2vidYEeEg2mUCTAwCd6vnxVrcjfy2kRgVsFawNzmjuHc2YmYRmagcEPdU9", + "xpub6ASAVgeehLbnwdqV6UKMHVzgqAG8Gr6riv3Fxxpj8ksbH9ebxaEyBLZ85ySDhKiLDBrQSARLq1uNRts8RuJiHjaDMBU4Zn9h8LZNnBC5y4a"); - // m/0/2147483647h/1 - test_path(Bitcoin, &seed, [Normal(0), Hardened(2147483647), Normal(1)], - "xprv9zFnWC6h2cLgpmSA46vutJzBcfJ8yaJGg8cX1e5StJh45BBciYTRXSd25UEPVuesF9yog62tGAQtHjXajPPdbRCHuWS6T8XA2ECKADdw4Ef", - "xpub6DF8uhdarytz3FWdA8TvFSvvAh8dP3283MY7p2V4SeE2wyWmG5mg5EwVvmdMVCQcoNJxGoWaU9DCWh89LojfZ537wTfunKau47EL2dhHKon"); + // m/0/2147483647h/1 + test_path(Bitcoin, &seed, [Normal(0), Hardened(2147483647), Normal(1)], + 
"xprv9zFnWC6h2cLgpmSA46vutJzBcfJ8yaJGg8cX1e5StJh45BBciYTRXSd25UEPVuesF9yog62tGAQtHjXajPPdbRCHuWS6T8XA2ECKADdw4Ef", + "xpub6DF8uhdarytz3FWdA8TvFSvvAh8dP3283MY7p2V4SeE2wyWmG5mg5EwVvmdMVCQcoNJxGoWaU9DCWh89LojfZ537wTfunKau47EL2dhHKon"); - // m/0/2147483647h/1/2147483646h - test_path(Bitcoin, &seed, [Normal(0), Hardened(2147483647), Normal(1), Hardened(2147483646)], - "xprvA1RpRA33e1JQ7ifknakTFpgNXPmW2YvmhqLQYMmrj4xJXXWYpDPS3xz7iAxn8L39njGVyuoseXzU6rcxFLJ8HFsTjSyQbLYnMpCqE2VbFWc", - "xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL"); + // m/0/2147483647h/1/2147483646h + test_path(Bitcoin, &seed, [Normal(0), Hardened(2147483647), Normal(1), Hardened(2147483646)], + "xprvA1RpRA33e1JQ7ifknakTFpgNXPmW2YvmhqLQYMmrj4xJXXWYpDPS3xz7iAxn8L39njGVyuoseXzU6rcxFLJ8HFsTjSyQbLYnMpCqE2VbFWc", + "xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL"); - // m/0/2147483647h/1/2147483646h/2 - test_path(Bitcoin, &seed, [Normal(0), Hardened(2147483647), Normal(1), Hardened(2147483646), Normal(2)], - "xprvA2nrNbFZABcdryreWet9Ea4LvTJcGsqrMzxHx98MMrotbir7yrKCEXw7nadnHM8Dq38EGfSh6dqA9QWTyefMLEcBYJUuekgW4BYPJcr9E7j", - "xpub6FnCn6nSzZAw5Tw7cgR9bi15UV96gLZhjDstkXXxvCLsUXBGXPdSnLFbdpq8p9HmGsApME5hQTZ3emM2rnY5agb9rXpVGyy3bdW6EEgAtqt"); - } + // m/0/2147483647h/1/2147483646h/2 + test_path(Bitcoin, &seed, [Normal(0), Hardened(2147483647), Normal(1), Hardened(2147483646), Normal(2)], + "xprvA2nrNbFZABcdryreWet9Ea4LvTJcGsqrMzxHx98MMrotbir7yrKCEXw7nadnHM8Dq38EGfSh6dqA9QWTyefMLEcBYJUuekgW4BYPJcr9E7j", + "xpub6FnCn6nSzZAw5Tw7cgR9bi15UV96gLZhjDstkXXxvCLsUXBGXPdSnLFbdpq8p9HmGsApME5hQTZ3emM2rnY5agb9rXpVGyy3bdW6EEgAtqt"); + } - #[test] - pub fn encode_decode_childnumber() { - use serialize::json; + #[test] + pub fn encode_decode_childnumber() { + use serialize::json; - let h1 = Hardened(1); - let n1 = Normal(1); + let h1 = Hardened(1); + let n1 = Normal(1); - let h1_str = json::encode(&h1); - let n1_str = json::encode(&n1); + let h1_str = json::encode(&h1); + let n1_str = json::encode(&n1); - assert!(h1 != n1); - assert!(h1_str != n1_str); + assert!(h1 != n1); + assert!(h1_str != n1_str); - let h1_dec = json::decode(&h1_str).unwrap(); - let n1_dec = json::decode(&n1_str).unwrap(); - assert_eq!(h1, h1_dec); - assert_eq!(n1, n1_dec); - } + let h1_dec = json::decode(&h1_str).unwrap(); + let n1_dec = json::decode(&n1_str).unwrap(); + assert_eq!(h1, h1_dec); + assert_eq!(n1, n1_dec); + } - #[bench] - pub fn generate_sequential_normal_children(bh: &mut Bencher) { - let seed = "000102030405060708090a0b0c0d0e0f".from_hex().unwrap(); - let msk = ExtendedPrivKey::new_master(Bitcoin, &seed).unwrap(); - let mut i = 0; - bh.iter( || { - black_box(msk.ckd_priv(Normal(i))); - i += 1; - }) - } + #[bench] + pub fn generate_sequential_normal_children(bh: &mut Bencher) { + let seed = "000102030405060708090a0b0c0d0e0f".from_hex().unwrap(); + let msk = ExtendedPrivKey::new_master(Bitcoin, &seed).unwrap(); + let mut i = 0; + bh.iter( || { + black_box(msk.ckd_priv(Normal(i))); + i += 1; + }) + } - #[bench] - pub fn generate_sequential_hardened_children(bh: &mut Bencher) { - let seed = "000102030405060708090a0b0c0d0e0f".from_hex().unwrap(); - let msk = ExtendedPrivKey::new_master(Bitcoin, &seed).unwrap(); - let mut i = 0; - bh.iter( || { - black_box(msk.ckd_priv(Hardened(i))); - i += 1; - }) - } + #[bench] + pub fn generate_sequential_hardened_children(bh: &mut Bencher) { + let seed = 
"000102030405060708090a0b0c0d0e0f".from_hex().unwrap(); + let msk = ExtendedPrivKey::new_master(Bitcoin, &seed).unwrap(); + let mut i = 0; + bh.iter( || { + black_box(msk.ckd_priv(Hardened(i))); + i += 1; + }) + } - #[bench] - pub fn generate_sequential_public_children(bh: &mut Bencher) { - let seed = "000102030405060708090a0b0c0d0e0f".from_hex().unwrap(); - let msk = ExtendedPrivKey::new_master(Bitcoin, &seed).unwrap(); - let mpk = ExtendedPubKey::from_private(&msk); + #[bench] + pub fn generate_sequential_public_children(bh: &mut Bencher) { + let seed = "000102030405060708090a0b0c0d0e0f".from_hex().unwrap(); + let msk = ExtendedPrivKey::new_master(Bitcoin, &seed).unwrap(); + let mpk = ExtendedPubKey::from_private(&msk); - let mut i = 0; - bh.iter( || { - black_box(mpk.ckd_pub(Normal(i))); - i += 1; - }) - } + let mut i = 0; + bh.iter( || { + black_box(mpk.ckd_pub(Normal(i))); + i += 1; + }) + } - #[bench] - pub fn generate_sequential_public_child_addresses(bh: &mut Bencher) { - use wallet::address::Address; + #[bench] + pub fn generate_sequential_public_child_addresses(bh: &mut Bencher) { + use wallet::address::Address; - let seed = "000102030405060708090a0b0c0d0e0f".from_hex().unwrap(); - let msk = ExtendedPrivKey::new_master(Bitcoin, &seed).unwrap(); - let mpk = ExtendedPubKey::from_private(&msk); + let seed = "000102030405060708090a0b0c0d0e0f".from_hex().unwrap(); + let msk = ExtendedPrivKey::new_master(Bitcoin, &seed).unwrap(); + let mpk = ExtendedPubKey::from_private(&msk); - let mut i = 0; - bh.iter( || { - let epk = mpk.ckd_pub(Normal(i)).unwrap(); - black_box(Address::from_key(Bitcoin, &epk.public_key)); - i += 1; - }) - } + let mut i = 0; + bh.iter( || { + let epk = mpk.ckd_pub(Normal(i)).unwrap(); + black_box(Address::from_key(Bitcoin, &epk.public_key)); + i += 1; + }) + } } diff --git a/src/wallet/wallet.rs b/src/wallet/wallet.rs index 064ad804..940a1f29 100644 --- a/src/wallet/wallet.rs +++ b/src/wallet/wallet.rs @@ -1,6 +1,6 @@ // Rust Bitcoin Library // Written in 2014 by -// Andrew Poelstra +// Andrew Poelstra // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to // the public domain worldwide. This software is distributed without @@ -33,243 +33,243 @@ use wallet::address_index::AddressIndex; /// A Wallet error #[derive(Clone, PartialEq, Eq, Debug)] pub enum Error { - /// Tried to lookup an account by name, but none was found - AccountNotFound, - /// Tried to add an account when one already exists with that name - DuplicateAccount, - /// An error occured in a BIP32 derivation - Bip32Error(bip32::Error), - /// Tried to use a wallet without an address index - NoAddressIndex + /// Tried to lookup an account by name, but none was found + AccountNotFound, + /// Tried to add an account when one already exists with that name + DuplicateAccount, + /// An error occured in a BIP32 derivation + Bip32Error(bip32::Error), + /// Tried to use a wallet without an address index + NoAddressIndex } /// Each account has two chains, as specified in BIP32 pub enum AccountChain { - /// Internal addresses are used within the wallet for change, etc, - /// and in principle every generated one will be used. - Internal, - /// External addresses are shared, and might not be used after generatation, - /// complicating recreating the whole wallet from seed. - External + /// Internal addresses are used within the wallet for change, etc, + /// and in principle every generated one will be used. 
+ Internal, + /// External addresses are shared, and might not be used after generatation, + /// complicating recreating the whole wallet from seed. + External } /// An account #[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] pub struct Account { - internal_path: Vec, - internal_used: Vec, - internal_next: u32, - external_path: Vec, - external_used: Vec, - external_next: u32 + internal_path: Vec, + internal_used: Vec, + internal_next: u32, + external_path: Vec, + external_used: Vec, + external_next: u32 } impl Default for Account { - fn default() -> Account { - Account { - internal_path: vec![Hardened(0), Normal(1)], - internal_used: vec![], - internal_next: 0, - external_path: vec![Hardened(0), Normal(0)], - external_used: vec![], - external_next: 0 + fn default() -> Account { + Account { + internal_path: vec![Hardened(0), Normal(1)], + internal_used: vec![], + internal_next: 0, + external_path: vec![Hardened(0), Normal(0)], + external_used: vec![], + external_next: 0 + } } - } } /// A wallet #[derive(Clone, PartialEq, Eq, Debug)] pub struct Wallet { - master: ExtendedPrivKey, - accounts: HashMap, - index: Option + master: ExtendedPrivKey, + accounts: HashMap, + index: Option } impl Serialize for Wallet { - fn serialize(&self, s: &mut S) -> Result<(), S::Error> - where S: Serializer { - let len = self.accounts.len(); - try!(self.master.serialize(s)); - self.accounts.serialize(s) - } + fn serialize(&self, s: &mut S) -> Result<(), S::Error> + where S: Serializer { + let len = self.accounts.len(); + try!(self.master.serialize(s)); + self.accounts.serialize(s) + } } impl Deserialize for Wallet { - fn deserialize(d: &mut D) -> Result - where D: Deserializer { - Ok(Wallet { - master: try!(Deserialize::deserialize(d)), - accounts: try!(Deserialize::deserialize(d)), - index: None - }) - } + fn deserialize(d: &mut D) -> Result + where D: Deserializer { + Ok(Wallet { + master: try!(Deserialize::deserialize(d)), + accounts: try!(Deserialize::deserialize(d)), + index: None + }) + } } impl Wallet { - /// Creates a new wallet from a BIP32 seed - #[inline] - pub fn from_seed(network: Network, seed: &[u8]) -> Result { - let mut accounts = HashMap::new(); - accounts.insert(String::new(), Default::default()); + /// Creates a new wallet from a BIP32 seed + #[inline] + pub fn from_seed(network: Network, seed: &[u8]) -> Result { + let mut accounts = HashMap::new(); + accounts.insert(String::new(), Default::default()); - Ok(Wallet { - master: try!(ExtendedPrivKey::new_master(network, seed)), - accounts: accounts, - index: None - }) - } - - /// Creates the address index - #[inline] - pub fn build_index(&mut self, utxo_set: &UtxoSet) { - let new = AddressIndex::new(utxo_set, self); - self.index = Some(new); - } - - /// Accessor for the wallet's address index - #[inline] - pub fn index<'a>(&'a self) -> Option<&'a AddressIndex> { - self.index.as_ref() - } - - /// Mutable accessor for the wallet's address index - #[inline] - pub fn index_mut<'a>(&'a mut self) -> Option<&'a mut AddressIndex> { - self.index.as_mut() - } - - /// Adds an account to a wallet - pub fn account_insert(&mut self, name: String) - -> Result<(), Error> { - if self.accounts.contains_key(&name) { - return Err(Error::DuplicateAccount); + Ok(Wallet { + master: try!(ExtendedPrivKey::new_master(network, seed)), + accounts: accounts, + index: None + }) } - let idx = self.accounts.len() as u32; - self.accounts.insert(name, Account { - internal_path: vec![Hardened(idx), Normal(1)], - internal_used: vec![], - internal_next: 0, - 
external_path: vec![Hardened(idx), Normal(0)], - external_used: vec![], - external_next: 0 - }); - Ok(()) - } - - /// Locates an account in a wallet - #[inline] - pub fn account_get<'a>(&'a self, name: &str) -> Option<&'a Account> { - self.accounts.get(name) - } - - /// Create a new address - pub fn new_address(&mut self, - account: &str, - chain: AccountChain) - -> Result { - let account = self.accounts.get_mut(account); - let account = match account { Some(a) => a, None => return Err(Error::AccountNotFound) }; - let index = match self.index { Some(ref i) => i, None => return Err(Error::NoAddressIndex) }; - - let (mut i, master) = match chain { - Internal => (account.internal_next, - try!(ExtendedPrivKey::from_path( - &self.master, - account.internal_path.as_slice()).map_err(Error::Bip32Error))), - External => (account.external_next, - try!(ExtendedPrivKey::from_path( - &self.master, - account.external_path.as_slice()).map_err(Error::Bip32Error))), - }; - - // Scan for next admissible address - let mut sk = try!(master.ckd_priv(Normal(i)).map_err(Error::Bip32Error)); - let mut address = Address::from_key( - master.network, - &PublicKey::from_secret_key(&sk.secret_key, true)); - while !index.admissible_address(&address) { - i += 1; - sk = try!(master.ckd_priv(Normal(i)).map_err(Error::Bip32Error)); - address = Address::from_key( - master.network, - &PublicKey::from_secret_key(&sk.secret_key, true)); + /// Creates the address index + #[inline] + pub fn build_index(&mut self, utxo_set: &UtxoSet) { + let new = AddressIndex::new(utxo_set, self); + self.index = Some(new); } - match chain { - Internal => { - account.internal_used.push(Normal(i)); - account.internal_next = i + 1; - } - External => { - account.external_used.push(Normal(i)); - account.external_next = i + 1; - } + /// Accessor for the wallet's address index + #[inline] + pub fn index<'a>(&'a self) -> Option<&'a AddressIndex> { + self.index.as_ref() } - Ok(address) - } - - /// Returns the network of the wallet - #[inline] - pub fn network(&self) -> Network { - self.master.network - } - - /// Returns a key suitable for keying hash functions for DoS protection - #[inline] - pub fn siphash_key(&self) -> (u64, u64) { - (LittleEndian::read_u64(&self.master.chain_code[0..8]), - LittleEndian::read_u64(&self.master.chain_code[8..16])) - } - - /// Total balance - pub fn total_balance(&self) -> Result { - let mut ret = 0; - for (_, account) in self.accounts.iter() { - ret += try!(self.account_balance(account)); - } - Ok(ret) - } - - /// Account balance - pub fn balance(&self, account: &str) -> Result { - let account = self.accounts.get(account); - let account = match account { Some(a) => a, None => return Err(Error::AccountNotFound) }; - self.account_balance(account) - } - - fn account_balance(&self, account: &Account) -> Result { - let index = match self.index { Some(ref i) => i, None => return Err(Error::NoAddressIndex) }; - - let mut ret = 0; - - // Sum internal balance - let master = try!(ExtendedPrivKey::from_path( - &self.master, - account.internal_path.as_slice()).map_err(Error::Bip32Error)); - for &cnum in account.internal_used.iter() { - let sk = try!(master.ckd_priv(cnum).map_err(Error::Bip32Error)); - let pk = ExtendedPubKey::from_private(&sk); - let addr = Address::from_key(pk.network, &pk.public_key); - for out in index.find_by_script(&addr.script_pubkey()).iter() { - ret += out.txo.value; - } - } - // Sum external balance - let master = try!(ExtendedPrivKey::from_path( - &self.master, - 
account.external_path.as_slice()).map_err(Error::Bip32Error)); - for &cnum in account.external_used.iter() { - let sk = try!(master.ckd_priv(cnum).map_err(Error::Bip32Error)); - let pk = ExtendedPubKey::from_private(&sk); - let addr = Address::from_key(pk.network, &pk.public_key); - for out in index.find_by_script(&addr.script_pubkey()).iter() { - ret += out.txo.value; - } + /// Mutable accessor for the wallet's address index + #[inline] + pub fn index_mut<'a>(&'a mut self) -> Option<&'a mut AddressIndex> { + self.index.as_mut() } - Ok(ret) - } + /// Adds an account to a wallet + pub fn account_insert(&mut self, name: String) + -> Result<(), Error> { + if self.accounts.contains_key(&name) { + return Err(Error::DuplicateAccount); + } + + let idx = self.accounts.len() as u32; + self.accounts.insert(name, Account { + internal_path: vec![Hardened(idx), Normal(1)], + internal_used: vec![], + internal_next: 0, + external_path: vec![Hardened(idx), Normal(0)], + external_used: vec![], + external_next: 0 + }); + Ok(()) + } + + /// Locates an account in a wallet + #[inline] + pub fn account_get<'a>(&'a self, name: &str) -> Option<&'a Account> { + self.accounts.get(name) + } + + /// Create a new address + pub fn new_address(&mut self, + account: &str, + chain: AccountChain) + -> Result { + let account = self.accounts.get_mut(account); + let account = match account { Some(a) => a, None => return Err(Error::AccountNotFound) }; + let index = match self.index { Some(ref i) => i, None => return Err(Error::NoAddressIndex) }; + + let (mut i, master) = match chain { + Internal => (account.internal_next, + try!(ExtendedPrivKey::from_path( + &self.master, + account.internal_path.as_slice()).map_err(Error::Bip32Error))), + External => (account.external_next, + try!(ExtendedPrivKey::from_path( + &self.master, + account.external_path.as_slice()).map_err(Error::Bip32Error))), + }; + + // Scan for next admissible address + let mut sk = try!(master.ckd_priv(Normal(i)).map_err(Error::Bip32Error)); + let mut address = Address::from_key( + master.network, + &PublicKey::from_secret_key(&sk.secret_key, true)); + while !index.admissible_address(&address) { + i += 1; + sk = try!(master.ckd_priv(Normal(i)).map_err(Error::Bip32Error)); + address = Address::from_key( + master.network, + &PublicKey::from_secret_key(&sk.secret_key, true)); + } + + match chain { + Internal => { + account.internal_used.push(Normal(i)); + account.internal_next = i + 1; + } + External => { + account.external_used.push(Normal(i)); + account.external_next = i + 1; + } + } + + Ok(address) + } + + /// Returns the network of the wallet + #[inline] + pub fn network(&self) -> Network { + self.master.network + } + + /// Returns a key suitable for keying hash functions for DoS protection + #[inline] + pub fn siphash_key(&self) -> (u64, u64) { + (LittleEndian::read_u64(&self.master.chain_code[0..8]), + LittleEndian::read_u64(&self.master.chain_code[8..16])) + } + + /// Total balance + pub fn total_balance(&self) -> Result { + let mut ret = 0; + for (_, account) in self.accounts.iter() { + ret += try!(self.account_balance(account)); + } + Ok(ret) + } + + /// Account balance + pub fn balance(&self, account: &str) -> Result { + let account = self.accounts.get(account); + let account = match account { Some(a) => a, None => return Err(Error::AccountNotFound) }; + self.account_balance(account) + } + + fn account_balance(&self, account: &Account) -> Result { + let index = match self.index { Some(ref i) => i, None => return Err(Error::NoAddressIndex) }; + + let mut 
ret = 0; + + // Sum internal balance + let master = try!(ExtendedPrivKey::from_path( + &self.master, + account.internal_path.as_slice()).map_err(Error::Bip32Error)); + for &cnum in account.internal_used.iter() { + let sk = try!(master.ckd_priv(cnum).map_err(Error::Bip32Error)); + let pk = ExtendedPubKey::from_private(&sk); + let addr = Address::from_key(pk.network, &pk.public_key); + for out in index.find_by_script(&addr.script_pubkey()).iter() { + ret += out.txo.value; + } + } + // Sum external balance + let master = try!(ExtendedPrivKey::from_path( + &self.master, + account.external_path.as_slice()).map_err(Error::Bip32Error)); + for &cnum in account.external_used.iter() { + let sk = try!(master.ckd_priv(cnum).map_err(Error::Bip32Error)); + let pk = ExtendedPubKey::from_private(&sk); + let addr = Address::from_key(pk.network, &pk.public_key); + for out in index.find_by_script(&addr.script_pubkey()).iter() { + ret += out.txo.value; + } + } + + Ok(ret) + } }
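A note on the public-to-public derivation implemented by ckd_pub above: for a normal (non-hardened) child, BIP32 keys an HMAC-SHA512 with the parent chain code and feeds it the 33-byte compressed parent public key followed by the child index as four big-endian bytes. The left 32 bytes of the result are the scalar tweak added to the parent key (the add_exp_assign call) and the right 32 bytes become the child chain code; hardened children hash the parent private key instead, which is why ckd_pub returns CannotDeriveFromHardenedKey. The sketch below only assembles the HMAC input and splits its output; the helper names are invented for illustration, the HMAC itself is left to whatever implementation is at hand, and the code uses current Rust syntax rather than the 2015-era style of the patch.

    // Build the HMAC-SHA512 message for a normal child: serP(parent pubkey) || ser32(index).
    fn ckd_pub_hmac_input(parent_pubkey: &[u8; 33], index: u32) -> Vec<u8> {
        let mut data = Vec::with_capacity(37);
        data.extend_from_slice(parent_pubkey);
        data.extend_from_slice(&index.to_be_bytes());
        data
    }

    // Split the 64-byte HMAC output: the left half is the scalar tweak,
    // the right half is the child chain code.
    fn split_hmac_output(i: &[u8; 64]) -> ([u8; 32], [u8; 32]) {
        let mut tweak = [0u8; 32];
        let mut chain_code = [0u8; 32];
        tweak.copy_from_slice(&i[..32]);
        chain_code.copy_from_slice(&i[32..]);
        (tweak, chain_code)
    }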
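The ToBase58/FromBase58 implementations both work over the standard 78-byte extended-key payload, and the slice offsets in from_base58_layout spell the layout out: version bytes at 0..4 (0x0488ADE4 / 0x0488B21E for mainnet private and public keys, 0x04358394 / 0x043587CF for testnet), depth at 4, parent fingerprint at 5..9, child number at 9..13, chain code at 13..45, and the key material in the remaining 33 bytes (a 0x00 pad plus the private key, or the compressed public key). Because the decoder reads the child number from exactly four bytes and rejects anything that is not 78 bytes long, the 32-byte be_n buffer on the encoding side looks like it was meant to be a 4-byte buffer; that is an inference about this checkpoint code, not something the patch states. The sketch below shows the intended layout and the hardened-bit encoding of child numbers, with hypothetical helper names.

    // Encode a child number as ser32: hardened indices set the high bit (n + 2^31).
    fn encode_child_number(hardened: bool, n: u32) -> [u8; 4] {
        let raw = if hardened { n + (1 << 31) } else { n };
        raw.to_be_bytes()
    }

    // Invert the encoding, mirroring the cn_int test in from_base58_layout.
    fn decode_child_number(bytes: [u8; 4]) -> (bool, u32) {
        let raw = u32::from_be_bytes(bytes);
        if raw < (1 << 31) { (false, raw) } else { (true, raw - (1 << 31)) }
    }

    // Assemble the 78-byte payload for an extended public key.
    fn xpub_layout(version: [u8; 4],
                   depth: u8,
                   parent_fingerprint: [u8; 4],
                   child_number: [u8; 4],
                   chain_code: [u8; 32],
                   compressed_pubkey: [u8; 33]) -> Vec<u8> {
        let mut ret = Vec::with_capacity(78);
        ret.extend_from_slice(&version);
        ret.push(depth);
        ret.extend_from_slice(&parent_fingerprint);
        ret.extend_from_slice(&child_number);
        ret.extend_from_slice(&chain_code);
        ret.extend_from_slice(&compressed_pubkey); // an extended private key carries 0x00 || key here
        debug_assert_eq!(ret.len(), 78);
        ret
    }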
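In the wallet code, new_address walks the Normal(i) children of the selected account chain, skipping any child whose address the AddressIndex reports as inadmissible, then records the index it settled on and bumps that chain's next counter. The loop below is the same scan abstracted over the derivation step and the admissibility test so it stands on its own; the function name and signature are invented for illustration and are not part of the wallet API.

    // Scan forward from index i until the derived item passes the admissibility test.
    fn next_admissible<K, F, G>(mut i: u32, derive: F, admissible: G) -> (u32, K)
    where
        F: Fn(u32) -> K,
        G: Fn(&K) -> bool,
    {
        let mut k = derive(i);
        while !admissible(&k) {
            i += 1;
            k = derive(i);
        }
        (i, k)
    }

For example, next_admissible(0, |i| i * 2, |k| *k >= 6) returns (3, 6); in the wallet, the derive closure would be a ckd_priv call followed by Address::from_key, and the test would consult the address index.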
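Finally, siphash_key reads the first sixteen bytes of the master chain code as two little-endian u64 words; the doc comment describes the result as a key for DoS-resistant hashing of wallet data structures. A standalone equivalent using only the standard library, rather than the byteorder crate used in the patch, might look like the following; the function name is illustrative only.

    use std::convert::TryInto;

    // Read the first 16 bytes of the chain code as two little-endian u64 words.
    fn siphash_key_from_chain_code(chain_code: &[u8; 32]) -> (u64, u64) {
        let k0 = u64::from_le_bytes(chain_code[0..8].try_into().unwrap());
        let k1 = u64::from_le_bytes(chain_code[8..16].try_into().unwrap());
        (k0, k1)
    }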