Merge rust-bitcoin/rust-bitcoin#3486: Remove the I/O error variant when `consensus::deserialize`'ing

bbffa3db43 Remove the IO error from DecodeError (Tobin C. Harding)
713196be0d Return DeserError from encode::deserialize (Tobin C. Harding)
33566ac58c Split encode::Error into two parts (Tobin C. Harding)
b04142c745 Add encode::Error::MissingData variant (Tobin C. Harding)
5a42ef2850 Do not manually map IO error (Tobin C. Harding)
efd7f9f06c Add error constructor parse_failed_error (Tobin C. Harding)
ebfef3f114 Return generic error as Some (Tobin C. Harding)
a6254212dc Move consensus error code to submodule (Tobin C. Harding)

Pull request description:

  The `consensus::deserialize` and `consensus::deserialize_partial` functions should not return an I/O error. Doing so causes various other error types to include an `io::Error`, and `io::Error` is an annoying type to work with.

  This PR is a bunch of steps, and it took me a good while with quite a bit of backtracking to get here. As such you may want to review the final state before looking at each patch.

  The `consensus` errors can be further cleaned up but I'd prefer not to spend more time on this unless it has some chance of merging.
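
  For downstream code the practical effect is that a failed `deserialize` call no longer carries any `io::Error` to unpack. A minimal sketch (not part of this PR, assuming the `DeserializeError`/`ParseError` re-exports introduced here) of what call-site handling might look like:

  ```rust
  use bitcoin::consensus::{self, DeserializeError, ParseError};
  use bitcoin::Transaction;

  fn parse_tx(bytes: &[u8]) -> Result<Transaction, String> {
      match consensus::deserialize::<Transaction>(bytes) {
          Ok(tx) => Ok(tx),
          // Every failure is parse-level now; there is no I/O variant to handle.
          Err(DeserializeError::Parse(ParseError::MissingData)) => Err("truncated transaction".into()),
          Err(DeserializeError::Parse(e)) => Err(format!("invalid encoding: {}", e)),
          Err(DeserializeError::Unconsumed) => Err("trailing bytes after the transaction".into()),
          Err(e) => Err(e.to_string()), // both error enums are non_exhaustive
      }
  }
  ```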

ACKs for top commit:
  apoelstra:
    ACK bbffa3db43802b30d23259c0372f16a877a0ef8b; successfully ran local tests

Tree-SHA512: 522fdd29638a214cb7fcee29dd3b9f5c846f041fba087a56a91b83e6d85f033cbed95f659dc4321cd4596943ff233bdd184cdfbfcc787fe89172bb93aa4ab186
merge-script 2024-10-23 14:25:55 +00:00
commit c7fbebbad8
GPG Key ID: C588D63CE41B97C1
14 changed files with 383 additions and 241 deletions

View File

@@ -16,7 +16,7 @@ use crate::consensus::encode::{self, Decodable, Encodable, ReadExt, WriteExt};
 use crate::internal_macros::{impl_array_newtype_stringify, impl_consensus_encoding};
 use crate::prelude::Vec;
 use crate::transaction::TxIdentifier;
-use crate::{block, Block, BlockHash, Transaction};
+use crate::{block, consensus, Block, BlockHash, Transaction};
 
 /// A BIP-152 error
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -84,8 +84,9 @@ impl Decodable for PrefilledTransaction {
     #[inline]
     fn consensus_decode<R: BufRead + ?Sized>(r: &mut R) -> Result<Self, encode::Error> {
         let idx = r.read_compact_size()?;
-        let idx = u16::try_from(idx)
-            .map_err(|_| encode::Error::ParseFailed("BIP152 prefilled tx index out of bounds"))?;
+        let idx = u16::try_from(idx).map_err(|_| {
+            consensus::parse_failed_error("BIP152 prefilled tx index out of bounds")
+        })?;
         let tx = Transaction::consensus_decode(r)?;
         Ok(PrefilledTransaction { idx, tx })
     }
@@ -172,7 +173,7 @@ impl Decodable for HeaderAndShortIds {
         };
         match header_short_ids.short_ids.len().checked_add(header_short_ids.prefilled_txs.len()) {
             Some(x) if x <= u16::MAX.into() => Ok(header_short_ids),
-            _ => Err(encode::Error::ParseFailed("indexes overflowed 16 bits")),
+            _ => Err(consensus::parse_failed_error("indexes overflowed 16 bits")),
         }
     }
 }
@@ -312,12 +313,13 @@ impl Decodable for BlockTransactionsRequest {
         // transactions that would be allowed in a vector.
         let byte_size = nb_indexes
             .checked_mul(mem::size_of::<Transaction>())
-            .ok_or(encode::Error::ParseFailed("invalid length"))?;
+            .ok_or(consensus::parse_failed_error("invalid length"))?;
         if byte_size > encode::MAX_VEC_SIZE {
-            return Err(encode::Error::OversizedVectorAllocation {
+            return Err(encode::ParseError::OversizedVectorAllocation {
                 requested: byte_size,
                 max: encode::MAX_VEC_SIZE,
-            });
+            }
+            .into());
         }
 
         let mut indexes = Vec::with_capacity(nb_indexes);
@@ -326,12 +328,12 @@ impl Decodable for BlockTransactionsRequest {
                 let differential = r.read_compact_size()?;
                 last_index = match last_index.checked_add(differential) {
                     Some(i) => i,
-                    None => return Err(encode::Error::ParseFailed("block index overflow")),
+                    None => return Err(consensus::parse_failed_error("block index overflow")),
                 };
                 indexes.push(last_index);
                 last_index = match last_index.checked_add(1) {
                     Some(i) => i,
-                    None => return Err(encode::Error::ParseFailed("block index overflow")),
+                    None => return Err(consensus::parse_failed_error("block index overflow")),
                 };
             }
             indexes

View File

@@ -20,7 +20,7 @@ use io::{BufRead, Write};
 use primitives::Sequence;
 
 use super::Weight;
-use crate::consensus::{encode, Decodable, Encodable};
+use crate::consensus::{self, encode, Decodable, Encodable};
 use crate::internal_macros::{impl_consensus_encoding, impl_hashencode};
 use crate::locktime::absolute::{self, Height, Time};
 use crate::prelude::{Borrow, Vec};
@@ -894,7 +894,9 @@ impl Decodable for Transaction {
                     txin.witness = Decodable::consensus_decode_from_finite_reader(r)?;
                 }
                 if !input.is_empty() && input.iter().all(|input| input.witness.is_empty()) {
-                    Err(encode::Error::ParseFailed("witness flag set but no witnesses present"))
+                    Err(consensus::parse_failed_error(
+                        "witness flag set but no witnesses present",
+                    ))
                 } else {
                     Ok(Transaction {
                         version,
@@ -905,7 +907,7 @@ impl Decodable for Transaction {
                     }
                 }
                 // We don't support anything else
-                x => Err(encode::Error::UnsupportedSegwitFlag(x)),
+                x => Err(encode::ParseError::UnsupportedSegwitFlag(x).into()),
             }
         // non-segwit
         } else {
@@ -1487,7 +1489,7 @@ mod tests {
         let tx_bytes = hex!("0000fd000001021921212121212121212121f8b372b0239cc1dff600000000004f4f4f4f4f4f4f4f000000000000000000000000000000333732343133380d000000000000000000000000000000ff000000000009000dff000000000000000800000000000000000d");
         let tx: Result<Transaction, _> = deserialize(&tx_bytes);
         assert!(tx.is_err());
-        assert!(tx.unwrap_err().to_string().contains("witness flag set but no witnesses present"));
+        assert!(matches!(tx.unwrap_err(), crate::consensus::DeserializeError::Parse(_)));
     }
 
     #[test]

View File

@@ -26,10 +26,11 @@ impl Decodable for Witness {
         // Minimum size of witness element is 1 byte, so if the count is
         // greater than MAX_VEC_SIZE we must return an error.
         if witness_elements > MAX_VEC_SIZE {
-            return Err(self::Error::OversizedVectorAllocation {
+            return Err(encode::ParseError::OversizedVectorAllocation {
                 requested: witness_elements,
                 max: MAX_VEC_SIZE,
-            });
+            }
+            .into());
         }
         if witness_elements == 0 {
             Ok(Witness::default())
@@ -48,21 +49,26 @@ impl Decodable for Witness {
             let element_size_len = compact_size::encoded_size(element_size);
             let required_len = cursor
                 .checked_add(element_size)
-                .ok_or(self::Error::OversizedVectorAllocation {
-                    requested: usize::MAX,
-                    max: MAX_VEC_SIZE,
-                })?
+                .ok_or(encode::Error::Parse(
+                    encode::ParseError::OversizedVectorAllocation {
+                        requested: usize::MAX,
+                        max: MAX_VEC_SIZE,
+                    },
+                ))?
                 .checked_add(element_size_len)
-                .ok_or(self::Error::OversizedVectorAllocation {
-                    requested: usize::MAX,
-                    max: MAX_VEC_SIZE,
-                })?;
+                .ok_or(encode::Error::Parse(
+                    encode::ParseError::OversizedVectorAllocation {
+                        requested: usize::MAX,
+                        max: MAX_VEC_SIZE,
+                    },
+                ))?;
 
             if required_len > MAX_VEC_SIZE + witness_index_space {
-                return Err(self::Error::OversizedVectorAllocation {
+                return Err(encode::ParseError::OversizedVectorAllocation {
                     requested: required_len,
                     max: MAX_VEC_SIZE,
-                });
+                }
+                .into());
             }
 
             // We will do content.rotate_left(witness_index_space) later.

View File

@@ -14,132 +14,29 @@
 //! scripts come with an opcode decode, hashes are big-endian, numbers are
 //! typically big-endian decimals, etc.)
 
-use core::{fmt, mem};
+use core::mem;
 
 use hashes::{sha256, sha256d, GeneralHash, Hash};
-use hex::error::{InvalidCharError, OddLengthStringError};
-use internals::{compact_size, write_err, ToU64};
+use hex::DisplayHex as _;
+use internals::{compact_size, ToU64};
 use io::{BufRead, Cursor, Read, Write};
 
+use super::IterReader;
 use crate::bip152::{PrefilledTransaction, ShortId};
 use crate::bip158::{FilterHash, FilterHeader};
 use crate::block::{self, BlockHash};
-use crate::consensus::{DecodeError, IterReader};
 use crate::merkle_tree::TxMerkleNode;
 #[cfg(feature = "std")]
 use crate::p2p::{
     address::{AddrV2Message, Address},
     message_blockdata::Inventory,
 };
-use crate::prelude::{rc, sync, Box, Cow, DisplayHex, String, Vec};
+use crate::prelude::{rc, sync, Box, Cow, String, Vec};
 use crate::taproot::TapLeafHash;
 use crate::transaction::{Transaction, TxIn, TxOut};
 
-/// Encoding error.
-#[derive(Debug)]
-#[non_exhaustive]
-pub enum Error {
-    /// And I/O error.
-    Io(io::Error),
-    /// Tried to allocate an oversized vector.
-    OversizedVectorAllocation {
-        /// The capacity requested.
-        requested: usize,
-        /// The maximum capacity.
-        max: usize,
-    },
-    /// Checksum was invalid.
-    InvalidChecksum {
-        /// The expected checksum.
-        expected: [u8; 4],
-        /// The invalid checksum.
-        actual: [u8; 4],
-    },
-    /// VarInt was encoded in a non-minimal way.
-    NonMinimalVarInt,
-    /// Parsing error.
-    ParseFailed(&'static str),
-    /// Unsupported Segwit flag.
-    UnsupportedSegwitFlag(u8),
-}
-
-internals::impl_from_infallible!(Error);
-
-impl fmt::Display for Error {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        use Error::*;
-
-        match *self {
-            Io(ref e) => write_err!(f, "IO error"; e),
-            OversizedVectorAllocation { requested: ref r, max: ref m } =>
-                write!(f, "allocation of oversized vector: requested {}, maximum {}", r, m),
-            InvalidChecksum { expected: ref e, actual: ref a } =>
-                write!(f, "invalid checksum: expected {:x}, actual {:x}", e.as_hex(), a.as_hex()),
-            NonMinimalVarInt => write!(f, "non-minimal varint"),
-            ParseFailed(ref s) => write!(f, "parse failed: {}", s),
-            UnsupportedSegwitFlag(ref swflag) =>
-                write!(f, "unsupported segwit version: {}", swflag),
-        }
-    }
-}
-
-#[cfg(feature = "std")]
-impl std::error::Error for Error {
-    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
-        use Error::*;
-
-        match self {
-            Io(e) => Some(e),
-            OversizedVectorAllocation { .. }
-            | InvalidChecksum { .. }
-            | NonMinimalVarInt
-            | ParseFailed(_)
-            | UnsupportedSegwitFlag(_) => None,
-        }
-    }
-}
-
-impl From<io::Error> for Error {
-    fn from(error: io::Error) -> Self { Error::Io(error) }
-}
-
-/// Hex deserialization error.
-#[derive(Debug)]
-pub enum FromHexError {
-    /// Purported hex string had odd length.
-    OddLengthString(OddLengthStringError),
-    /// Decoding error.
-    Decode(DecodeError<InvalidCharError>),
-}
-
-impl fmt::Display for FromHexError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        use FromHexError::*;
-
-        match *self {
-            OddLengthString(ref e) =>
-                write_err!(f, "odd length, failed to create bytes from hex"; e),
-            Decode(ref e) => write_err!(f, "decoding error"; e),
-        }
-    }
-}
-
-#[cfg(feature = "std")]
-impl std::error::Error for FromHexError {
-    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
-        use FromHexError::*;
-
-        match *self {
-            OddLengthString(ref e) => Some(e),
-            Decode(ref e) => Some(e),
-        }
-    }
-}
-
-impl From<OddLengthStringError> for FromHexError {
-    #[inline]
-    fn from(e: OddLengthStringError) -> Self { Self::OddLengthString(e) }
-}
+#[rustfmt::skip] // Keep public re-exports separate.
+pub use super::{Error, FromHexError, ParseError, DeserializeError};
 
 /// Encodes an object into a vector.
 pub fn serialize<T: Encodable + ?Sized>(data: &T) -> Vec<u8> {
@@ -156,14 +53,14 @@ pub fn serialize_hex<T: Encodable + ?Sized>(data: &T) -> String {
 
 /// Deserializes an object from a vector, will error if said deserialization
 /// doesn't consume the entire vector.
-pub fn deserialize<T: Decodable>(data: &[u8]) -> Result<T, Error> {
+pub fn deserialize<T: Decodable>(data: &[u8]) -> Result<T, DeserializeError> {
     let (rv, consumed) = deserialize_partial(data)?;
 
     // Fail if data are not consumed entirely.
     if consumed == data.len() {
         Ok(rv)
     } else {
-        Err(Error::ParseFailed("data not consumed entirely when explicitly deserializing"))
+        Err(DeserializeError::Unconsumed)
     }
 }
@@ -177,9 +74,15 @@ pub fn deserialize_hex<T: Decodable>(hex: &str) -> Result<T, FromHexError> {
 
 /// Deserializes an object from a vector, but will not report an error if said deserialization
 /// doesn't consume the entire vector.
-pub fn deserialize_partial<T: Decodable>(data: &[u8]) -> Result<(T, usize), Error> {
+pub fn deserialize_partial<T: Decodable>(data: &[u8]) -> Result<(T, usize), ParseError> {
     let mut decoder = Cursor::new(data);
-    let rv = Decodable::consensus_decode_from_finite_reader(&mut decoder)?;
+
+    let rv = match Decodable::consensus_decode_from_finite_reader(&mut decoder) {
+        Ok(rv) => rv,
+        Err(Error::Parse(e)) => return Err(e),
+        Err(Error::Io(_)) =>
+            unreachable!("consensus_decode code never returns an I/O error for in-memory reads"),
+    };
     let consumed = decoder.position() as usize;
 
     Ok((rv, consumed))
@@ -263,7 +166,7 @@ macro_rules! decoder_fn {
         #[inline]
         fn $name(&mut self) -> core::result::Result<$val_type, Error> {
             let mut val = [0; $byte_len];
-            self.read_exact(&mut val[..]).map_err(Error::Io)?;
+            self.read_exact(&mut val[..])?;
             Ok(<$val_type>::from_le_bytes(val))
         }
     };
@@ -319,9 +222,7 @@ impl<R: Read + ?Sized> ReadExt for R {
     #[inline]
     fn read_bool(&mut self) -> Result<bool, Error> { ReadExt::read_i8(self).map(|bit| bit != 0) }
     #[inline]
-    fn read_slice(&mut self, slice: &mut [u8]) -> Result<(), Error> {
-        self.read_exact(slice).map_err(Error::Io)
-    }
+    fn read_slice(&mut self, slice: &mut [u8]) -> Result<(), Error> { Ok(self.read_exact(slice)?) }
     #[inline]
     #[rustfmt::skip] // Formatter munges code comments below.
     fn read_compact_size(&mut self) -> Result<u64, Error> {
@@ -329,7 +230,7 @@ impl<R: Read + ?Sized> ReadExt for R {
             0xFF => {
                 let x = self.read_u64()?;
                 if x < 0x1_0000_0000 { // I.e., would have fit in a `u32`.
-                    Err(Error::NonMinimalVarInt)
+                    Err(ParseError::NonMinimalVarInt.into())
                 } else {
                     Ok(x)
                 }
@@ -337,7 +238,7 @@ impl<R: Read + ?Sized> ReadExt for R {
             0xFE => {
                 let x = self.read_u32()?;
                 if x < 0x1_0000 { // I.e., would have fit in a `u16`.
-                    Err(Error::NonMinimalVarInt)
+                    Err(ParseError::NonMinimalVarInt.into())
                 } else {
                     Ok(x as u64)
                 }
@@ -345,7 +246,7 @@ impl<R: Read + ?Sized> ReadExt for R {
             0xFD => {
                 let x = self.read_u16()?;
                 if x < 0xFD { // Could have been encoded as a `u8`.
-                    Err(Error::NonMinimalVarInt)
+                    Err(ParseError::NonMinimalVarInt.into())
                 } else {
                     Ok(x as u64)
                 }
@@ -521,7 +422,7 @@ impl Decodable for String {
     #[inline]
     fn consensus_decode<R: BufRead + ?Sized>(r: &mut R) -> Result<String, Error> {
         String::from_utf8(Decodable::consensus_decode(r)?)
-            .map_err(|_| self::Error::ParseFailed("String was not valid UTF8"))
+            .map_err(|_| super::parse_failed_error("String was not valid UTF8"))
     }
 }
@@ -536,7 +437,7 @@ impl Decodable for Cow<'static, str> {
     #[inline]
     fn consensus_decode<R: BufRead + ?Sized>(r: &mut R) -> Result<Cow<'static, str>, Error> {
         String::from_utf8(Decodable::consensus_decode(r)?)
-            .map_err(|_| self::Error::ParseFailed("String was not valid UTF8"))
+            .map_err(|_| super::parse_failed_error("String was not valid UTF8"))
             .map(Cow::Owned)
     }
 }
@@ -754,7 +655,8 @@ impl Decodable for CheckedData {
         let data = read_bytes_from_finite_reader(r, opts)?;
         let expected_checksum = sha2_checksum(&data);
         if expected_checksum != checksum {
-            Err(self::Error::InvalidChecksum { expected: expected_checksum, actual: checksum })
+            Err(ParseError::InvalidChecksum { expected: expected_checksum, actual: checksum }
+                .into())
         } else {
             Ok(CheckedData { data, checksum })
         }
@@ -859,6 +761,7 @@ impl Decodable for TapLeafHash {
 #[cfg(test)]
 mod tests {
+    use core::fmt;
     use core::mem::discriminant;
 
     use super::*;
@@ -962,50 +865,50 @@ mod tests {
             discriminant(
                 &test_varint_encode(0xFF, &(0x100000000_u64 - 1).to_le_bytes()).unwrap_err()
             ),
-            discriminant(&Error::NonMinimalVarInt)
+            discriminant(&ParseError::NonMinimalVarInt.into())
         );
         assert_eq!(
             discriminant(&test_varint_encode(0xFE, &(0x10000_u64 - 1).to_le_bytes()).unwrap_err()),
-            discriminant(&Error::NonMinimalVarInt)
+            discriminant(&ParseError::NonMinimalVarInt.into())
        );
        assert_eq!(
            discriminant(&test_varint_encode(0xFD, &(0xFD_u64 - 1).to_le_bytes()).unwrap_err()),
-            discriminant(&Error::NonMinimalVarInt)
+            discriminant(&ParseError::NonMinimalVarInt.into())
        );
        assert_eq!(
            discriminant(&deserialize::<Vec<u8>>(&[0xfd, 0x00, 0x00]).unwrap_err()),
-            discriminant(&Error::NonMinimalVarInt)
+            discriminant(&ParseError::NonMinimalVarInt.into())
        );
        assert_eq!(
            discriminant(&deserialize::<Vec<u8>>(&[0xfd, 0xfc, 0x00]).unwrap_err()),
-            discriminant(&Error::NonMinimalVarInt)
+            discriminant(&ParseError::NonMinimalVarInt.into())
        );
        assert_eq!(
            discriminant(&deserialize::<Vec<u8>>(&[0xfd, 0xfc, 0x00]).unwrap_err()),
-            discriminant(&Error::NonMinimalVarInt)
+            discriminant(&ParseError::NonMinimalVarInt.into())
        );
        assert_eq!(
            discriminant(&deserialize::<Vec<u8>>(&[0xfe, 0xff, 0x00, 0x00, 0x00]).unwrap_err()),
-            discriminant(&Error::NonMinimalVarInt)
+            discriminant(&ParseError::NonMinimalVarInt.into())
        );
        assert_eq!(
            discriminant(&deserialize::<Vec<u8>>(&[0xfe, 0xff, 0xff, 0x00, 0x00]).unwrap_err()),
-            discriminant(&Error::NonMinimalVarInt)
+            discriminant(&ParseError::NonMinimalVarInt.into())
        );
        assert_eq!(
            discriminant(
                &deserialize::<Vec<u8>>(&[0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
                    .unwrap_err()
            ),
-            discriminant(&Error::NonMinimalVarInt)
+            discriminant(&ParseError::NonMinimalVarInt.into())
        );
        assert_eq!(
            discriminant(
                &deserialize::<Vec<u8>>(&[0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00])
                    .unwrap_err()
            ),
-            discriminant(&Error::NonMinimalVarInt)
+            discriminant(&ParseError::NonMinimalVarInt.into())
        );
 
        let mut vec_256 = vec![0; 259];
@@ -1131,13 +1034,11 @@ mod tests {
         ])
         .is_err());
 
-        let rand_io_err = Error::Io(io::Error::new(io::ErrorKind::Other, ""));
-
         // Check serialization that `if len > MAX_VEC_SIZE {return err}` isn't inclusive,
-        // by making sure it fails with IO Error and not an `OversizedVectorAllocation` Error.
+        // by making sure it fails with `MissingData` and not an `OversizedVectorAllocation` Error.
         let err =
             deserialize::<CheckedData>(&serialize(&(super::MAX_VEC_SIZE as u32))).unwrap_err();
-        assert_eq!(discriminant(&err), discriminant(&rand_io_err));
+        assert_eq!(err, DeserializeError::Parse(ParseError::MissingData));
 
         test_len_is_max_vec::<u8>();
         test_len_is_max_vec::<BlockHash>();
@@ -1159,11 +1060,10 @@ mod tests {
         Vec<T>: Decodable,
         T: fmt::Debug,
     {
-        let rand_io_err = Error::Io(io::Error::new(io::ErrorKind::Other, ""));
         let mut buf = Vec::new();
         buf.emit_compact_size(super::MAX_VEC_SIZE / mem::size_of::<T>()).unwrap();
         let err = deserialize::<Vec<T>>(&buf).unwrap_err();
-        assert_eq!(discriminant(&err), discriminant(&rand_io_err));
+        assert_eq!(err, DeserializeError::Parse(ParseError::MissingData));
     }
 
     #[test]
@@ -1264,7 +1164,7 @@ mod tests {
         hex.push_str("abcdef");
         assert!(matches!(
             deserialize_hex::<Transaction>(&hex).unwrap_err(),
-            FromHexError::Decode(DecodeError::TooManyBytes)
+            FromHexError::Decode(DecodeError::Unconsumed)
         ));
     }
 }
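
For orientation (not part of the diff): a minimal sketch of calling the reworked functions, assuming the `bitcoin::consensus` re-exports shown above. `deserialize_partial` now surfaces `ParseError` directly, with no I/O variant to handle.

```rust
use bitcoin::consensus::{deserialize_partial, ParseError};
use bitcoin::Transaction;

// Decode a transaction from the front of a buffer, ignoring any trailing bytes.
fn parse_prefix(bytes: &[u8]) -> Result<(Transaction, usize), ParseError> {
    let (tx, consumed) = deserialize_partial::<Transaction>(bytes)?;
    Ok((tx, consumed))
}
```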

View File

@@ -0,0 +1,250 @@
// SPDX-License-Identifier: CC0-1.0

//! Consensus encoding errors.

use core::fmt;

use hex::error::{InvalidCharError, OddLengthStringError};
use hex::DisplayHex as _;
use internals::write_err;

#[cfg(doc)]
use super::IterReader;

/// Error deserializing from a slice.
#[derive(Debug, PartialEq, Eq)]
#[non_exhaustive]
pub enum DeserializeError {
    /// Error parsing encoded object.
    Parse(ParseError),
    /// Data unconsumed error.
    Unconsumed,
}

internals::impl_from_infallible!(DeserializeError);

impl fmt::Display for DeserializeError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use DeserializeError::*;

        match *self {
            Parse(ref e) => write_err!(f, "error parsing encoded object"; e),
            Unconsumed => write!(f, "data not consumed entirely when deserializing"),
        }
    }
}

#[cfg(feature = "std")]
impl std::error::Error for DeserializeError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        use DeserializeError::*;

        match *self {
            Parse(ref e) => Some(e),
            Unconsumed => None,
        }
    }
}

impl From<ParseError> for DeserializeError {
    fn from(e: ParseError) -> Self { Self::Parse(e) }
}

/// Error when consensus decoding from an [`IterReader`].
///
/// This is the same as a `DeserializeError` with an additional variant to return any error yielded
/// by the inner bytes iterator.
#[derive(Debug)]
pub enum DecodeError<E> {
    /// Invalid consensus encoding.
    Parse(ParseError),
    /// Data unconsumed error.
    Unconsumed,
    /// Other decoding error.
    Other(E), // Yielded by the inner iterator.
}

internals::impl_from_infallible!(DecodeError<E>);

impl<E: fmt::Debug> fmt::Display for DecodeError<E> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use DecodeError::*;

        match *self {
            Parse(ref e) => write_err!(f, "error parsing encoded object"; e),
            Unconsumed => write!(f, "data not consumed entirely when deserializing"),
            Other(ref other) => write!(f, "other decoding error: {:?}", other),
        }
    }
}

#[cfg(feature = "std")]
impl<E: fmt::Debug + std::error::Error + 'static> std::error::Error for DecodeError<E> {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        use DecodeError::*;

        match *self {
            Parse(ref e) => Some(e),
            Unconsumed => None,
            Other(ref e) => Some(e),
        }
    }
}

/// Encoding error.
#[derive(Debug)]
#[non_exhaustive]
pub enum Error {
    /// An I/O error.
    Io(io::Error),
    /// Error parsing encoded object.
    Parse(ParseError),
}

internals::impl_from_infallible!(Error);

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use Error::*;

        match *self {
            Io(ref e) => write_err!(f, "IO error"; e),
            Parse(ref e) => write_err!(f, "error parsing encoded object"; e),
        }
    }
}

#[cfg(feature = "std")]
impl std::error::Error for Error {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        use Error::*;

        match *self {
            Io(ref e) => Some(e),
            Parse(ref e) => Some(e),
        }
    }
}

impl From<io::Error> for Error {
    fn from(e: io::Error) -> Self {
        use io::ErrorKind;

        match e.kind() {
            ErrorKind::UnexpectedEof => Error::Parse(ParseError::MissingData),
            _ => Error::Io(e),
        }
    }
}

impl From<ParseError> for Error {
    fn from(e: ParseError) -> Self { Error::Parse(e) }
}

/// Encoding is invalid.
#[derive(Debug, PartialEq, Eq)]
#[non_exhaustive]
pub enum ParseError {
    /// Missing data (early end of file or slice too short).
    MissingData, // TODO: Can we add more context?
    /// Tried to allocate an oversized vector.
    OversizedVectorAllocation {
        /// The capacity requested.
        requested: usize,
        /// The maximum capacity.
        max: usize,
    },
    /// Checksum was invalid.
    InvalidChecksum {
        /// The expected checksum.
        expected: [u8; 4],
        /// The invalid checksum.
        actual: [u8; 4],
    },
    /// VarInt was encoded in a non-minimal way.
    NonMinimalVarInt,
    /// Parsing error.
    ParseFailed(&'static str),
    /// Unsupported Segwit flag.
    UnsupportedSegwitFlag(u8),
}

internals::impl_from_infallible!(ParseError);

impl fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use ParseError::*;

        match *self {
            MissingData => write!(f, "missing data (early end of file or slice too short)"),
            OversizedVectorAllocation { requested: ref r, max: ref m } =>
                write!(f, "allocation of oversized vector: requested {}, maximum {}", r, m),
            InvalidChecksum { expected: ref e, actual: ref a } =>
                write!(f, "invalid checksum: expected {:x}, actual {:x}", e.as_hex(), a.as_hex()),
            NonMinimalVarInt => write!(f, "non-minimal varint"),
            ParseFailed(ref s) => write!(f, "parse failed: {}", s),
            UnsupportedSegwitFlag(ref swflag) =>
                write!(f, "unsupported segwit version: {}", swflag),
        }
    }
}

#[cfg(feature = "std")]
impl std::error::Error for ParseError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        use ParseError::*;

        match self {
            MissingData
            | OversizedVectorAllocation { .. }
            | InvalidChecksum { .. }
            | NonMinimalVarInt
            | ParseFailed(_)
            | UnsupportedSegwitFlag(_) => None,
        }
    }
}

/// Hex deserialization error.
#[derive(Debug)]
pub enum FromHexError {
    /// Purported hex string had odd length.
    OddLengthString(OddLengthStringError),
    /// Decoding error.
    Decode(DecodeError<InvalidCharError>),
}

impl fmt::Display for FromHexError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use FromHexError::*;

        match *self {
            OddLengthString(ref e) =>
                write_err!(f, "odd length, failed to create bytes from hex"; e),
            Decode(ref e) => write_err!(f, "decoding error"; e),
        }
    }
}

#[cfg(feature = "std")]
impl std::error::Error for FromHexError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        use FromHexError::*;

        match *self {
            OddLengthString(ref e) => Some(e),
            Decode(ref e) => Some(e),
        }
    }
}

impl From<OddLengthStringError> for FromHexError {
    #[inline]
    fn from(e: OddLengthStringError) -> Self { Self::OddLengthString(e) }
}

/// Constructs an `Error::ParseFailed` error.
// This whole variant should go away because of the inner string.
pub(crate) fn parse_failed_error(msg: &'static str) -> Error {
    Error::Parse(ParseError::ParseFailed(msg))
}
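
For orientation (not part of the diff): under the module above, an `UnexpectedEof` from the reader is folded into `ParseError::MissingData` by the `From<io::Error>` impl, so only genuine reader failures remain as `Error::Io`. A small sketch, assuming the `bitcoin::consensus::encode` re-exports:

```rust
use bitcoin::consensus::encode::{Error, ParseError};

fn describe(err: Error) -> &'static str {
    match err {
        Error::Parse(ParseError::MissingData) => "reader hit EOF mid-object",
        Error::Parse(_) => "data was structurally invalid",
        Error::Io(_) => "the underlying reader failed",
        _ => "other (the enum is non_exhaustive)",
    }
}
```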

View File

@@ -6,12 +6,12 @@
 //! conform to Bitcoin consensus.
 
 pub mod encode;
+mod error;
 #[cfg(feature = "serde")]
 pub mod serde;
 
 use core::fmt;
 
-use internals::write_err;
 use io::{BufRead, Read};
 
 use crate::consensus;
@@ -20,7 +20,9 @@ use crate::consensus;
 #[doc(inline)]
 pub use self::{
     encode::{deserialize, deserialize_partial, serialize, Decodable, Encodable, ReadExt, WriteExt},
+    error::{Error, FromHexError, DecodeError, ParseError, DeserializeError},
 };
+pub(crate) use self::error::parse_failed_error;
 
 struct IterReader<E: fmt::Debug, I: Iterator<Item = Result<u8, E>>> {
     iterator: core::iter::Fuse<I>,
@@ -36,12 +38,12 @@ impl<E: fmt::Debug, I: Iterator<Item = Result<u8, E>>> IterReader<E, I> {
     fn decode<T: Decodable>(mut self) -> Result<T, DecodeError<E>> {
         let result = T::consensus_decode(&mut self);
         match (result, self.error) {
-            (Ok(_), None) if self.iterator.next().is_some() => Err(DecodeError::TooManyBytes),
+            (Ok(_), None) if self.iterator.next().is_some() => Err(DecodeError::Unconsumed),
             (Ok(value), None) => Ok(value),
             (Ok(_), Some(error)) => panic!("{} silently ate the error: {:?}", core::any::type_name::<T>(), error),
             (Err(consensus::encode::Error::Io(io_error)), Some(de_error)) if io_error.kind() == io::ErrorKind::Other && io_error.get_ref().is_none() => Err(DecodeError::Other(de_error)),
-            (Err(consensus_error), None) => Err(DecodeError::Consensus(consensus_error)),
+            (Err(consensus::encode::Error::Parse(parse_error)), None) => Err(DecodeError::Parse(parse_error)),
             (Err(consensus::encode::Error::Io(io_error)), de_error) => panic!("unexpected IO error {:?} returned from {}::consensus_decode(), deserialization error: {:?}", io_error, core::any::type_name::<T>(), de_error),
             (Err(consensus_error), Some(de_error)) => panic!("{} should've returned `Other` IO error because of deserialization error {:?} but it returned consensus error {:?} instead", core::any::type_name::<T>(), de_error, consensus_error),
         }
@@ -102,42 +104,3 @@ impl<E: fmt::Debug, I: Iterator<Item = Result<u8, E>>> BufRead for IterReader<E,
         }
     }
 }
-
-/// Error when consensus decoding from an `[IterReader]`.
-#[derive(Debug)]
-pub enum DecodeError<E> {
-    /// Attempted to decode an object from an iterator that yielded too many bytes.
-    TooManyBytes,
-    /// Invalid consensus encoding.
-    Consensus(consensus::encode::Error),
-    /// Other decoding error.
-    Other(E),
-}
-
-internals::impl_from_infallible!(DecodeError<E>);
-
-impl<E: fmt::Debug> fmt::Display for DecodeError<E> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        use DecodeError::*;
-
-        match *self {
-            TooManyBytes =>
-                write!(f, "attempted to decode object from an iterator that yielded too many bytes"),
-            Consensus(ref e) => write_err!(f, "invalid consensus encoding"; e),
-            Other(ref other) => write!(f, "other decoding error: {:?}", other),
-        }
-    }
-}
-
-#[cfg(feature = "std")]
-impl<E: fmt::Debug> std::error::Error for DecodeError<E> {
-    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
-        use DecodeError::*;
-
-        match *self {
-            TooManyBytes => None,
-            Consensus(ref e) => Some(e),
-            Other(_) => None, // TODO: Is this correct?
-        }
-    }
-}

View File

@@ -17,8 +17,7 @@ use serde::de::{SeqAccess, Unexpected, Visitor};
 use serde::ser::SerializeSeq;
 use serde::{Deserializer, Serializer};
 
-use super::encode::Error as ConsensusError;
-use super::{Decodable, Encodable};
+use super::{Decodable, Encodable, ParseError};
 use crate::consensus::{DecodeError, IterReader};
 
 /// Hex-encoding strategy
@@ -358,24 +357,25 @@ impl<D: fmt::Display> serde::de::Expected for DisplayExpected<D> {
 }
 
 // not a trait impl because we panic on some variants
-fn consensus_error_into_serde<E: serde::de::Error>(error: ConsensusError) -> E {
+fn consensus_error_into_serde<E: serde::de::Error>(error: ParseError) -> E {
     match error {
-        ConsensusError::Io(error) => panic!("unexpected IO error {:?}", error),
-        ConsensusError::OversizedVectorAllocation { requested, max } => E::custom(format_args!(
+        ParseError::MissingData =>
+            E::custom("missing data (early end of file or slice too short)"),
+        ParseError::OversizedVectorAllocation { requested, max } => E::custom(format_args!(
             "the requested allocation of {} items exceeds maximum of {}",
             requested, max
         )),
-        ConsensusError::InvalidChecksum { expected, actual } => E::invalid_value(
+        ParseError::InvalidChecksum { expected, actual } => E::invalid_value(
            Unexpected::Bytes(&actual),
            &DisplayExpected(format_args!(
                "checksum {:02x}{:02x}{:02x}{:02x}",
                expected[0], expected[1], expected[2], expected[3]
            )),
        ),
-        ConsensusError::NonMinimalVarInt =>
+        ParseError::NonMinimalVarInt =>
            E::custom(format_args!("compact size was not encoded minimally")),
-        ConsensusError::ParseFailed(msg) => E::custom(msg),
-        ConsensusError::UnsupportedSegwitFlag(flag) =>
+        ParseError::ParseFailed(msg) => E::custom(msg),
+        ParseError::UnsupportedSegwitFlag(flag) =>
            E::invalid_value(Unexpected::Unsigned(flag.into()), &"segwit version 1 flag"),
    }
 }
@@ -387,8 +387,8 @@
     fn unify(self) -> E {
         match self {
             DecodeError::Other(error) => error,
-            DecodeError::TooManyBytes => E::custom(format_args!("got more bytes than expected")),
-            DecodeError::Consensus(error) => consensus_error_into_serde(error),
+            DecodeError::Unconsumed => E::custom(format_args!("got more bytes than expected")),
+            DecodeError::Parse(e) => consensus_error_into_serde(e),
         }
     }
 }
@@ -400,8 +400,8 @@
     fn into_de_error<DE: serde::de::Error>(self) -> DE {
         match self {
             DecodeError::Other(error) => error.into_de_error(),
-            DecodeError::TooManyBytes => DE::custom(format_args!("got more bytes than expected")),
-            DecodeError::Consensus(error) => consensus_error_into_serde(error),
+            DecodeError::Unconsumed => DE::custom(format_args!("got more bytes than expected")),
+            DecodeError::Parse(e) => consensus_error_into_serde(e),
         }
     }
 }

View File

@@ -426,10 +426,11 @@ impl Decodable for PartialMerkleTree {
         let nb_bytes_for_bits = r.read_compact_size()? as usize;
         if nb_bytes_for_bits > MAX_VEC_SIZE {
-            return Err(encode::Error::OversizedVectorAllocation {
+            return Err(encode::ParseError::OversizedVectorAllocation {
                 requested: nb_bytes_for_bits,
                 max: MAX_VEC_SIZE,
-            });
+            }
+            .into());
         }
         let mut bits = vec![false; nb_bytes_for_bits * 8];
         for chunk in bits.chunks_mut(8) {

View File

@@ -10,6 +10,7 @@ use std::net::{Ipv4Addr, Ipv6Addr, SocketAddr, SocketAddrV4, SocketAddrV6, ToSoc
 use io::{BufRead, Read, Write};
 
+use crate::consensus;
 use crate::consensus::encode::{self, Decodable, Encodable, ReadExt, WriteExt};
 use crate::p2p::ServiceFlags;
@@ -165,28 +166,28 @@ impl Decodable for AddrV2 {
         let network_id = u8::consensus_decode(r)?;
         let len = r.read_compact_size()?;
         if len > 512 {
-            return Err(encode::Error::ParseFailed("IP must be <= 512 bytes"));
+            return Err(consensus::parse_failed_error("IP must be <= 512 bytes"));
         }
         Ok(match network_id {
             1 => {
                 if len != 4 {
-                    return Err(encode::Error::ParseFailed("invalid IPv4 address"));
+                    return Err(consensus::parse_failed_error("invalid IPv4 address"));
                 }
                 let addr: [u8; 4] = Decodable::consensus_decode(r)?;
                 AddrV2::Ipv4(Ipv4Addr::new(addr[0], addr[1], addr[2], addr[3]))
             }
             2 => {
                 if len != 16 {
-                    return Err(encode::Error::ParseFailed("invalid IPv6 address"));
+                    return Err(consensus::parse_failed_error("invalid IPv6 address"));
                 }
                 let addr: [u16; 8] = read_be_address(r)?;
                 if addr[0..3] == ONION {
-                    return Err(encode::Error::ParseFailed(
+                    return Err(consensus::parse_failed_error(
                         "OnionCat address sent with IPv6 network id",
                     ));
                 }
                 if addr[0..6] == [0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0xFFFF] {
-                    return Err(encode::Error::ParseFailed(
+                    return Err(consensus::parse_failed_error(
                         "IPV4 wrapped address sent with IPv6 network id",
                     ));
                 }
@@ -196,33 +197,33 @@ impl Decodable for AddrV2 {
             }
             3 => {
                 if len != 10 {
-                    return Err(encode::Error::ParseFailed("invalid TorV2 address"));
+                    return Err(consensus::parse_failed_error("invalid TorV2 address"));
                 }
                 let id = Decodable::consensus_decode(r)?;
                 AddrV2::TorV2(id)
             }
             4 => {
                 if len != 32 {
-                    return Err(encode::Error::ParseFailed("invalid TorV3 address"));
+                    return Err(consensus::parse_failed_error("invalid TorV3 address"));
                 }
                 let pubkey = Decodable::consensus_decode(r)?;
                 AddrV2::TorV3(pubkey)
             }
             5 => {
                 if len != 32 {
-                    return Err(encode::Error::ParseFailed("invalid I2P address"));
+                    return Err(consensus::parse_failed_error("invalid I2P address"));
                 }
                 let hash = Decodable::consensus_decode(r)?;
                 AddrV2::I2p(hash)
             }
             6 => {
                 if len != 16 {
-                    return Err(encode::Error::ParseFailed("invalid CJDNS address"));
+                    return Err(consensus::parse_failed_error("invalid CJDNS address"));
                 }
                 let addr: [u16; 8] = read_be_address(r)?;
                 // check the first byte for the CJDNS marker
                 if addr[0] >> 8 != 0xFC {
-                    return Err(encode::Error::ParseFailed("invalid CJDNS address"));
+                    return Err(consensus::parse_failed_error("invalid CJDNS address"));
                 }
                 AddrV2::Cjdns(Ipv6Addr::new(
                     addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7],

View File

@@ -19,7 +19,7 @@ use crate::p2p::{
     Magic,
 };
 use crate::prelude::{Box, Cow, String, ToOwned, Vec};
-use crate::{block, transaction};
+use crate::{block, consensus, transaction};
 
 /// The maximum number of [super::message_blockdata::Inventory] items in an `inv` message.
 ///
@@ -418,7 +418,7 @@ impl Decodable for HeaderDeserializationWrapper {
         for _ in 0..len {
             ret.push(Decodable::consensus_decode(r)?);
             if u8::consensus_decode(r)? != 0u8 {
-                return Err(encode::Error::ParseFailed(
+                return Err(consensus::parse_failed_error(
                     "Headers message should not contain transactions",
                 ));
             }

View File

@@ -6,7 +6,7 @@
 use io::{BufRead, Write};
 
-use crate::consensus::{encode, Decodable, Encodable, ReadExt};
+use crate::consensus::{self, encode, Decodable, Encodable, ReadExt};
 use crate::internal_macros::impl_consensus_encoding;
 
 /// `filterload` message sets the current bloom filter
@@ -52,7 +52,7 @@ impl Decodable for BloomFlags {
             0 => BloomFlags::None,
             1 => BloomFlags::All,
             2 => BloomFlags::PubkeyOnly,
-            _ => return Err(encode::Error::ParseFailed("unknown bloom flag")),
+            _ => return Err(consensus::parse_failed_error("unknown bloom flag")),
         })
     }
 }

View File

@@ -8,7 +8,7 @@
 use hashes::sha256d;
 use io::{BufRead, Write};
 
-use crate::consensus::{encode, Decodable, Encodable, ReadExt};
+use crate::consensus::{self, encode, Decodable, Encodable, ReadExt};
 use crate::internal_macros::impl_consensus_encoding;
 use crate::p2p;
 use crate::p2p::address::Address;
@@ -126,7 +126,7 @@ impl Decodable for RejectReason {
             0x41 => RejectReason::Dust,
             0x42 => RejectReason::Fee,
             0x43 => RejectReason::Checkpoint,
-            _ => return Err(encode::Error::ParseFailed("unknown reject code")),
+            _ => return Err(consensus::parse_failed_error("unknown reject code")),
         })
     }
 }

View File

@@ -71,6 +71,10 @@ pub enum Error {
     CombineInconsistentKeySources(Box<Xpub>),
     /// Serialization error in bitcoin consensus-encoded structures
     ConsensusEncoding(encode::Error),
+    /// Deserialization error in bitcoin consensus-encoded structures.
+    ConsensusDeserialize(encode::DeserializeError),
+    /// Error parsing bitcoin consensus-encoded object.
+    ConsensusParse(encode::ParseError),
     /// Negative fee
     NegativeFee,
     /// Integer overflow in fee calculation
@@ -141,6 +145,9 @@ impl fmt::Display for Error {
                 write!(f, "combine conflict: {}", s)
             }
             ConsensusEncoding(ref e) => write_err!(f, "bitcoin consensus encoding error"; e),
+            ConsensusDeserialize(ref e) =>
+                write_err!(f, "bitcoin consensus deserialization error"; e),
+            ConsensusParse(ref e) => write_err!(f, "error parsing bitcoin consensus encoded object"; e),
             NegativeFee => f.write_str("PSBT has a negative fee which is not allowed"),
             FeeOverflow => f.write_str("integer overflow in fee calculation"),
             InvalidPublicKey(ref e) => write_err!(f, "invalid public key"; e),
@@ -169,6 +176,8 @@ impl std::error::Error for Error {
         match *self {
             InvalidHash(ref e) => Some(e),
             ConsensusEncoding(ref e) => Some(e),
+            ConsensusDeserialize(ref e) => Some(e),
+            ConsensusParse(ref e) => Some(e),
             Io(ref e) => Some(e),
             InvalidMagic
             | MissingUtxo
@@ -211,6 +220,14 @@ impl From<encode::Error> for Error {
     fn from(e: encode::Error) -> Self { Error::ConsensusEncoding(e) }
 }
 
+impl From<encode::DeserializeError> for Error {
+    fn from(e: encode::DeserializeError) -> Self { Error::ConsensusDeserialize(e) }
+}
+
+impl From<encode::ParseError> for Error {
+    fn from(e: encode::ParseError) -> Self { Error::ConsensusParse(e) }
+}
+
 impl From<io::Error> for Error {
     fn from(e: io::Error) -> Self { Error::Io(e) }
 }

View File

@@ -82,10 +82,10 @@ impl Key {
         let key_byte_size: u64 = byte_size - 1;
 
         if key_byte_size > MAX_VEC_SIZE.to_u64() {
-            return Err(encode::Error::OversizedVectorAllocation {
+            return Err(encode::Error::Parse(encode::ParseError::OversizedVectorAllocation {
                 requested: key_byte_size as usize,
                 max: MAX_VEC_SIZE,
-            }
+            })
             .into());
         }