Sanity-check vector length when deserializing

Andrew Poelstra 2017-04-17 00:54:38 +00:00
parent 4b73c3223c
commit cdb452f79f
2 changed files with 9 additions and 2 deletions

Cargo.toml

@@ -1,7 +1,7 @@
 [package]
 name = "bitcoin"
-version = "0.9.0"
+version = "0.9.1"
 authors = ["Andrew Poelstra <apoelstra@wpsoftware.net>"]
 license = "CC0-1.0"
 homepage = "https://github.com/apoelstra/rust-bitcoin/"

src/network/encodable.rs

@@ -31,11 +31,14 @@
 use std::collections::HashMap;
 use std::hash::Hash;
-use std::u32;
+use std::{mem, u32};
 
 use util::hash::Sha256dHash;
 
 use network::serialize::{SimpleDecoder, SimpleEncoder};
 
+/// Maximum size, in bytes, of a vector we are allowed to decode
+pub const MAX_VEC_SIZE: usize = 32 * 1024 * 1024;
+
 /// Data which can be encoded in a consensus-consistent way
 pub trait ConsensusEncodable<S: SimpleEncoder> {
     /// Encode an object with a well-defined format
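
A quick arithmetic aside, illustrative and not part of the commit: MAX_VEC_SIZE is 32 * 1024 * 1024 = 33,554,432 bytes, so the cap works out to roughly a million elements of a 32-byte type or eight million elements of a 4-byte type.

// Illustrative only: element counts implied by the 32 MiB cap.
const MAX_VEC_SIZE: usize = 32 * 1024 * 1024; // 33_554_432 bytes

fn main() {
    assert_eq!(MAX_VEC_SIZE, 33_554_432);
    assert_eq!(MAX_VEC_SIZE / 32, 1_048_576); // e.g. 32-byte hashes
    assert_eq!(MAX_VEC_SIZE / 4, 8_388_608);  // e.g. 4-byte integers
}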
@@ -185,6 +188,10 @@ impl<D: SimpleDecoder, T: ConsensusDecodable<D>> ConsensusDecodable<D> for Vec<T
     #[inline]
     fn consensus_decode(d: &mut D) -> Result<Vec<T>, D::Error> {
         let VarInt(len): VarInt = try!(ConsensusDecodable::consensus_decode(d));
+        let byte_size = len as usize * mem::size_of::<T>();
+        if byte_size > MAX_VEC_SIZE {
+            return Err(d.error(format!("tried to allocate vec of size {} (max {})", byte_size, MAX_VEC_SIZE)));
+        }
         let mut ret = Vec::with_capacity(len as usize);
         for _ in 0..len { ret.push(try!(ConsensusDecodable::consensus_decode(d))); }
         Ok(ret)
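
For context, and not part of the commit itself: the check above rejects an attacker-controlled length prefix before Vec::with_capacity is called, so a hostile VarInt cannot force a multi-gigabyte allocation up front. Below is a minimal standalone sketch of the same guard pattern. It is not the crate's ConsensusDecodable implementation: the decode_u32_vec helper, the fixed u32 element type, the std::io error handling, and the checked_mul overflow guard are illustrative assumptions, not taken from the commit.

use std::io::{self, Read};
use std::mem;

/// Mirrors the cap introduced above: maximum bytes a decoded vector may occupy.
const MAX_VEC_SIZE: usize = 32 * 1024 * 1024;

/// Decode a little-endian u32 length prefix followed by that many u32 elements.
/// The implied byte size is checked against MAX_VEC_SIZE before allocating.
fn decode_u32_vec<R: Read>(r: &mut R) -> io::Result<Vec<u32>> {
    let mut len_buf = [0u8; 4];
    r.read_exact(&mut len_buf)?;
    let len = u32::from_le_bytes(len_buf) as usize;

    // checked_mul is an extra precaution against overflow of the byte-size
    // computation; it is not present in the commit above.
    let byte_size = len
        .checked_mul(mem::size_of::<u32>())
        .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "length overflow"))?;
    if byte_size > MAX_VEC_SIZE {
        return Err(io::Error::new(
            io::ErrorKind::InvalidData,
            format!("tried to allocate vec of size {} (max {})", byte_size, MAX_VEC_SIZE),
        ));
    }

    let mut ret = Vec::with_capacity(len);
    for _ in 0..len {
        let mut elem = [0u8; 4];
        r.read_exact(&mut elem)?;
        ret.push(u32::from_le_bytes(elem));
    }
    Ok(ret)
}

fn main() {
    // A hostile length prefix claiming ~4 billion elements is rejected
    // before any allocation happens.
    let hostile = [0xffu8, 0xff, 0xff, 0xff];
    assert!(decode_u32_vec(&mut &hostile[..]).is_err());

    // A well-formed encoding of [1, 2, 3] decodes normally.
    let good = [
        3u8, 0, 0, 0, // length prefix
        1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, // elements
    ];
    assert_eq!(decode_u32_vec(&mut &good[..]).unwrap(), vec![1, 2, 3]);
}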