keyfork/keyfork-frame/src/asyncext.rs

use std::marker::Unpin;
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
use super::{hash, verify_checksum, DecodeError, EncodeError};

/// Decode a framed message into a `Vec<u8>`.
///
/// # Errors
/// An error may be returned if:
/// * The given `readable` does not provide enough data to parse a length,
/// * The given `readable` does not provide the given length's worth of data,
/// * The data read from `readable` has a checksum that does not match what we build locally.
pub async fn try_decode_from(
    readable: &mut (impl AsyncRead + Unpin),
) -> Result<Vec<u8>, DecodeError> {
    let len = readable.read_u32().await?;
    // Note: Pre-filling the vec is *required*, as read_exact uses len, not capacity.
    let mut data = vec![0u8; len as usize];
    readable.read_exact(&mut data[..]).await?;
    let content = verify_checksum(&data[..])?;
    // Note: Optimizing this isn't *too* practical; we could probably pop the first 32 bytes off
    // the front of the Vec, but it might not even be worth it compared to one reallocation.
    Ok(content.to_vec())
}
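
// A minimal usage sketch, not part of the original file: build a frame by hand with the
// same layout `try_encode_to` produces (a big-endian `u32` length prefix, then the
// checksum from `hash`, then the payload) and feed it to `try_decode_from`. The module
// name, test name, and payload are illustrative, and this assumes tokio's `macros` and
// `rt` features are available for tests.
#[cfg(test)]
mod decode_sketch {
    use super::*;

    #[tokio::test]
    async fn decodes_a_hand_built_frame() {
        let payload: &[u8] = b"framed payload";
        let checksum = hash(payload);

        // Frame layout: u32 length of (checksum + payload), then the checksum, then the payload.
        let len = u32::try_from(checksum.len() + payload.len()).expect("frame fits in u32");
        let mut frame = Vec::new();
        frame.extend_from_slice(&len.to_be_bytes());
        frame.extend_from_slice(&checksum[..]);
        frame.extend_from_slice(payload);

        // `&[u8]` implements `AsyncRead`, so a plain slice works as the readable.
        let mut readable = &frame[..];
        let decoded = try_decode_from(&mut readable).await.expect("hand-built frame decodes");
        assert_eq!(&decoded[..], payload);
    }
}
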
/// Encode a `&[u8]` into a framed message.
///
/// # Errors
/// An error may be returned if:
/// * The given `data`, plus its checksum, is more than [`u32::MAX`] bytes; this is a
///   constraint at the protocol level.
/// * The resulting data could not be written to the given `writable`.
pub async fn try_encode_to(
    data: &[u8],
    writable: &mut (impl AsyncWrite + Unpin),
) -> Result<(), EncodeError> {
    let hash = hash(data);
    let len = u32::try_from(hash.len() + data.len())
        .map_err(|_| EncodeError::InputTooLarge(hash.len() + data.len()))?;
    writable.write_u32(len).await?;
    writable.write_all(&hash[..]).await?;
    writable.write_all(data).await?;
    Ok(())
}
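
// A minimal round-trip sketch, not part of the original file: encode a payload into an
// in-memory buffer, then decode it back. In tokio, `Vec<u8>` implements `AsyncWrite` and
// `&[u8]` implements `AsyncRead`, so no real I/O is needed. The module name, test name,
// and payload are illustrative; this assumes tokio's `macros` and `rt` features for
// `#[tokio::test]`.
#[cfg(test)]
mod roundtrip_sketch {
    use super::*;

    #[tokio::test]
    async fn encodes_then_decodes() {
        let payload: &[u8] = b"hello, frame";

        // Encode: writes the u32 length prefix, the checksum, then the payload bytes.
        let mut encoded: Vec<u8> = Vec::new();
        try_encode_to(payload, &mut encoded).await.expect("payload encodes");

        // Decode should hand back exactly the original payload.
        let mut readable = &encoded[..];
        let decoded = try_decode_from(&mut readable).await.expect("frame decodes");
        assert_eq!(&decoded[..], payload);
    }
}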