Preallocate buffer for async json decoding (#367)
This is already done for bytes, and should speed up async JSON processing
considerably by avoiding needless reallocations.
michalmuskala committed Dec 20, 2021
1 parent b886d3c commit 92cbf92
Showing 1 changed file with 11 additions and 11 deletions.
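
The claim about needless reallocations can be made concrete: a Vec<u8> that starts empty has to reallocate and copy its contents repeatedly as the body is read into it, while a buffer preallocated from the Content-Length header is allocated once. The following is a rough, self-contained illustration of that difference; it is not code from this repository.

// Rough illustration (not from this repository) of why preallocating the read
// buffer matters: an empty Vec reallocates and copies its contents whenever it
// runs out of capacity, while a Vec sized up front never reallocates.
fn main() {
    let body = vec![0u8; 1 << 20]; // stand-in for a 1 MiB response body

    // Read the body in 8 KiB chunks into an empty Vec and count reallocations.
    let mut grown: Vec<u8> = Vec::new();
    let mut reallocations = 0;
    for chunk in body.chunks(8 * 1024) {
        let capacity_before = grown.capacity();
        grown.extend_from_slice(chunk);
        if grown.capacity() != capacity_before {
            reallocations += 1;
        }
    }

    // Same read into a buffer preallocated to the known content length.
    let mut preallocated: Vec<u8> = Vec::with_capacity(body.len());
    for chunk in body.chunks(8 * 1024) {
        preallocated.extend_from_slice(chunk);
    }

    println!("empty Vec reallocated {} times", reallocations);
    println!(
        "preallocated Vec kept its capacity: {}",
        preallocated.capacity() == body.len()
    );
}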
22 changes: 11 additions & 11 deletions src/response.rs

@@ -285,11 +285,7 @@ impl<R: Read> ReadResponseExt<R> for Response<R> {
     }
 
     fn bytes(&mut self) -> io::Result<Vec<u8>> {
-        let mut buf = Vec::new();
-
-        if let Some(length) = get_content_length(self) {
-            buf.reserve(length as usize);
-        }
+        let mut buf = allocate_buffer(self);
 
         self.copy_to(&mut buf)?;
 
@@ -470,11 +466,7 @@ impl<R: AsyncRead + Unpin> AsyncReadResponseExt<R> for Response<R> {
 
     fn bytes(&mut self) -> BytesFuture<'_, &mut R> {
         BytesFuture::new(async move {
-            let mut buf = Vec::new();
-
-            if let Some(length) = get_content_length(self) {
-                buf.reserve(length as usize);
-            }
+            let mut buf = allocate_buffer(self);
 
             copy_async(self.body_mut(), &mut buf).await?;
 
@@ -493,7 +485,7 @@ impl<R: AsyncRead + Unpin> AsyncReadResponseExt<R> for Response<R> {
         T: serde::de::DeserializeOwned,
     {
         JsonFuture::new(async move {
-            let mut buf = Vec::new();
+            let mut buf = allocate_buffer(self);
 
             // Serde does not support incremental parsing, so we have to resort
             // to reading the entire response into memory first and then
@@ -518,6 +510,14 @@ impl<R: AsyncRead + Unpin> AsyncReadResponseExt<R> for Response<R> {
     }
 }
 
+fn allocate_buffer<T>(response: &Response<T>) -> Vec<u8> {
+    if let Some(length) = get_content_length(response) {
+        Vec::with_capacity(length as usize)
+    } else {
+        Vec::new()
+    }
+}
+
 fn get_content_length<T>(response: &Response<T>) -> Option<u64> {
     response.headers()
         .get(http::header::CONTENT_LENGTH)?
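The diff ends inside get_content_length, the helper that the new allocate_buffer function relies on. For context, a plausible completion is sketched below, assuming a headers() accessor compatible with the http crate's HeaderMap; the repository's actual implementation may differ.

use http::Response;

// Hedged sketch: one plausible way get_content_length finishes, parsing the
// Content-Length header into a u64. The real body is truncated in the diff
// above and may differ.
fn get_content_length<T>(response: &Response<T>) -> Option<u64> {
    response
        .headers()
        .get(http::header::CONTENT_LENGTH)? // absent header -> None
        .to_str()                           // value that is not visible ASCII -> None
        .ok()?
        .parse()                            // non-numeric value -> None
        .ok()
}

With both helpers in place, the synchronous bytes() path and the asynchronous bytes() and json() paths all size their buffer from Content-Length up front and fall back to an empty Vec when the header is missing or unparsable.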
