From dcbe7d36aea1423ff1330a4a0153db846139e945 Mon Sep 17 00:00:00 2001
From: Bastien Orivel
Date: Thu, 8 Nov 2018 02:38:03 +0100
Subject: [PATCH] Continue decompressing chunks even after hyper is done
 downloading the body

If the body compresses well enough, hyper finishes downloading the
compressed stream while a lot of data is still backed up in the cursor.
Since we only decompressed 32k per chunk, we ended up throwing away the
end of the body once hyper was done.
---
 components/net/connector.rs | 32 +++++++++++++++++++++++++++++++-
 1 file changed, 31 insertions(+), 1 deletion(-)

diff --git a/components/net/connector.rs b/components/net/connector.rs
index 05e4ded2bd9..7e9dcddcba1 100644
--- a/components/net/connector.rs
+++ b/components/net/connector.rs
@@ -110,7 +110,37 @@ impl Stream for WrappedBody {
                         },
                     }
                 } else {
-                    None
+                    // Hyper is done downloading but we still have data left to decompress.
+                    match self.decoder {
+                        Decoder::Gzip(Some(ref mut decoder)) => {
+                            let mut buf = vec![0; BUF_SIZE];
+                            let len = decoder.read(&mut buf).ok()?;
+                            if len == 0 {
+                                return None;
+                            }
+                            buf.truncate(len);
+                            Some(buf.into())
+                        },
+                        Decoder::Deflate(ref mut decoder) => {
+                            let mut buf = vec![0; BUF_SIZE];
+                            let len = decoder.read(&mut buf).ok()?;
+                            if len == 0 {
+                                return None;
+                            }
+                            buf.truncate(len);
+                            Some(buf.into())
+                        },
+                        Decoder::Brotli(ref mut decoder) => {
+                            let mut buf = vec![0; BUF_SIZE];
+                            let len = decoder.read(&mut buf).ok()?;
+                            if len == 0 {
+                                return None;
+                            }
+                            buf.truncate(len);
+                            Some(buf.into())
+                        },
+                        _ => None,
+                    }
                 }
             })
         })
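
For anyone who wants to poke at the drain behaviour outside of Servo, below is a
minimal standalone sketch of the same pattern. It uses the flate2 crate's
GzDecoder over an in-memory Cursor instead of the WrappedBody/Decoder types from
connector.rs, and assumes a 32 KiB BUF_SIZE taken from the commit message; it is
an illustration of the technique, not the patched code itself.

use std::io::{Cursor, Read, Write};

use flate2::read::GzDecoder;
use flate2::write::GzEncoder;
use flate2::Compression;

// Assumed chunk size, matching the 32k mentioned in the commit message.
const BUF_SIZE: usize = 32 * 1024;

fn main() -> std::io::Result<()> {
    // A highly compressible body, so the compressed stream is tiny compared
    // to the decompressed output (the situation described in the bug).
    let body = vec![b'a'; 10 * BUF_SIZE];
    let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
    encoder.write_all(&body)?;
    let compressed = encoder.finish()?;

    // All compressed bytes are already "downloaded" and backed up in the
    // cursor; keep pulling BUF_SIZE chunks out of the decoder until it
    // reports 0 bytes, mirroring the new `else` branch in WrappedBody::poll.
    let mut decoder = GzDecoder::new(Cursor::new(compressed));
    let mut total = 0;
    loop {
        let mut buf = vec![0; BUF_SIZE];
        let len = decoder.read(&mut buf)?;
        if len == 0 {
            break; // decoder fully drained, i.e. the stream can return None
        }
        buf.truncate(len);
        total += buf.len();
    }
    assert_eq!(total, body.len());
    Ok(())
}

The point the sketch makes is the same as the patch: even after the producer has
delivered its last compressed byte, the consumer must keep reading BUF_SIZE
chunks from the decoder until it returns 0, otherwise the tail of the body is
lost.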