author    Alex Auvolat <alex@adnab.me>  2021-12-09 19:00:50 +0100
committer Alex Auvolat <alex@adnab.me>  2021-12-09 19:00:50 +0100
commit    e0912dc5fe61839522db118a2dd2e06bb3c18d40 (patch)
tree      149a9efd1d1341360a10c73721419d5ae3a7923e
parent    3f2a52dc8e7819f6983c7ae68fb5900fe59d14b7 (diff)
Remove Content-Length when compressed; don't compress small responses
-rw-r--r--  src/https.rs | 32 ++++++++++++++++++++++++++++----
1 file changed, 28 insertions(+), 4 deletions(-)
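
This commit makes two related fixes to tricot's response compression. First, once the proxy re-encodes a body, the upstream Content-Length no longer matches the bytes actually sent, so the header must be dropped (hyper then falls back to chunked transfer encoding). Second, bodies shorter than 1400 bytes, roughly what fits in a single TCP segment on a standard 1500-byte MTU link, are passed through untouched: compressing them cannot save a packet but still costs CPU. The diff measures the body by pulling chunks off the stream; below is a minimal standalone sketch of that step, assuming hyper 0.14 where Body implements futures::Stream (the helper name is ours):

```rust
use futures::StreamExt;
use hyper::body::{Body, Bytes};

/// Sketch: pull chunks from `body` until at least `threshold` bytes have
/// been seen or the stream ends. Returns the buffered chunks and whether
/// the body ended early (i.e. it is small enough to skip compression).
async fn peek_at_least(
    body: &mut Body,
    threshold: usize,
) -> Result<(Vec<Bytes>, bool), hyper::Error> {
    let mut chunks = vec![];
    let mut sum_lengths = 0;
    while sum_lengths < threshold {
        match body.next().await {
            Some(chunk) => {
                let chunk = chunk?;
                sum_lengths += chunk.len();
                chunks.push(chunk);
            }
            // Stream ended before reaching the threshold: the whole body
            // is now buffered in `chunks`.
            None => return Ok((chunks, true)),
        }
    }
    Ok((chunks, false))
}
```

When the stream ends early, the buffered chunks can be sent back as-is with Body::from(chunks.concat()), which is exactly what the last hunk below does.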
diff --git a/src/https.rs b/src/https.rs
index 99900d0..1f603b9 100644
--- a/src/https.rs
+++ b/src/https.rs
@@ -7,6 +7,7 @@ use log::*;
 
 use accept_encoding_fork::Encoding;
 use async_compression::tokio::bufread::*;
+use futures::StreamExt;
 use futures::TryStreamExt;
 use http::header::{HeaderName, HeaderValue};
 use hyper::server::conn::Http;
@@ -163,7 +164,7 @@ async fn handle(
     info!("{} {} {}", method, response.status().as_u16(), uri);
 
     if https_config.enable_compression {
-        try_compress(response, accept_encoding, &https_config)
+        try_compress(response, accept_encoding, &https_config).await
     } else {
         Ok(response)
     }
@@ -177,7 +178,7 @@ async fn handle(
     }
 }
 
-fn try_compress(
+async fn try_compress(
    response: Response<Body>,
    accept_encoding: Vec<(Option<Encoding>, f32)>,
    https_config: &HttpsConfig,
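
The last hunk below does the real work. Because the first chunks have already been pulled off the stream, they must be stitched back in front of the unread remainder before compression; here is a sketch of that re-chaining under the same hyper 0.14 assumptions (the helper name and signature are ours):

```rust
use futures::{StreamExt, TryStreamExt};
use hyper::body::{Body, Bytes};
use tokio_util::io::StreamReader;

// Sketch: glue already-read chunks back in front of the rest of the body,
// then adapt the combined stream into an AsyncBufRead, which is the input
// type async-compression's bufread encoders (e.g. GzipEncoder) expect.
fn rechain(chunks: Vec<Bytes>, rest: Body) -> impl tokio::io::AsyncBufRead {
    let buffered = futures::stream::iter(chunks.into_iter().map(Ok));
    let body = buffered.chain(rest);
    // StreamReader wants io::Error, so hyper errors are converted first.
    StreamReader::new(body.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)))
}
```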
@@ -230,11 +231,34 @@ fn try_compress(
         None => return Ok(response),
     };
 
-    debug!("Compressing response body as {:?}", encoding);
-
-    let (mut head, body) = response.into_parts();
+    let (mut head, mut body) = response.into_parts();
+
+    // ---- If body is smaller than 1400 bytes, don't compress ----
+    let mut chunks = vec![];
+    let mut sum_lengths = 0;
+    while sum_lengths < 1400 {
+        match body.next().await {
+            Some(chunk) => {
+                let chunk = chunk?;
+                sum_lengths += chunk.len();
+                chunks.push(chunk);
+            }
+            None => {
+                return Ok(Response::from_parts(head, Body::from(chunks.concat())));
+            }
+        }
+    }
+
+    // put beginning chunks back into body
+    let body = futures::stream::iter(chunks.into_iter().map(|c| Ok(c))).chain(body);
+
+    // make an async reader from that for compressor
     let body_rd =
         StreamReader::new(body.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)));
+
+    debug!("Compressing response body as {:?} (at least {} bytes)", encoding, sum_lengths);
+    head.headers.remove(header::CONTENT_LENGTH);
+
     let compressed_body = match encoding {
         Encoding::Gzip => {
             head.headers
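
The excerpt ends inside the match on encoding. For context, here is a hedged sketch of how such an encoder is typically wrapped back into a hyper Body; it mirrors the pattern, not necessarily the exact lines that follow (helper name ours, hyper 0.14 with the stream feature assumed):

```rust
use async_compression::tokio::bufread::GzipEncoder;
use http::header::{self, HeaderValue};
use hyper::{Body, Response};
use tokio_util::io::ReaderStream;

// Sketch of a typical Gzip arm: tag the response as gzip-encoded and
// stream the encoder's output back out as a Body. With Content-Length
// removed above, hyper falls back to chunked transfer encoding.
fn finish_gzip(
    mut head: http::response::Parts,
    body_rd: impl tokio::io::AsyncBufRead + Send + 'static,
) -> Response<Body> {
    head.headers
        .insert(header::CONTENT_ENCODING, HeaderValue::from_static("gzip"));
    let compressed = ReaderStream::new(GzipEncoder::new(body_rd));
    Response::from_parts(head, Body::wrap_stream(compressed))
}
```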