Compare commits

...

11 Commits

Author SHA1 Message Date
Denis Kayshev 8fd1913052
Merge dda31217db into 90c19a835d 2025-03-26 08:42:22 +00:00
dependabot[bot] 90c19a835d
build(deps): bump taiki-e/install-action from 2.49.32 to 2.49.33 (#3602)
Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.49.32 to 2.49.33.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/taiki-e/install-action/compare/v2.49.32...v2.49.33)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-24 11:52:08 +00:00
dependabot[bot] adf57d2b24
build(deps): bump time from 0.3.40 to 0.3.41 (#3601)
Bumps [time](https://github.com/time-rs/time) from 0.3.40 to 0.3.41.
- [Release notes](https://github.com/time-rs/time/releases)
- [Changelog](https://github.com/time-rs/time/blob/main/CHANGELOG.md)
- [Commits](https://github.com/time-rs/time/compare/v0.3.40...v0.3.41)

---
updated-dependencies:
- dependency-name: time
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-24 11:51:57 +00:00
dependabot[bot] fcd10fbb5e
build(deps): bump taiki-e/install-action from 2.49.17 to 2.49.27 (#3600)
Bumps [taiki-e/install-action](https://github.com/taiki-e/install-action) from 2.49.17 to 2.49.27.
- [Release notes](https://github.com/taiki-e/install-action/releases)
- [Changelog](https://github.com/taiki-e/install-action/blob/main/CHANGELOG.md)
- [Commits](https://github.com/taiki-e/install-action/compare/v2.49.17...v2.49.27)

---
updated-dependencies:
- dependency-name: taiki-e/install-action
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-21 06:58:14 +00:00
Rob Ede 95b6a81f43
refactor: switch size parsing to bytesize crate 2025-03-21 06:06:50 +00:00
Rob Ede ab18efe0ac
chore: check in lockfile 2025-03-21 05:51:45 +00:00
Rob Ede dda31217db
Merge branch 'master' into pr/topenkoff/2948 2024-06-11 01:54:20 +01:00
Denis Kayshev 1c14195c4b
update `CHANGES.md` 2023-01-18 20:46:18 +03:00
Denis Kayshev c30228068e
add docs for compress ranges 2023-01-18 20:45:04 +03:00
Denis Kayshev 7d281e543c
remove `set_` prefix 2023-01-18 20:44:49 +03:00
Denis Kayshev 21219e0843
add setup to compress middleware 2023-01-18 20:44:29 +03:00
18 changed files with 4207 additions and 51 deletions

3
.cspell.yml Normal file

@ -0,0 +1,3 @@
version: "0.2"
words:
- actix

View File

@ -49,7 +49,7 @@ jobs:
toolchain: ${{ matrix.version.version }}
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
uses: taiki-e/install-action@v2.49.17
uses: taiki-e/install-action@v2.49.33
with:
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean
@ -83,7 +83,7 @@ jobs:
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
- name: Install just, cargo-hack
uses: taiki-e/install-action@v2.49.17
uses: taiki-e/install-action@v2.49.33
with:
tool: just,cargo-hack

View File

@ -64,7 +64,7 @@ jobs:
toolchain: ${{ matrix.version.version }}
- name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
uses: taiki-e/install-action@v2.49.17
uses: taiki-e/install-action@v2.49.33
with:
tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean
@ -113,7 +113,7 @@ jobs:
toolchain: nightly
- name: Install just
uses: taiki-e/install-action@v2.49.17
uses: taiki-e/install-action@v2.49.33
with:
tool: just

View File

@ -24,7 +24,7 @@ jobs:
components: llvm-tools
- name: Install just, cargo-llvm-cov, cargo-nextest
uses: taiki-e/install-action@v2.49.17
uses: taiki-e/install-action@v2.49.33
with:
tool: just,cargo-llvm-cov,cargo-nextest

View File

@ -77,7 +77,7 @@ jobs:
toolchain: ${{ vars.RUST_VERSION_EXTERNAL_TYPES }}
- name: Install just
uses: taiki-e/install-action@v2.49.17
uses: taiki-e/install-action@v2.49.33
with:
tool: just

1
.gitignore vendored

@ -1,4 +1,3 @@
Cargo.lock
target/
guide/build/
/gh-pages

3970
Cargo.lock generated Normal file

File diff suppressed because it is too large

View File

@ -27,6 +27,7 @@
### Added
- Add `error::InvalidStatusCode` re-export.
- New method `response_with_level` for `Encoder<B>` for setting the compression level. [#2948]
## 3.7.0

View File

@ -28,6 +28,30 @@ use crate::{
const MAX_CHUNK_SIZE_ENCODE_IN_PLACE: usize = 1024;
// https://www.zlib.net/manual.html#Constants
const DEFLATE_MIN_LEVEL: u32 = 0;
// https://www.zlib.net/manual.html#Constants
const DEFLATE_MAX_LEVEL: u32 = 9;
const DEFLATE_DEFAULT: u32 = 1;
// https://www.zlib.net/manual.html#Constants
const GZIP_MIN_LEVEL: u32 = 0;
// https://www.zlib.net/manual.html#Constants
const GZIP_MAX_LEVEL: u32 = 9;
const GZIP_DEFAULT: u32 = 1;
// https://www.brotli.org/encode.html#a94f
const BROTLI_MIN_QUALITY: u32 = 0;
// https://www.brotli.org/encode.html#ac45
const BROTLI_MAX_QUALITY: u32 = 11;
const BROTLI_DEFAULT: u32 = 3;
// https://github.com/facebook/zstd/blob/dev/doc/zstd_manual.html#L42-L43
const ZSTD_MIN_LEVEL: i32 = 0;
// https://github.com/facebook/zstd/blob/dev/doc/zstd_manual.html#L42-L43
const ZSTD_MAX_LEVEL: i32 = 22;
const ZSTD_DEFAULT: i32 = 3;
pin_project! {
pub struct Encoder<B> {
#[pin]
@ -60,6 +84,15 @@ impl<B: MessageBody> Encoder<B> {
}
pub fn response(encoding: ContentEncoding, head: &mut ResponseHead, body: B) -> Self {
Encoder::response_with_level(encoding, head, body, None)
}
pub fn response_with_level(
encoding: ContentEncoding,
head: &mut ResponseHead,
body: B,
level: Option<u32>,
) -> Self {
// no need to compress empty bodies
match body.size() {
BodySize::None => return Self::none(),
@ -78,8 +111,9 @@ impl<B: MessageBody> Encoder<B> {
};
if should_encode {
let encoding_level = ContentEncodingWithLevel::new(encoding, level);
// wrap body only if encoder is feature-enabled
if let Some(enc) = ContentEncoder::select(encoding) {
if let Some(enc) = ContentEncoder::select(encoding_level) {
update_head(encoding, head);
return Encoder {
@ -287,27 +321,74 @@ enum ContentEncoder {
Zstd(ZstdEncoder<'static, Writer>),
}
enum ContentEncodingWithLevel {
Deflate(u32),
Gzip(u32),
Brotli(u32),
Zstd(i32),
Identity,
}
impl ContentEncodingWithLevel {
pub fn new(encoding: ContentEncoding, level: Option<u32>) -> Self {
match encoding {
ContentEncoding::Deflate => {
let level = level
.filter(|l| (DEFLATE_MIN_LEVEL..=DEFLATE_MAX_LEVEL).contains(l))
.unwrap_or(DEFLATE_DEFAULT);
ContentEncodingWithLevel::Deflate(level)
}
ContentEncoding::Gzip => {
let level = level
.filter(|l| (GZIP_MIN_LEVEL..=GZIP_MAX_LEVEL).contains(l))
.unwrap_or(GZIP_DEFAULT);
ContentEncodingWithLevel::Gzip(level)
}
ContentEncoding::Brotli => {
let level = level
.filter(|l| (BROTLI_MIN_QUALITY..=BROTLI_MAX_QUALITY).contains(l))
.unwrap_or(BROTLI_DEFAULT);
ContentEncodingWithLevel::Brotli(level)
}
ContentEncoding::Zstd => {
let level = level
.map(|l| l as i32)
.filter(|l| (ZSTD_MIN_LEVEL..=ZSTD_MAX_LEVEL).contains(l))
.unwrap_or(ZSTD_DEFAULT);
ContentEncodingWithLevel::Zstd(level)
}
ContentEncoding::Identity => ContentEncodingWithLevel::Identity,
}
}
}
impl ContentEncoder {
fn select(encoding: ContentEncoding) -> Option<Self> {
fn select(encoding: ContentEncodingWithLevel) -> Option<Self> {
match encoding {
#[cfg(feature = "compress-gzip")]
ContentEncoding::Deflate => Some(ContentEncoder::Deflate(ZlibEncoder::new(
Writer::new(),
flate2::Compression::fast(),
))),
ContentEncodingWithLevel::Deflate(level) => Some(ContentEncoder::Deflate(
ZlibEncoder::new(Writer::new(), flate2::Compression::new(level)),
)),
#[cfg(feature = "compress-gzip")]
ContentEncoding::Gzip => Some(ContentEncoder::Gzip(GzEncoder::new(
ContentEncodingWithLevel::Gzip(level) => Some(ContentEncoder::Gzip(GzEncoder::new(
Writer::new(),
flate2::Compression::fast(),
flate2::Compression::new(level),
))),
#[cfg(feature = "compress-brotli")]
ContentEncoding::Brotli => Some(ContentEncoder::Brotli(new_brotli_compressor())),
ContentEncodingWithLevel::Brotli(level) => Some(ContentEncoder::Brotli(Box::new(
brotli::CompressorWriter::new(
Writer::new(),
32 * 1024, // 32 KiB buffer
level, // BROTLI_PARAM_QUALITY
22, // BROTLI_PARAM_LGWIN
),
))),
#[cfg(feature = "compress-zstd")]
ContentEncoding::Zstd => {
let encoder = ZstdEncoder::new(Writer::new(), 3).ok()?;
ContentEncodingWithLevel::Zstd(level) => {
let encoder = ZstdEncoder::new(Writer::new(), level).ok()?;
Some(ContentEncoder::Zstd(encoder))
}
@ -401,16 +482,6 @@ impl ContentEncoder {
}
}
#[cfg(feature = "compress-brotli")]
fn new_brotli_compressor() -> Box<brotli::CompressorWriter<Writer>> {
Box::new(brotli::CompressorWriter::new(
Writer::new(),
32 * 1024, // 32 KiB buffer
3, // BROTLI_PARAM_QUALITY
22, // BROTLI_PARAM_LGWIN
))
}
#[derive(Debug, Display)]
#[non_exhaustive]
pub enum EncoderError {
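
For context on the hunks above: `ContentEncodingWithLevel::new` keeps a caller-supplied level only when it falls inside the algorithm's valid range and otherwise falls back to a per-algorithm default. A minimal standalone sketch of that validation pattern (constants copied from the diff; `resolve_gzip_level` is a hypothetical helper for illustration, not part of the crate):

```rust
// Sketch of the level-validation pattern used by `ContentEncodingWithLevel::new`.
// The constants mirror the gzip values introduced in this diff.
const GZIP_MIN_LEVEL: u32 = 0;
const GZIP_MAX_LEVEL: u32 = 9;
const GZIP_DEFAULT: u32 = 1;

fn resolve_gzip_level(requested: Option<u32>) -> u32 {
    requested
        .filter(|l| (GZIP_MIN_LEVEL..=GZIP_MAX_LEVEL).contains(l))
        .unwrap_or(GZIP_DEFAULT)
}

fn main() {
    assert_eq!(resolve_gzip_level(Some(7)), 7); // in range: used as-is
    assert_eq!(resolve_gzip_level(Some(42)), GZIP_DEFAULT); // out of range: falls back to default
    assert_eq!(resolve_gzip_level(None), GZIP_DEFAULT); // unset: default
}
```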

View File

@ -18,8 +18,8 @@ all-features = true
proc-macro = true
[dependencies]
bytesize = "2"
darling = "0.20"
parse-size = "1"
proc-macro2 = "1"
quote = "1"
syn = "2"
@ -27,7 +27,7 @@ syn = "2"
[dev-dependencies]
actix-multipart = "0.7"
actix-web = "4"
rustversion = "1"
rustversion-msrv = "0.100"
trybuild = "1"
[lints]

View File

@ -9,8 +9,8 @@
use std::collections::HashSet;
use bytesize::ByteSize;
use darling::{FromDeriveInput, FromField, FromMeta};
use parse_size::parse_size;
use proc_macro::TokenStream;
use proc_macro2::Ident;
use quote::quote;
@ -103,7 +103,7 @@ struct ParsedField<'t> {
/// # Field Limits
///
/// You can use the `#[multipart(limit = "<size>")]` attribute to set field level limits. The limit
/// string is parsed using [parse_size].
/// string is parsed using [`bytesize`].
///
/// Note: the form is also subject to the global limits configured using `MultipartFormConfig`.
///
@ -150,7 +150,7 @@ struct ParsedField<'t> {
/// struct Form { }
/// ```
///
/// [parse_size]: https://docs.rs/parse-size/1/parse_size
/// [`bytesize`]: https://docs.rs/bytesize/2
#[proc_macro_derive(MultipartForm, attributes(multipart))]
pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input: syn::DeriveInput = parse_macro_input!(input);
@ -191,8 +191,8 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenStream
let attrs = FieldAttrs::from_field(field).map_err(|err| err.write_errors())?;
let serialization_name = attrs.rename.unwrap_or_else(|| rust_name.to_string());
let limit = match attrs.limit.map(|limit| match parse_size(&limit) {
Ok(size) => Ok(usize::try_from(size).unwrap()),
let limit = match attrs.limit.map(|limit| match limit.parse::<ByteSize>() {
Ok(ByteSize(size)) => Ok(usize::try_from(size).unwrap()),
Err(err) => Err(syn::Error::new(
field.ident.as_ref().unwrap().span(),
format!("Could not parse size limit `{}`: {}", limit, err),

View File

@ -1,4 +1,4 @@
#[rustversion::stable(1.72)] // MSRV
#[rustversion_msrv::msrv]
#[test]
fn compile_macros() {
let t = trybuild::TestCases::new();

View File

@ -1,16 +1,16 @@
error: Could not parse size limit `2 bytes`: invalid digit found in string
error: Could not parse size limit `2 bytes`: couldn't parse "bytes" into a known SI unit, couldn't parse unit of "bytes"
--> tests/trybuild/size-limit-parse-fail.rs:6:5
|
6 | description: Text<String>,
| ^^^^^^^^^^^
error: Could not parse size limit `2 megabytes`: invalid digit found in string
error: Could not parse size limit `2 megabytes`: couldn't parse "megabytes" into a known SI unit, couldn't parse unit of "megabytes"
--> tests/trybuild/size-limit-parse-fail.rs:12:5
|
12 | description: Text<String>,
| ^^^^^^^^^^^
error: Could not parse size limit `four meters`: invalid digit found in string
error: Could not parse size limit `four meters`: couldn't parse "four meters" into a ByteSize, cannot parse float from empty string
--> tests/trybuild/size-limit-parse-fail.rs:18:5
|
18 | description: Text<String>,

View File

@ -34,7 +34,7 @@ actix-web = "4"
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
trybuild = "1"
rustversion = "1"
rustversion-msrv = "0.100"
[lints]
workspace = true

View File

@ -1,4 +1,4 @@
#[rustversion::stable(1.72)] // MSRV
#[rustversion_msrv::msrv]
#[test]
fn compile_macros() {
let t = trybuild::TestCases::new();

View File

@ -35,6 +35,7 @@
- Add `web::Html` responder.
- Add `HttpRequest::full_url()` method to get the complete URL of the request.
- Add support for configuring compression levels in `middleware::Compress`.
### Fixed

View File

@ -4,10 +4,11 @@ use std::{
future::Future,
marker::PhantomData,
pin::Pin,
rc::Rc,
task::{Context, Poll},
};
use actix_http::encoding::Encoder;
use actix_http::{encoding::Encoder, header::ContentEncoding};
use actix_service::{Service, Transform};
use actix_utils::future::{ok, Either, Ready};
use futures_core::ready;
@ -55,6 +56,20 @@ use crate::{
/// .wrap(middleware::Compress::default())
/// .default_service(web::to(|| async { HttpResponse::Ok().body("hello world") }));
/// ```
/// You can also set the compression level for supported algorithms:
/// ```
/// use actix_web::{middleware, web, App, HttpResponse};
///
/// let app = App::new()
/// .wrap(
/// middleware::Compress::new()
/// .gzip_level(3)
/// .deflate_level(1)
/// .brotli_level(7)
/// .zstd_level(10),
/// )
/// .default_service(web::to(|| async { HttpResponse::Ok().body("hello world") }));
/// ```
///
/// Pre-compressed Gzip file being served from disk with correct headers added to bypass middleware:
/// ```no_run
@ -74,7 +89,71 @@ use crate::{
/// [feature flags]: ../index.html#crate-features
#[derive(Debug, Clone, Default)]
#[non_exhaustive]
pub struct Compress;
pub struct Compress {
inner: Rc<Inner>,
}
impl Compress {
/// Constructs a new compress middleware instance with default settings.
pub fn new() -> Self {
Default::default()
}
}
#[derive(Debug, Clone, Default)]
struct Inner {
deflate: Option<u32>,
gzip: Option<u32>,
brotli: Option<u32>,
zstd: Option<u32>,
}
impl Inner {
pub fn level(&self, encoding: &ContentEncoding) -> Option<u32> {
match encoding {
ContentEncoding::Deflate => self.deflate,
ContentEncoding::Gzip => self.gzip,
ContentEncoding::Brotli => self.brotli,
ContentEncoding::Zstd => self.zstd,
_ => None,
}
}
}
impl Compress {
/// Set deflate compression level.
///
/// The integer here is on a scale of 0-9.
/// When going out of range, level 1 will be used.
pub fn deflate_level(mut self, value: u32) -> Self {
Rc::get_mut(&mut self.inner).unwrap().deflate = Some(value);
self
}
/// Set gzip compression level.
///
/// The integer here is on a scale of 0-9.
/// When going out of range, level 1 will be used.
pub fn gzip_level(mut self, value: u32) -> Self {
Rc::get_mut(&mut self.inner).unwrap().gzip = Some(value);
self
}
/// Set brotli compression level.
///
/// The integer here is on a scale of 0-11.
/// When going out of range, level 3 will be used.
pub fn brotli_level(mut self, value: u32) -> Self {
Rc::get_mut(&mut self.inner).unwrap().brotli = Some(value);
self
}
/// Set zstd compression level.
///
/// The integer here is on a scale of 0-22.
/// When going out of range, level 3 will be used.
pub fn zstd_level(mut self, value: u32) -> Self {
Rc::get_mut(&mut self.inner).unwrap().zstd = Some(value);
self
}
}
impl<S, B> Transform<S, ServiceRequest> for Compress
where
@ -88,12 +167,16 @@ where
type Future = Ready<Result<Self::Transform, Self::InitError>>;
fn new_transform(&self, service: S) -> Self::Future {
ok(CompressMiddleware { service })
ok(CompressMiddleware {
service,
inner: Rc::clone(&self.inner),
})
}
}
pub struct CompressMiddleware<S> {
service: S,
inner: Rc<Inner>,
}
impl<S, B> Service<ServiceRequest> for CompressMiddleware<S>
@ -112,6 +195,7 @@ where
fn call(&self, req: ServiceRequest) -> Self::Future {
// negotiate content-encoding
let accept_encoding = req.get_header::<AcceptEncoding>();
let inner = self.inner.clone();
let accept_encoding = match accept_encoding {
// missing header; fallback to identity
@ -119,6 +203,7 @@ where
return Either::left(CompressResponse {
encoding: Encoding::identity(),
fut: self.service.call(req),
inner,
_phantom: PhantomData,
})
}
@ -146,6 +231,7 @@ where
Some(encoding) => Either::left(CompressResponse {
fut: self.service.call(req),
encoding,
inner,
_phantom: PhantomData,
}),
}
@ -160,6 +246,7 @@ pin_project! {
#[pin]
fut: S::Future,
encoding: Encoding,
inner: Rc<Inner>,
_phantom: PhantomData<B>,
}
}
@ -182,6 +269,7 @@ where
unimplemented!("encoding '{enc}' should not be here");
}
};
let level = this.inner.level(&enc);
Poll::Ready(Ok(resp.map_body(move |head, body| {
let content_type = head.headers.get(header::CONTENT_TYPE);
@ -205,7 +293,7 @@ where
ContentEncoding::Identity
};
EitherBody::left(Encoder::response(enc, head, body))
EitherBody::left(Encoder::response_with_level(enc, head, body, level))
})))
}
@ -389,6 +477,29 @@ mod tests {
assert!(vary_headers.contains(&HeaderValue::from_static("accept-encoding")));
}
#[actix_rt::test]
async fn custom_compress_level() {
const D: &str = "hello world ";
const DATA: &str = const_str::repeat!(D, 100);
let app = test::init_service({
App::new().wrap(Compress::new().gzip_level(9)).route(
"/compress",
web::get().to(move || HttpResponse::Ok().body(DATA)),
)
})
.await;
let req = test::TestRequest::default()
.uri("/compress")
.insert_header((header::ACCEPT_ENCODING, "gzip"))
.to_request();
let res = test::call_service(&app, req).await;
assert_eq!(res.status(), StatusCode::OK);
let bytes = test::read_body(res).await;
assert_eq!(gzip_decode(bytes), DATA.as_bytes());
}
fn configure_predicate_test(cfg: &mut web::ServiceConfig) {
cfg.route(
"/html",

View File

@ -7,14 +7,14 @@ fmt:
cargo +nightly fmt
fd --hidden --type=file --extension=md --extension=yml --exec-batch npx -y prettier --write
# Downgrade dev-dependencies necessary to run MSRV checks/tests.
# Downgrade dependencies necessary to run MSRV checks/tests.
[private]
downgrade-for-msrv:
cargo update -p=parse-size --precise=1.0.0
cargo update -p=clap --precise=4.4.18
cargo update -p=divan --precise=0.1.15
cargo update -p=litemap --precise=0.7.4
cargo update -p=zerofrom --precise=0.1.5
cargo update -p=clap --precise=4.4.18 # next ver: 1.74.0
cargo update -p=divan --precise=0.1.15 # next ver: 1.80.0
cargo update -p=litemap --precise=0.7.4 # next ver: 1.81.0
cargo update -p=zerofrom --precise=0.1.5 # next ver: 1.81.0
cargo update -p=half --precise=2.4.1 # next ver: 1.81.0
msrv := ```
cargo metadata --format-version=1 \