mirror of https://github.com/fafhrd91/actix-web

commit 22e51a4287
Merge branch 'master' into scope_work
@@ -45,7 +45,7 @@ jobs:
           toolchain: ${{ matrix.version.version }}

       - name: Install cargo-hack
-        uses: taiki-e/install-action@v2.21.3
+        uses: taiki-e/install-action@v2.21.17
         with:
           tool: cargo-hack

@@ -85,7 +85,7 @@ jobs:
         uses: actions-rust-lang/setup-rust-toolchain@v1.5.0

       - name: Install cargo-hack
-        uses: taiki-e/install-action@v2.21.3
+        uses: taiki-e/install-action@v2.21.17
         with:
           tool: cargo-hack

@@ -106,7 +106,7 @@ jobs:
         uses: actions-rust-lang/setup-rust-toolchain@v1.5.0

       - name: Install nextest
-        uses: taiki-e/install-action@v2.21.3
+        uses: taiki-e/install-action@v2.21.17
         with:
           tool: nextest

@@ -50,7 +50,7 @@ jobs:
           toolchain: ${{ matrix.version.version }}

       - name: Install cargo-hack
-        uses: taiki-e/install-action@v2.21.3
+        uses: taiki-e/install-action@v2.21.17
         with:
           tool: cargo-hack

@@ -23,7 +23,7 @@ jobs:
           components: llvm-tools-preview

       - name: Install cargo-llvm-cov
-        uses: taiki-e/install-action@v2.21.3
+        uses: taiki-e/install-action@v2.21.17
         with:
           tool: cargo-llvm-cov

@@ -22,25 +22,31 @@ jobs:
           toolchain: nightly
           components: rustfmt

-      - run: cargo fmt --all -- --check
+      - name: Check with rustfmt
+        run: cargo fmt --all -- --check

   clippy:
     permissions:
+      contents: read
       checks: write # to add clippy checks to PR diffs

     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4

-      - uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+      - name: Install Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
         with:
           components: clippy

-      - uses: giraffate/clippy-action@v1.0.1
+      - name: Check with Clippy
+        uses: giraffate/clippy-action@v1.0.1
         with:
           reporter: github-pr-check
           github_token: ${{ secrets.GITHUB_TOKEN }}
-          clippy_flags: --workspace --all-features --tests --examples --bins -- -Dclippy::todo -Aunknown_lints
+          clippy_flags: >-
+            --workspace --all-features --tests --examples --bins --
+            -A unknown_lints -D clippy::todo -D clippy::dbg_macro

   lint-docs:
     runs-on: ubuntu-latest
@@ -5,7 +5,7 @@ authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
 description = "Various helpers for Actix applications to use during testing"
 keywords = ["http", "web", "framework", "async", "futures"]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 categories = [
   "network-programming",
   "asynchronous",

@@ -8,7 +8,7 @@ authors = [
 description = "HTTP primitives for the Actix ecosystem"
 keywords = ["actix", "http", "framework", "async", "futures"]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 categories = [
   "network-programming",
   "asynchronous",

@@ -5,7 +5,7 @@ authors = ["Jacob Halsey <jacob@jhalsey.com>"]
 description = "Multipart form derive macro for Actix Web"
 keywords = ["http", "web", "framework", "async", "futures"]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 license = "MIT OR Apache-2.0"
 edition = "2021"

@@ -8,7 +8,7 @@ authors = [
 description = "Multipart form support for Actix Web"
 keywords = ["http", "web", "framework", "async", "futures"]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 license = "MIT OR Apache-2.0"
 edition = "2021"

@@ -8,7 +8,7 @@ authors = [
 ]
 description = "Resource path matching and router"
 keywords = ["actix", "router", "routing"]
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 license = "MIT OR Apache-2.0"
 edition = "2021"

@@ -8,7 +8,7 @@ authors = [
 description = "Integration testing tools for Actix Web applications"
 keywords = ["http", "web", "framework", "async", "futures"]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 categories = [
   "network-programming",
   "asynchronous",

@@ -3,7 +3,7 @@ name = "actix-web-codegen"
 version = "4.2.2"
 description = "Routing and runtime macros for Actix Web"
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 authors = [
   "Nikolay Kim <fafhrd91@gmail.com>",
   "Rob Ede <robjtede@icloud.com>",
@@ -5,6 +5,11 @@
 ### Changed

 - Updated `zstd` dependency to `0.13`.
+- Compression middleware now prefers brotli over zstd over gzip.
+
+### Fixed
+
+- Fix validation of `Json` extractor when `JsonConfig::validate_content_type()` is set to false.

 ## 4.4.0

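For illustration, a minimal application sketch of how the two new changelog entries surface to users. This is not taken from the diff: it assumes `serde` with the `derive` feature and uses a hypothetical `index` handler.

use actix_web::{middleware, web, App, HttpResponse, HttpServer};
use serde::Deserialize;

#[derive(Deserialize)]
struct Info {
    name: String,
}

// hypothetical handler used only for this sketch
async fn index(info: web::Json<Info>) -> HttpResponse {
    HttpResponse::Ok().body(info.name.clone())
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // with the compression change, a request sending
            // `Accept-Encoding: gzip, zstd, br` (no q-factors) now receives a brotli body
            .wrap(middleware::Compress::default())
            // with the Json fix, disabling content-type validation accepts payloads
            // whose Content-Type header is missing or not a parsable mime type
            .app_data(web::JsonConfig::default().content_type_required(false))
            .route("/", web::post().to(index))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}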
@@ -14,7 +14,7 @@ categories = [
   "web-programming::websocket"
 ]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 license = "MIT OR Apache-2.0"
 edition.workspace = true
 rust-version.workspace = true
@@ -149,7 +149,7 @@ impl AcceptEncoding {

     /// Extracts the most preferable encoding, accounting for [q-factor weighting].
     ///
-    /// If no q-factors are provided, the first encoding is chosen. Note that items without
+    /// If no q-factors are provided, we prefer brotli > zstd > gzip. Note that items without
     /// q-factors are given the maximum preference value.
     ///
     /// As per the spec, returns [`Preference::Any`] if acceptable list is empty. Though, if this is
@@ -167,6 +167,7 @@ impl AcceptEncoding {

         let mut max_item = None;
         let mut max_pref = Quality::ZERO;
+        let mut max_rank = 0;

         // uses manual max lookup loop since we want the first occurrence in the case of same
         // preference but `Iterator::max_by_key` would give us the last occurrence
@@ -174,9 +175,13 @@ impl AcceptEncoding {
         for pref in &self.0 {
             // only change if strictly greater
             // equal items, even while unsorted, still have higher preference if they appear first
-            if pref.quality > max_pref {
+
+            let rank = encoding_rank(pref);
+
+            if (pref.quality, rank) > (max_pref, max_rank) {
                 max_pref = pref.quality;
                 max_item = Some(pref.item.clone());
+                max_rank = rank;
             }
         }

@@ -203,6 +208,8 @@ impl AcceptEncoding {
     /// Returns a sorted list of encodings from highest to lowest precedence, accounting
     /// for [q-factor weighting].
     ///
+    /// If no q-factors are provided, we prefer brotli > zstd > gzip.
+    ///
     /// [q-factor weighting]: https://datatracker.ietf.org/doc/html/rfc7231#section-5.3.2
     pub fn ranked(&self) -> Vec<Preference<Encoding>> {
         self.ranked_items().map(|q| q.item).collect()
@@ -210,21 +217,44 @@ impl AcceptEncoding {

     fn ranked_items(&self) -> impl Iterator<Item = QualityItem<Preference<Encoding>>> {
         if self.0.is_empty() {
-            return vec![].into_iter();
+            return Vec::new().into_iter();
         }

         let mut types = self.0.clone();

         // use stable sort so items with equal q-factor retain listed order
         types.sort_by(|a, b| {
-            // sort by q-factor descending
-            b.quality.cmp(&a.quality)
+            // sort by q-factor descending then server ranking descending
+            b.quality
+                .cmp(&a.quality)
+                .then(encoding_rank(b).cmp(&encoding_rank(a)))
         });

         types.into_iter()
     }
 }

+/// Returns server-defined encoding ranking.
+fn encoding_rank(qv: &QualityItem<Preference<Encoding>>) -> u8 {
+    // ensure that q=0 items are never sorted above identity encoding
+    // invariant: sorting methods calling this fn use first-on-equal approach
+    if qv.quality == Quality::ZERO {
+        return 0;
+    }
+
+    match qv.item {
+        Preference::Specific(Encoding::Known(ContentEncoding::Brotli)) => 5,
+        Preference::Specific(Encoding::Known(ContentEncoding::Zstd)) => 4,
+        Preference::Specific(Encoding::Known(ContentEncoding::Gzip)) => 3,
+        Preference::Specific(Encoding::Known(ContentEncoding::Deflate)) => 2,
+        Preference::Any => 0,
+        Preference::Specific(Encoding::Known(ContentEncoding::Identity)) => 0,
+        Preference::Specific(Encoding::Known(_)) => 1,
+        Preference::Specific(Encoding::Unknown(_)) => 1,
+    }
+}
+
 /// Returns true if "identity" is an acceptable encoding.
 ///
 /// Internal algorithm relies on item list being in descending order of quality.
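As an aside, the `(quality, rank)` tuple comparison introduced above can be shown in isolation. A standalone sketch with simplified, hypothetical types (plain tuples instead of the crate's `QualityItem`/`Preference` types):

fn main() {
    // (encoding, q-factor scaled to 0..=1000, server-defined rank as in encoding_rank)
    let candidates = [("gzip", 1000u16, 3u8), ("zstd", 1000, 4), ("br", 1000, 5)];

    let mut max_item = None;
    let mut max_key = (0u16, 0u8);

    for (name, quality, rank) in candidates {
        // strictly greater: on an exact (quality, rank) tie the earlier entry wins
        if (quality, rank) > max_key {
            max_key = (quality, rank);
            max_item = Some(name);
        }
    }

    // equal q-factors fall through to the server ranking, so brotli wins
    // even though the client listed it last
    assert_eq!(max_item, Some("br"));
}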
@@ -377,11 +407,11 @@ mod tests {
         );
         assert_eq!(
             test.negotiate([Encoding::gzip(), Encoding::brotli(), Encoding::identity()].iter()),
-            Some(Encoding::gzip())
+            Some(Encoding::brotli())
         );
         assert_eq!(
             test.negotiate([Encoding::brotli(), Encoding::gzip(), Encoding::identity()].iter()),
-            Some(Encoding::gzip())
+            Some(Encoding::brotli())
         );
     }

@@ -398,6 +428,9 @@ mod tests {

         let test = accept_encoding!("br", "gzip", "*");
         assert_eq!(test.ranked(), vec![enc("br"), enc("gzip"), enc("*")]);
+
+        let test = accept_encoding!("gzip", "br", "*");
+        assert_eq!(test.ranked(), vec![enc("br"), enc("gzip"), enc("*")]);
     }

     #[test]
@@ -420,5 +453,8 @@ mod tests {

         let test = accept_encoding!("br", "gzip", "*");
         assert_eq!(test.preference().unwrap(), enc("br"));
+
+        let test = accept_encoding!("gzip", "br", "*");
+        assert_eq!(test.preference().unwrap(), enc("br"));
     }
 }
@@ -328,14 +328,19 @@ impl<T: DeserializeOwned> JsonBody<T> {
         ctype_required: bool,
     ) -> Self {
         // check content-type
-        let can_parse_json = if let Ok(Some(mime)) = req.mime_type() {
-            mime.subtype() == mime::JSON
-                || mime.suffix() == Some(mime::JSON)
-                || ctype_fn.map_or(false, |predicate| predicate(mime))
-        } else {
-            // if `ctype_required` is false, assume payload is
-            // json even when content-type header is missing
-            !ctype_required
+        let can_parse_json = match (ctype_required, req.mime_type()) {
+            (true, Ok(Some(mime))) => {
+                mime.subtype() == mime::JSON
+                    || mime.suffix() == Some(mime::JSON)
+                    || ctype_fn.map_or(false, |predicate| predicate(mime))
+            }
+
+            // if content-type is expected but not parsable as mime type, bail
+            (true, _) => false,
+
+            // if content-type validation is disabled, assume payload is JSON
+            // even when content-type header is missing or invalid mime type
+            (false, _) => true,
         };

         if !can_parse_json {
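The rewritten content-type check reduces to a three-case decision. A standalone sketch of that logic using the `mime` crate — a hypothetical `can_parse_json` helper that takes a plain `Option<Mime>` and omits the custom-predicate branch, not the actix-web API:

use mime::Mime;

fn can_parse_json(ctype_required: bool, mime: Option<Mime>) -> bool {
    match (ctype_required, mime) {
        // content-type must look like JSON when validation is required
        (true, Some(mime)) => mime.subtype() == mime::JSON || mime.suffix() == Some(mime::JSON),
        // required but missing or unparsable content-type: bail
        (true, None) => false,
        // validation disabled: always attempt to parse as JSON
        (false, _) => true,
    }
}

fn main() {
    let json: Mime = "application/json".parse().unwrap();
    let problem_json: Mime = "application/problem+json".parse().unwrap();
    let other: Mime = "invalid/value".parse().unwrap();

    assert!(can_parse_json(true, Some(json)));
    assert!(can_parse_json(true, Some(problem_json))); // +json suffix also counts
    assert!(!can_parse_json(true, Some(other)));
    assert!(!can_parse_json(true, None));
    assert!(can_parse_json(false, None)); // the fixed case: header missing or invalid
}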
@@ -725,6 +730,25 @@ mod tests {
         assert!(s.is_ok())
     }

+    #[actix_rt::test]
+    async fn test_json_ignoring_content_type() {
+        let (req, mut pl) = TestRequest::default()
+            .insert_header((
+                header::CONTENT_LENGTH,
+                header::HeaderValue::from_static("16"),
+            ))
+            .insert_header((
+                header::CONTENT_TYPE,
+                header::HeaderValue::from_static("invalid/value"),
+            ))
+            .set_payload(Bytes::from_static(b"{\"name\": \"test\"}"))
+            .app_data(JsonConfig::default().content_type_required(false))
+            .to_http_parts();
+
+        let s = Json::<MyObject>::from_request(&req, &mut pl).await;
+        assert!(s.is_ok());
+    }
+
     #[actix_rt::test]
     async fn test_with_config_in_data_wrapper() {
         let (req, mut pl) = TestRequest::default()
@@ -96,7 +96,7 @@ async fn negotiate_encoding_gzip() {

     let req = srv
         .post("/static")
-        .insert_header((header::ACCEPT_ENCODING, "gzip,br,zstd"))
+        .insert_header((header::ACCEPT_ENCODING, "gzip, br;q=0.8, zstd;q=0.5"))
         .send();

     let mut res = req.await.unwrap();
@@ -109,7 +109,7 @@ async fn negotiate_encoding_gzip() {
     let mut res = srv
         .post("/static")
         .no_decompress()
-        .insert_header((header::ACCEPT_ENCODING, "gzip,br,zstd"))
+        .insert_header((header::ACCEPT_ENCODING, "gzip, br;q=0.8, zstd;q=0.5"))
         .send()
         .await
         .unwrap();
@@ -123,9 +123,11 @@ async fn negotiate_encoding_gzip() {
 async fn negotiate_encoding_br() {
     let srv = test_server!();

+    // check that brotli content-encoding header is returned
+
     let req = srv
         .post("/static")
-        .insert_header((header::ACCEPT_ENCODING, "br,zstd,gzip"))
+        .insert_header((header::ACCEPT_ENCODING, "br, zstd, gzip"))
         .send();

     let mut res = req.await.unwrap();
@@ -135,10 +137,26 @@ async fn negotiate_encoding_br() {
     let bytes = res.body().await.unwrap();
     assert_eq!(bytes, Bytes::from_static(LOREM));

+    // check that brotli is preferred even when later in (q-less) list
+
+    let req = srv
+        .post("/static")
+        .insert_header((header::ACCEPT_ENCODING, "gzip, zstd, br"))
+        .send();
+
+    let mut res = req.await.unwrap();
+    assert_eq!(res.status(), StatusCode::OK);
+    assert_eq!(res.headers().get(header::CONTENT_ENCODING).unwrap(), "br");
+
+    let bytes = res.body().await.unwrap();
+    assert_eq!(bytes, Bytes::from_static(LOREM));
+
+    // check that returned content is actually brotli encoded
+
     let mut res = srv
         .post("/static")
         .no_decompress()
-        .insert_header((header::ACCEPT_ENCODING, "br,zstd,gzip"))
+        .insert_header((header::ACCEPT_ENCODING, "br, zstd, gzip"))
         .send()
         .await
         .unwrap();
@@ -154,7 +172,7 @@ async fn negotiate_encoding_zstd() {

     let req = srv
         .post("/static")
-        .insert_header((header::ACCEPT_ENCODING, "zstd,gzip,br"))
+        .insert_header((header::ACCEPT_ENCODING, "zstd, gzip, br;q=0.8"))
         .send();

     let mut res = req.await.unwrap();
@@ -167,7 +185,7 @@ async fn negotiate_encoding_zstd() {
     let mut res = srv
         .post("/static")
         .no_decompress()
-        .insert_header((header::ACCEPT_ENCODING, "zstd,gzip,br"))
+        .insert_header((header::ACCEPT_ENCODING, "zstd, gzip, br;q=0.8"))
         .send()
         .await
         .unwrap();
@@ -207,7 +225,7 @@ async fn gzip_no_decompress() {
         // don't decompress response body
         .no_decompress()
         // signal that we want a compressed body
-        .insert_header((header::ACCEPT_ENCODING, "gzip,br,zstd"))
+        .insert_header((header::ACCEPT_ENCODING, "gzip, br;q=0.8, zstd;q=0.5"))
         .send();

     let mut res = req.await.unwrap();
@@ -11,7 +11,7 @@ categories = [
   "web-programming::websocket",
 ]
 homepage = "https://actix.rs"
-repository = "https://github.com/actix/actix-web.git"
+repository = "https://github.com/actix/actix-web"
 license = "MIT OR Apache-2.0"
 edition = "2021"