mirror of https://github.com/fafhrd91/actix-web
Compare commits
86 Commits
http-v3.9. ... master
| Author | SHA1 | Date |
|---|---|---|
| | 90c19a835d | |
| | adf57d2b24 | |
| | fcd10fbb5e | |
| | 95b6a81f43 | |
| | ab18efe0ac | |
| | cede0c6dbb | |
| | 1005b6a12a | |
| | d898e8f739 | |
| | 353873fc04 | |
| | 1390e29705 | |
| | c6e7ebd185 | |
| | e8351cc3aa | |
| | f63cf69e6a | |
| | 92c1e2230d | |
| | 4bb495aba0 | |
| | aa000b429d | |
| | df0885cf21 | |
| | 0796f8e796 | |
| | a2307fbb86 | |
| | 98ced477f7 | |
| | 98c263b3ee | |
| | b8bdee0606 | |
| | 85843b9b0f | |
| | 9656383646 | |
| | cee7451915 | |
| | eb6f6a1976 | |
| | 04533a15fa | |
| | a4eaa7f0bb | |
| | 66e2afe306 | |
| | 59961a58a8 | |
| | 33b487e854 | |
| | 182055bcb5 | |
| | a36280466c | |
| | 8690f80a08 | |
| | 91e29c0ce4 | |
| | b0fe679784 | |
| | 0fafb486d4 | |
| | 5aeb0dd950 | |
| | 856480cd90 | |
| | bb1442e20b | |
| | ac2a3bb124 | |
| | 8200e4ee82 | |
| | 5b60d81f57 | |
| | ee6a6ec03e | |
| | 34327bd221 | |
| | 472dbca64e | |
| | d8566da66f | |
| | a908afa56b | |
| | 8115c818c1 | |
| | 002c1b5a19 | |
| | 836c75064b | |
| | 2132c95b01 | |
| | eff2a20c90 | |
| | 9d849c19a5 | |
| | 6771be20b3 | |
| | ef977055fc | |
| | 568bffeb58 | |
| | 03c65d93e5 | |
| | ec05381f6f | |
| | 4c05c87b11 | |
| | 27c07f122b | |
| | 3849cdaa6c | |
| | a5c2d0531b | |
| | 049b49290d | |
| | b7a0ff0a3a | |
| | a0a6761bfe | |
| | ff9c0f7157 | |
| | 1c4e265a70 | |
| | d9d22825d4 | |
| | 9a685cabad | |
| | 93edef8fee | |
| | d148e84aba | |
| | 7360c732b3 | |
| | 48aaf41638 | |
| | bb13f54180 | |
| | b52e77beb4 | |
| | b4f8bda032 | |
| | c055723997 | |
| | d6bdfac1b9 | |
| | 78ac5cf482 | |
| | 4303dd8c37 | |
| | f61fcbe840 | |
| | 538c1bea34 | |
| | 70e3758ecc | |
| | 5ad92c0062 | |
| | e0918fb179 | |
@@ -0,0 +1,7 @@
+disallowed-names = [
+  "e", # no single letter error bindings
+]
+disallowed-methods = [
+  "std::cell::RefCell::default()",
+  "std::rc::Rc::default()",
+]
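The new clippy config above disallows single-letter `e` error bindings and `RefCell::default()`/`Rc::default()`. A minimal sketch of the style this appears to enforce (standalone illustration, not code from this diff):

```rust
use std::rc::Rc;

// `Rc::default()` is on the disallowed-methods list; constructing the inner
// value explicitly keeps the element type visible at the call site.
fn shared_buffer() -> Rc<Vec<u8>> {
    Rc::new(Vec::new())
}

fn read_config() {
    // Bindings named just `e` are disallowed; `err` matches the renames made
    // throughout the rest of this diff.
    if let Err(err) = std::fs::read("config.toml") {
        eprintln!("read failed: {err}");
    }
}

fn main() {
    let _buf = shared_buffer();
    read_config();
}
```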
@@ -0,0 +1,3 @@
+version: "0.2"
+words:
+  - actix
@@ -32,7 +32,7 @@ jobs:

 - name: Install nasm
 if: matrix.target.os == 'windows-latest'
-uses: ilammy/setup-nasm@v1.5.1
+uses: ilammy/setup-nasm@v1.5.2

 - name: Install OpenSSL
 if: matrix.target.os == 'windows-latest'

@@ -44,12 +44,12 @@ jobs:
 echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV

 - name: Install Rust (${{ matrix.version.name }})
-uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
 with:
 toolchain: ${{ matrix.version.version }}

 - name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
-uses: taiki-e/install-action@v2.42.17
+uses: taiki-e/install-action@v2.49.33
 with:
 tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean

@@ -80,10 +80,10 @@ jobs:
 uses: rui314/setup-mold@v1

 - name: Install Rust
-uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.11.0

 - name: Install just, cargo-hack
-uses: taiki-e/install-action@v2.42.17
+uses: taiki-e/install-action@v2.49.33
 with:
 tool: just,cargo-hack
@@ -43,7 +43,7 @@ jobs:

 - name: Install nasm
 if: matrix.target.os == 'windows-latest'
-uses: ilammy/setup-nasm@v1.5.1
+uses: ilammy/setup-nasm@v1.5.2

 - name: Install OpenSSL
 if: matrix.target.os == 'windows-latest'

@@ -59,12 +59,12 @@ jobs:
 uses: rui314/setup-mold@v1

 - name: Install Rust (${{ matrix.version.name }})
-uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
 with:
 toolchain: ${{ matrix.version.version }}

 - name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
-uses: taiki-e/install-action@v2.42.17
+uses: taiki-e/install-action@v2.49.33
 with:
 tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean

@@ -92,7 +92,7 @@ jobs:
 - uses: actions/checkout@v4

 - name: Install Rust
-uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
 with:
 toolchain: nightly

@@ -108,12 +108,12 @@ jobs:
 - uses: actions/checkout@v4

 - name: Install Rust (nightly)
-uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
 with:
 toolchain: nightly

 - name: Install just
-uses: taiki-e/install-action@v2.42.17
+uses: taiki-e/install-action@v2.49.33
 with:
 tool: just
@@ -18,13 +18,13 @@ jobs:
 - uses: actions/checkout@v4

 - name: Install Rust (nightly)
-uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
 with:
 toolchain: nightly
 components: llvm-tools

 - name: Install just, cargo-llvm-cov, cargo-nextest
-uses: taiki-e/install-action@v2.42.17
+uses: taiki-e/install-action@v2.49.33
 with:
 tool: just,cargo-llvm-cov,cargo-nextest

@@ -32,7 +32,7 @@ jobs:
 run: just test-coverage-codecov

 - name: Upload coverage to Codecov
-uses: codecov/codecov-action@v4.5.0
+uses: codecov/codecov-action@v5.4.0
 with:
 files: codecov.json
 fail_ci_if_error: true
@@ -18,7 +18,7 @@ jobs:
 - uses: actions/checkout@v4

 - name: Install Rust (nightly)
-uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
 with:
 toolchain: nightly
 components: rustfmt

@@ -36,7 +36,7 @@ jobs:
 - uses: actions/checkout@v4

 - name: Install Rust
-uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
 with:
 components: clippy

@@ -55,7 +55,7 @@ jobs:
 - uses: actions/checkout@v4

 - name: Install Rust (nightly)
-uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
 with:
 toolchain: nightly
 components: rust-docs

@@ -66,51 +66,25 @@ jobs:
 run: cargo +nightly doc --no-deps --workspace --all-features

 check-external-types:
 if: false # rustdoc mismatch currently
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@v4

-- name: Install Rust (nightly-2024-05-01)
-uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
+- name: Install Rust (${{ vars.RUST_VERSION_EXTERNAL_TYPES }})
+uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
 with:
-toolchain: nightly-2024-05-01
+toolchain: ${{ vars.RUST_VERSION_EXTERNAL_TYPES }}

 - name: Install just
-uses: taiki-e/install-action@v2.42.17
+uses: taiki-e/install-action@v2.49.33
 with:
 tool: just

 - name: Install cargo-check-external-types
-uses: taiki-e/cache-cargo-install-action@v2.0.1
+uses: taiki-e/cache-cargo-install-action@v2.1.1
 with:
 tool: cargo-check-external-types

 - name: check external types
-run: just check-external-types-all +nightly-2024-05-01
-
-public-api-diff:
-runs-on: ubuntu-latest
-steps:
-- name: Checkout main branch
-uses: actions/checkout@v4
-with:
-ref: ${{ github.base_ref }}
-
-- name: Checkout PR branch
-uses: actions/checkout@v4
-
-- name: Install Rust (nightly-2024-06-07)
-uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
-with:
-toolchain: nightly-2024-06-07
-
-- name: Install cargo-public-api
-uses: taiki-e/install-action@v2.42.17
-with:
-tool: cargo-public-api
-
-- name: Generate API diff
-run: |
-for f in $(find -mindepth 2 -maxdepth 2 -name Cargo.toml); do
-cargo public-api --manifest-path "$f" --simplified diff ${{ github.event.pull_request.base.sha }}..${{ github.sha }}
-done
+run: just check-external-types-all +${{ vars.RUST_VERSION_EXTERNAL_TYPES }}
@@ -1,4 +1,3 @@
-Cargo.lock
 target/
 guide/build/
 /gh-pages
File diff suppressed because it is too large
@ -19,7 +19,7 @@ homepage = "https://actix.rs"
|
|||
repository = "https://github.com/actix/actix-web"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2021"
|
||||
rust-version = "1.72"
|
||||
rust-version = "1.75"
|
||||
|
||||
[profile.dev]
|
||||
# Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.
|
||||
|
|
|
@@ -2,6 +2,8 @@

 ## Unreleased

+- Minimum supported Rust version (MSRV) is now 1.75.
+
 ## 0.6.6

 - Update `tokio-uring` dependency to `0.4`.
@ -33,7 +33,7 @@ actix-web = { version = "4", default-features = false }
|
|||
|
||||
bitflags = "2"
|
||||
bytes = "1"
|
||||
derive_more = "0.99.5"
|
||||
derive_more = { version = "2", features = ["display", "error", "from"] }
|
||||
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||
http-range = "0.1.4"
|
||||
log = "0.4"
|
||||
|
|
|
@@ -6,11 +6,11 @@ use derive_more::Display;
 pub enum FilesError {
 /// Path is not a directory.
 #[allow(dead_code)]
-#[display(fmt = "path is not a directory. Unable to serve static files")]
+#[display("path is not a directory. Unable to serve static files")]
 IsNotDirectory,

 /// Cannot render directory.
-#[display(fmt = "unable to render directory without index file")]
+#[display("unable to render directory without index file")]
 IsDirectory,
 }

@@ -25,19 +25,19 @@ impl ResponseError for FilesError {
 #[non_exhaustive]
 pub enum UriSegmentError {
 /// Segment started with the wrapped invalid character.
-#[display(fmt = "segment started with invalid character: ('{_0}')")]
+#[display("segment started with invalid character: ('{_0}')")]
 BadStart(char),

 /// Segment contained the wrapped invalid character.
-#[display(fmt = "segment contained invalid character ('{_0}')")]
+#[display("segment contained invalid character ('{_0}')")]
 BadChar(char),

 /// Segment ended with the wrapped invalid character.
-#[display(fmt = "segment ended with invalid character: ('{_0}')")]
+#[display("segment ended with invalid character: ('{_0}')")]
 BadEnd(char),

 /// Path is not a valid UTF-8 string after percent-decoding.
-#[display(fmt = "path is not a valid UTF-8 string after percent-decoding")]
+#[display("path is not a valid UTF-8 string after percent-decoding")]
 NotValidUtf8,
 }
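The `#[display(fmt = "...")]` → `#[display("...")]` edits here and in the files below track the `derive_more` upgrade from 0.99 to 2.x, where the `fmt =` key was removed and (as the Cargo.toml changes in this diff show) each derive is now enabled as an explicit feature. A standalone sketch of the new syntax, mirroring the error types in this crate:

```rust
use derive_more::{Display, Error};

// derive_more 2.x: the format string is the attribute's first argument.
// Under 0.99 the same line was written `#[display(fmt = "stream error")]`.
#[derive(Debug, Display, Error)]
#[display("stream error")]
struct StreamErr;

fn main() {
    assert_eq!(StreamErr.to_string(), "stream error");
}
```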
@@ -79,7 +79,7 @@ impl FilesService {

 let (req, _) = req.into_parts();

-(self.renderer)(&dir, &req).unwrap_or_else(|e| ServiceResponse::from_err(e, req))
+(self.renderer)(&dir, &req).unwrap_or_else(|err| ServiceResponse::from_err(err, req))
 }
 }
@@ -106,7 +106,7 @@ pub async fn test_server_with_addr<F: ServerServiceFactory<TcpStream>>(
 builder.set_verify(SslVerifyMode::NONE);
 let _ = builder
 .set_alpn_protos(b"\x02h2\x08http/1.1")
-.map_err(|e| log::error!("Can not set alpn protocol: {:?}", e));
+.map_err(|err| log::error!("Can not set ALPN protocol: {err}"));

 Connector::new()
 .conn_lifetime(Duration::from_secs(0))
@@ -2,6 +2,20 @@

 ## Unreleased

+## 3.10.0
+
+### Added
+
+- Add `header::CLEAR_SITE_DATA` constant.
+- Add `Extensions::get_or_insert[_with]()` methods.
+- Implement `From<Bytes>` for `Payload`.
+- Implement `From<Vec<u8>>` for `Payload`.
+
+### Changed
+
+- Update `brotli` dependency to `7`.
+- Minimum supported Rust version (MSRV) is now 1.75.
+
 ## 3.9.0

 ### Added
@@ -1,6 +1,6 @@
 [package]
 name = "actix-http"
-version = "3.9.0"
+version = "3.10.0"
 authors = [
 "Nikolay Kim <fafhrd91@gmail.com>",
 "Rob Ede <robjtede@icloud.com>",

@@ -106,12 +106,12 @@ actix-codec = "0.5"
 actix-utils = "3"
 actix-rt = { version = "2.2", default-features = false }

-ahash = "0.8"
 bitflags = "2"
 bytes = "1"
 bytestring = "1"
-derive_more = "0.99.5"
+derive_more = { version = "2", features = ["as_ref", "deref", "deref_mut", "display", "error", "from"] }
 encoding_rs = "0.8"
+foldhash = "0.1"
 futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
 http = "0.2.7"
 httparse = "1.5.1"

@@ -132,14 +132,14 @@ h2 = { version = "0.3.26", optional = true }
 # websockets
 local-channel = { version = "0.1", optional = true }
 base64 = { version = "0.22", optional = true }
-rand = { version = "0.8", optional = true }
+rand = { version = "0.9", optional = true }
 sha1 = { version = "0.10", optional = true }

 # openssl/rustls
 actix-tls = { version = "3.4", default-features = false, optional = true }

 # compress-*
-brotli = { version = "6", optional = true }
+brotli = { version = "7", optional = true }
 flate2 = { version = "1.0.13", optional = true }
 zstd = { version = "0.13", optional = true }

@@ -160,7 +160,7 @@ rcgen = "0.13"
 regex = "1.3"
 rustversion = "1"
 rustls-pemfile = "2"
-serde = { version = "1.0", features = ["derive"] }
+serde = { version = "1", features = ["derive"] }
 serde_json = "1.0"
 static_assertions = "1"
 tls-openssl = { package = "openssl", version = "0.10.55" }
@@ -5,11 +5,11 @@
 <!-- prettier-ignore-start -->

 [](https://crates.io/crates/actix-http)
-[](https://docs.rs/actix-http/3.9.0)
+[](https://docs.rs/actix-http/3.10.0)


 <br />
-[](https://deps.rs/crate/actix-http/3.9.0)
+[](https://deps.rs/crate/actix-http/3.10.0)
 [](https://crates.io/crates/actix-http)
 [](https://discord.gg/NWpN5mmg3x)
@@ -1,10 +1,10 @@
 use actix_http::HttpService;
 use actix_server::Server;
 use actix_service::map_config;
-use actix_web::{dev::AppConfig, get, App};
+use actix_web::{dev::AppConfig, get, App, Responder};

 #[get("/")]
-async fn index() -> &'static str {
+async fn index() -> impl Responder {
 "Hello, world. From Actix Web!"
 }
@@ -23,7 +23,7 @@ async fn main() -> io::Result<()> {
 body.extend_from_slice(&item?);
 }

-info!("request body: {:?}", body);
+info!("request body: {body:?}");

 let res = Response::build(StatusCode::OK)
 .insert_header(("x-head", HeaderValue::from_static("dummy value!")))

@@ -31,8 +31,7 @@ async fn main() -> io::Result<()> {

 Ok::<_, Error>(res)
 })
-// No TLS
-.tcp()
+.tcp() // No TLS
 })?
 .run()
 .await
@@ -17,7 +17,7 @@ async fn main() -> io::Result<()> {
 ext.insert(42u32);
 })
 .finish(|req: Request| async move {
-info!("{:?}", req);
+info!("{req:?}");

 let mut res = Response::build(StatusCode::OK);
 res.insert_header(("x-head", HeaderValue::from_static("dummy value!")));
@@ -22,16 +22,16 @@ async fn main() -> io::Result<()> {
 .bind("streaming-error", ("127.0.0.1", 8080), || {
 HttpService::build()
 .finish(|req| async move {
-info!("{:?}", req);
+info!("{req:?}");
 let res = Response::ok();

 Ok::<_, Infallible>(res.set_body(BodyStream::new(stream! {
 yield Ok(Bytes::from("123"));
 yield Ok(Bytes::from("456"));

-actix_rt::time::sleep(Duration::from_millis(1000)).await;
+actix_rt::time::sleep(Duration::from_secs(1)).await;

-yield Err(io::Error::new(io::ErrorKind::Other, ""));
+yield Err(io::Error::new(io::ErrorKind::Other, "abc"));
 })))
 })
 .tcp()
@@ -17,7 +17,6 @@ use bytes::{Bytes, BytesMut};
 use bytestring::ByteString;
 use futures_core::{ready, Stream};
 use tokio_util::codec::Encoder;
-use tracing::{info, trace};

 #[actix_rt::main]
 async fn main() -> io::Result<()> {

@@ -37,12 +36,12 @@ async fn main() -> io::Result<()> {
 }

 async fn handler(req: Request) -> Result<Response<BodyStream<Heartbeat>>, Error> {
-info!("handshaking");
+tracing::info!("handshaking");
 let mut res = ws::handshake(req.head())?;

 // handshake will always fail under HTTP/2

-info!("responding");
+tracing::info!("responding");
 res.message_body(BodyStream::new(Heartbeat::new(ws::Codec::new())))
 }

@@ -64,7 +63,7 @@ impl Stream for Heartbeat {
 type Item = Result<Bytes, Error>;

 fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
-trace!("poll");
+tracing::trace!("poll");

 ready!(self.as_mut().interval.poll_tick(cx));
@@ -131,7 +131,7 @@ mod tests {
 assert_eq!(to_bytes(body).await.ok(), Some(Bytes::from("12")));
 }
 #[derive(Debug, Display, Error)]
-#[display(fmt = "stream error")]
+#[display("stream error")]
 struct StreamErr;

 #[actix_rt::test]
@@ -38,7 +38,7 @@ pub async fn to_bytes<B: MessageBody>(body: B) -> Result<Bytes, B::Error> {

 /// Error type returned from [`to_bytes_limited`] when body produced exceeds limit.
 #[derive(Debug, Display, Error)]
-#[display(fmt = "limit exceeded while collecting body bytes")]
+#[display("limit exceeded while collecting body bytes")]
 #[non_exhaustive]
 pub struct BodyLimitExceeded;
@@ -415,11 +415,11 @@ fn new_brotli_compressor() -> Box<brotli::CompressorWriter<Writer>> {
 #[non_exhaustive]
 pub enum EncoderError {
 /// Wrapped body stream error.
-#[display(fmt = "body")]
+#[display("body")]
 Body(Box<dyn StdError>),

 /// Generic I/O error.
-#[display(fmt = "io")]
+#[display("io")]
 Io(io::Error),
 }
@ -80,28 +80,28 @@ impl From<Error> for Response<BoxBody> {
|
|||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Display)]
|
||||
pub(crate) enum Kind {
|
||||
#[display(fmt = "error processing HTTP")]
|
||||
#[display("error processing HTTP")]
|
||||
Http,
|
||||
|
||||
#[display(fmt = "error parsing HTTP message")]
|
||||
#[display("error parsing HTTP message")]
|
||||
Parse,
|
||||
|
||||
#[display(fmt = "request payload read error")]
|
||||
#[display("request payload read error")]
|
||||
Payload,
|
||||
|
||||
#[display(fmt = "response body write error")]
|
||||
#[display("response body write error")]
|
||||
Body,
|
||||
|
||||
#[display(fmt = "send response error")]
|
||||
#[display("send response error")]
|
||||
SendResponse,
|
||||
|
||||
#[display(fmt = "error in WebSocket process")]
|
||||
#[display("error in WebSocket process")]
|
||||
Ws,
|
||||
|
||||
#[display(fmt = "connection error")]
|
||||
#[display("connection error")]
|
||||
Io,
|
||||
|
||||
#[display(fmt = "encoder error")]
|
||||
#[display("encoder error")]
|
||||
Encoder,
|
||||
}
|
||||
|
||||
|
@ -160,44 +160,44 @@ impl From<crate::ws::ProtocolError> for Error {
|
|||
#[non_exhaustive]
|
||||
pub enum ParseError {
|
||||
/// An invalid `Method`, such as `GE.T`.
|
||||
#[display(fmt = "invalid method specified")]
|
||||
#[display("invalid method specified")]
|
||||
Method,
|
||||
|
||||
/// An invalid `Uri`, such as `exam ple.domain`.
|
||||
#[display(fmt = "URI error: {}", _0)]
|
||||
#[display("URI error: {}", _0)]
|
||||
Uri(InvalidUri),
|
||||
|
||||
/// An invalid `HttpVersion`, such as `HTP/1.1`
|
||||
#[display(fmt = "invalid HTTP version specified")]
|
||||
#[display("invalid HTTP version specified")]
|
||||
Version,
|
||||
|
||||
/// An invalid `Header`.
|
||||
#[display(fmt = "invalid Header provided")]
|
||||
#[display("invalid Header provided")]
|
||||
Header,
|
||||
|
||||
/// A message head is too large to be reasonable.
|
||||
#[display(fmt = "message head is too large")]
|
||||
#[display("message head is too large")]
|
||||
TooLarge,
|
||||
|
||||
/// A message reached EOF, but is not complete.
|
||||
#[display(fmt = "message is incomplete")]
|
||||
#[display("message is incomplete")]
|
||||
Incomplete,
|
||||
|
||||
/// An invalid `Status`, such as `1337 ELITE`.
|
||||
#[display(fmt = "invalid status provided")]
|
||||
#[display("invalid status provided")]
|
||||
Status,
|
||||
|
||||
/// A timeout occurred waiting for an IO event.
|
||||
#[allow(dead_code)]
|
||||
#[display(fmt = "timeout")]
|
||||
#[display("timeout")]
|
||||
Timeout,
|
||||
|
||||
/// An I/O error that occurred while trying to read or write to a network stream.
|
||||
#[display(fmt = "I/O error: {}", _0)]
|
||||
#[display("I/O error: {}", _0)]
|
||||
Io(io::Error),
|
||||
|
||||
/// Parsing a field as string failed.
|
||||
#[display(fmt = "UTF-8 error: {}", _0)]
|
||||
#[display("UTF-8 error: {}", _0)]
|
||||
Utf8(Utf8Error),
|
||||
}
|
||||
|
||||
|
@ -256,28 +256,28 @@ impl From<ParseError> for Response<BoxBody> {
|
|||
#[non_exhaustive]
|
||||
pub enum PayloadError {
|
||||
/// A payload reached EOF, but is not complete.
|
||||
#[display(fmt = "payload reached EOF before completing: {:?}", _0)]
|
||||
#[display("payload reached EOF before completing: {:?}", _0)]
|
||||
Incomplete(Option<io::Error>),
|
||||
|
||||
/// Content encoding stream corruption.
|
||||
#[display(fmt = "can not decode content-encoding")]
|
||||
#[display("can not decode content-encoding")]
|
||||
EncodingCorrupted,
|
||||
|
||||
/// Payload reached size limit.
|
||||
#[display(fmt = "payload reached size limit")]
|
||||
#[display("payload reached size limit")]
|
||||
Overflow,
|
||||
|
||||
/// Payload length is unknown.
|
||||
#[display(fmt = "payload length is unknown")]
|
||||
#[display("payload length is unknown")]
|
||||
UnknownLength,
|
||||
|
||||
/// HTTP/2 payload error.
|
||||
#[cfg(feature = "http2")]
|
||||
#[display(fmt = "{}", _0)]
|
||||
#[display("{}", _0)]
|
||||
Http2Payload(::h2::Error),
|
||||
|
||||
/// Generic I/O error.
|
||||
#[display(fmt = "{}", _0)]
|
||||
#[display("{}", _0)]
|
||||
Io(io::Error),
|
||||
}
|
||||
|
||||
|
@ -326,44 +326,44 @@ impl From<PayloadError> for Error {
|
|||
#[non_exhaustive]
|
||||
pub enum DispatchError {
|
||||
/// Service error.
|
||||
#[display(fmt = "service error")]
|
||||
#[display("service error")]
|
||||
Service(Response<BoxBody>),
|
||||
|
||||
/// Body streaming error.
|
||||
#[display(fmt = "body error: {}", _0)]
|
||||
#[display("body error: {}", _0)]
|
||||
Body(Box<dyn StdError>),
|
||||
|
||||
/// Upgrade service error.
|
||||
#[display(fmt = "upgrade error")]
|
||||
#[display("upgrade error")]
|
||||
Upgrade,
|
||||
|
||||
/// An `io::Error` that occurred while trying to read or write to a network stream.
|
||||
#[display(fmt = "I/O error: {}", _0)]
|
||||
#[display("I/O error: {}", _0)]
|
||||
Io(io::Error),
|
||||
|
||||
/// Request parse error.
|
||||
#[display(fmt = "request parse error: {}", _0)]
|
||||
#[display("request parse error: {}", _0)]
|
||||
Parse(ParseError),
|
||||
|
||||
/// HTTP/2 error.
|
||||
#[display(fmt = "{}", _0)]
|
||||
#[display("{}", _0)]
|
||||
#[cfg(feature = "http2")]
|
||||
H2(h2::Error),
|
||||
|
||||
/// The first request did not complete within the specified timeout.
|
||||
#[display(fmt = "request did not complete within the specified timeout")]
|
||||
#[display("request did not complete within the specified timeout")]
|
||||
SlowRequestTimeout,
|
||||
|
||||
/// Disconnect timeout. Makes sense for TLS streams.
|
||||
#[display(fmt = "connection shutdown timeout")]
|
||||
#[display("connection shutdown timeout")]
|
||||
DisconnectTimeout,
|
||||
|
||||
/// Handler dropped payload before reading EOF.
|
||||
#[display(fmt = "handler dropped payload before reading EOF")]
|
||||
#[display("handler dropped payload before reading EOF")]
|
||||
HandlerDroppedPayload,
|
||||
|
||||
/// Internal error.
|
||||
#[display(fmt = "internal error")]
|
||||
#[display("internal error")]
|
||||
InternalError,
|
||||
}
|
||||
|
||||
|
@ -389,11 +389,11 @@ impl StdError for DispatchError {
|
|||
#[non_exhaustive]
|
||||
pub enum ContentTypeError {
|
||||
/// Can not parse content type.
|
||||
#[display(fmt = "could not parse content type")]
|
||||
#[display("could not parse content type")]
|
||||
ParseError,
|
||||
|
||||
/// Unknown content encoding.
|
||||
#[display(fmt = "unknown content encoding")]
|
||||
#[display("unknown content encoding")]
|
||||
UnknownEncoding,
|
||||
}
|
||||
|
||||
|
|
|
@@ -31,7 +31,7 @@ impl Hasher for NoOpHasher {
 /// All entries into this map must be owned types (or static references).
 #[derive(Default)]
 pub struct Extensions {
-/// Use AHasher with a std HashMap with for faster lookups on the small `TypeId` keys.
+// use no-op hasher with a std HashMap with for faster lookups on the small `TypeId` keys
 map: HashMap<TypeId, Box<dyn Any>, BuildHasherDefault<NoOpHasher>>,
 }

@@ -104,6 +104,46 @@ impl Extensions {
 .and_then(|boxed| boxed.downcast_mut())
 }

+/// Inserts the given `value` into the extensions if it is not present, then returns a reference
+/// to the value in the extensions.
+///
+/// ```
+/// # use actix_http::Extensions;
+/// let mut map = Extensions::new();
+/// assert_eq!(map.get::<Vec<u32>>(), None);
+///
+/// map.get_or_insert(Vec::<u32>::new()).push(1);
+/// assert_eq!(map.get::<Vec<u32>>(), Some(&vec![1]));
+///
+/// map.get_or_insert(Vec::<u32>::new()).push(2);
+/// assert_eq!(map.get::<Vec<u32>>(), Some(&vec![1,2]));
+/// ```
+pub fn get_or_insert<T: 'static>(&mut self, value: T) -> &mut T {
+self.get_or_insert_with(|| value)
+}
+
+/// Inserts a value computed from `f` into the extensions if the given `value` is not present,
+/// then returns a reference to the value in the extensions.
+///
+/// ```
+/// # use actix_http::Extensions;
+/// let mut map = Extensions::new();
+/// assert_eq!(map.get::<Vec<u32>>(), None);
+///
+/// map.get_or_insert_with(Vec::<u32>::new).push(1);
+/// assert_eq!(map.get::<Vec<u32>>(), Some(&vec![1]));
+///
+/// map.get_or_insert_with(Vec::<u32>::new).push(2);
+/// assert_eq!(map.get::<Vec<u32>>(), Some(&vec![1,2]));
+/// ```
+pub fn get_or_insert_with<T: 'static, F: FnOnce() -> T>(&mut self, default: F) -> &mut T {
+self.map
+.entry(TypeId::of::<T>())
+.or_insert_with(|| Box::new(default()))
+.downcast_mut()
+.expect("extensions map should now contain a T value")
+}
+
 /// Remove an item from the map of a given type.
 ///
 /// If an item of this type was already stored, it will be returned.
@@ -313,7 +313,7 @@ impl MessageType for RequestHeadType {
 _ => return Err(io::Error::new(io::ErrorKind::Other, "unsupported version")),
 }
 )
-.map_err(|e| io::Error::new(io::ErrorKind::Other, e))
+.map_err(|err| io::Error::new(io::ErrorKind::Other, err))
 }
 }

@@ -433,7 +433,7 @@ impl TransferEncoding {
 buf.extend_from_slice(b"0\r\n\r\n");
 } else {
 writeln!(helpers::MutWriter(buf), "{:X}\r", msg.len())
-.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
+.map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;

 buf.reserve(msg.len() + 2);
 buf.extend_from_slice(msg);
@@ -480,15 +480,15 @@ where
 let cfg = self.cfg.clone();

 Box::pin(async move {
-let expect = expect
-.await
-.map_err(|e| error!("Init http expect service error: {:?}", e))?;
+let expect = expect.await.map_err(|err| {
+tracing::error!("Initialization of HTTP expect service error: {err:?}");
+})?;

 let upgrade = match upgrade {
 Some(upgrade) => {
-let upgrade = upgrade
-.await
-.map_err(|e| error!("Init http upgrade service error: {:?}", e))?;
+let upgrade = upgrade.await.map_err(|err| {
+tracing::error!("Initialization of HTTP upgrade service error: {err:?}");
+})?;
 Some(upgrade)
 }
 None => None,

@@ -496,7 +496,7 @@ where

 let service = service
 .await
-.map_err(|e| error!("Init http service error: {:?}", e))?;
+.map_err(|err| error!("Initialization of HTTP service error: {err:?}"))?;

 Ok(H1ServiceHandler::new(
 cfg,
@@ -18,6 +18,14 @@ pub const CACHE_STATUS: HeaderName = HeaderName::from_static("cache-status");
 // TODO(breaking): replace with http's version
 pub const CDN_CACHE_CONTROL: HeaderName = HeaderName::from_static("cdn-cache-control");

+/// Response header field that sends a signal to the user agent that it ought to remove all data of
+/// a certain set of types.
+///
+/// See the [W3C Clear-Site-Data spec] for full semantics.
+///
+/// [W3C Clear-Site-Data spec]: https://www.w3.org/TR/clear-site-data/#header
+pub const CLEAR_SITE_DATA: HeaderName = HeaderName::from_static("clear-site-data");
+
 /// Response header that prevents a document from loading any cross-origin resources that don't
 /// explicitly grant the document permission (using [CORP] or [CORS]).
 ///
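A hedged sketch of using the new constant from a handler; the route and directive list are illustrative, and it assumes the matching actix-web release re-exports `CLEAR_SITE_DATA` through `actix_web::http::header` alongside the other constants updated in the re-export list below:

```rust
use actix_web::{http::header, HttpResponse, Responder};

async fn logout() -> impl Responder {
    HttpResponse::Ok()
        // Clear-Site-Data directives are quoted tokens per the W3C spec.
        .insert_header((header::CLEAR_SITE_DATA, r#""cache", "cookies", "storage""#))
        .body("signed out")
}
```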
@@ -2,7 +2,7 @@

 use std::{borrow::Cow, collections::hash_map, iter, ops};

-use ahash::AHashMap;
+use foldhash::{HashMap as FoldHashMap, HashMapExt as _};
 use http::header::{HeaderName, HeaderValue};
 use smallvec::{smallvec, SmallVec};

@@ -47,7 +47,7 @@ use super::AsHeaderName;
 /// ```
 #[derive(Debug, Clone, Default)]
 pub struct HeaderMap {
-pub(crate) inner: AHashMap<HeaderName, Value>,
+pub(crate) inner: FoldHashMap<HeaderName, Value>,
 }

 /// A bespoke non-empty list for HeaderMap values.

@@ -116,7 +116,7 @@ impl HeaderMap {
 /// ```
 pub fn with_capacity(capacity: usize) -> Self {
 HeaderMap {
-inner: AHashMap::with_capacity(capacity),
+inner: FoldHashMap::with_capacity(capacity),
 }
 }

@@ -830,7 +830,7 @@ impl<'a> Drain<'a> {
 }
 }

-impl<'a> Iterator for Drain<'a> {
+impl Iterator for Drain<'_> {
 type Item = (Option<HeaderName>, HeaderValue);

 fn next(&mut self) -> Option<Self::Item> {
@@ -42,9 +42,9 @@ pub use self::{
 as_name::AsHeaderName,
 // re-export list is explicit so that any updates to `http` do not conflict with this set
 common::{
-CACHE_STATUS, CDN_CACHE_CONTROL, CROSS_ORIGIN_EMBEDDER_POLICY, CROSS_ORIGIN_OPENER_POLICY,
-CROSS_ORIGIN_RESOURCE_POLICY, PERMISSIONS_POLICY, X_FORWARDED_FOR, X_FORWARDED_HOST,
-X_FORWARDED_PROTO,
+CACHE_STATUS, CDN_CACHE_CONTROL, CLEAR_SITE_DATA, CROSS_ORIGIN_EMBEDDER_POLICY,
+CROSS_ORIGIN_OPENER_POLICY, CROSS_ORIGIN_RESOURCE_POLICY, PERMISSIONS_POLICY,
+X_FORWARDED_FOR, X_FORWARDED_HOST, X_FORWARDED_PROTO,
 },
 into_pair::TryIntoHeaderPair,
 into_value::TryIntoHeaderValue,
@@ -11,7 +11,7 @@ use crate::{

 /// Error returned when a content encoding is unknown.
 #[derive(Debug, Display, Error)]
-#[display(fmt = "unsupported content encoding")]
+#[display("unsupported content encoding")]
 pub struct ContentEncodingParseError;

 /// Represents a supported content encoding.
@@ -125,7 +125,7 @@ pub fn itoa_fmt<W: fmt::Write, V: itoa::Integer>(mut wr: W, value: V) -> fmt::Re
 }

 #[derive(Debug, Clone, Display, Error)]
-#[display(fmt = "quality out of bounds")]
+#[display("quality out of bounds")]
 #[non_exhaustive]
 pub struct QualityOutOfBounds;
@@ -61,7 +61,7 @@ pub fn write_content_length<B: BufMut>(n: u64, buf: &mut B, camel_case: bool) {
 /// perform a remaining length check before writing.
 pub(crate) struct MutWriter<'a, B>(pub(crate) &'a mut B);

-impl<'a, B> io::Write for MutWriter<'a, B>
+impl<B> io::Write for MutWriter<'_, B>
 where
 B: BufMut,
 {
@@ -103,7 +103,7 @@ pub trait HttpMessage: Sized {
 }
 }

-impl<'a, T> HttpMessage for &'a mut T
+impl<T> HttpMessage for &mut T
 where
 T: HttpMessage,
 {
@@ -41,13 +41,31 @@ pin_project! {
 }

 impl<S> From<crate::h1::Payload> for Payload<S> {
 #[inline]
 fn from(payload: crate::h1::Payload) -> Self {
 Payload::H1 { payload }
 }
 }

+impl<S> From<Bytes> for Payload<S> {
+#[inline]
+fn from(bytes: Bytes) -> Self {
+let (_, mut pl) = crate::h1::Payload::create(true);
+pl.unread_data(bytes);
+self::Payload::from(pl)
+}
+}
+
+impl<S> From<Vec<u8>> for Payload<S> {
+#[inline]
+fn from(vec: Vec<u8>) -> Self {
+Payload::from(Bytes::from(vec))
+}
+}
+
 #[cfg(feature = "http2")]
 impl<S> From<crate::h2::Payload> for Payload<S> {
 #[inline]
 fn from(payload: crate::h2::Payload) -> Self {
 Payload::H2 { payload }
 }

@@ -55,6 +73,7 @@ impl<S> From<crate::h2::Payload> for Payload<S> {

 #[cfg(feature = "http2")]
 impl<S> From<::h2::RecvStream> for Payload<S> {
+#[inline]
 fn from(stream: ::h2::RecvStream) -> Self {
 Payload::H2 {
 payload: crate::h2::Payload::new(stream),

@@ -63,13 +82,15 @@ impl<S> From<::h2::RecvStream> for Payload<S> {
 }

 impl From<BoxedPayloadStream> for Payload {
+#[inline]
 fn from(payload: BoxedPayloadStream) -> Self {
 Payload::Stream { payload }
 }
 }

 impl<S> Payload<S> {
-/// Takes current payload and replaces it with `None` value
+/// Takes current payload and replaces it with `None` value.
+#[must_use]
 pub fn take(&mut self) -> Payload<S> {
 mem::replace(self, Payload::None)
 }
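A short sketch of what the new `From` impls allow, e.g. constructing an in-memory payload for tests (standalone illustration; only the conversions themselves come from this diff):

```rust
use actix_http::Payload;
use bytes::Bytes;

fn main() {
    // Both conversions stage the data as an already-read HTTP/1 payload chunk.
    let _from_bytes: Payload = Bytes::from_static(b"{\"hello\":\"world\"}").into();
    let _from_vec: Payload = b"raw body".to_vec().into();
}
```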
@@ -775,23 +775,23 @@ where
 let cfg = self.cfg.clone();

 Box::pin(async move {
-let expect = expect
-.await
-.map_err(|e| error!("Init http expect service error: {:?}", e))?;
+let expect = expect.await.map_err(|err| {
+tracing::error!("Initialization of HTTP expect service error: {err:?}");
+})?;

 let upgrade = match upgrade {
 Some(upgrade) => {
-let upgrade = upgrade
-.await
-.map_err(|e| error!("Init http upgrade service error: {:?}", e))?;
+let upgrade = upgrade.await.map_err(|err| {
+tracing::error!("Initialization of HTTP upgrade service error: {err:?}");
+})?;
 Some(upgrade)
 }
 None => None,
 };

-let service = service
-.await
-.map_err(|e| error!("Init http service error: {:?}", e))?;
+let service = service.await.map_err(|err| {
+tracing::error!("Initialization of HTTP service error: {err:?}");
+})?;

 Ok(HttpServiceHandler::new(
 cfg,
@@ -114,14 +114,14 @@ mod inner {
 {
 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
 match *self {
-DispatcherError::Service(ref e) => {
-write!(fmt, "DispatcherError::Service({:?})", e)
+DispatcherError::Service(ref err) => {
+write!(fmt, "DispatcherError::Service({err:?})")
 }
-DispatcherError::Encoder(ref e) => {
-write!(fmt, "DispatcherError::Encoder({:?})", e)
+DispatcherError::Encoder(ref err) => {
+write!(fmt, "DispatcherError::Encoder({err:?})")
 }
-DispatcherError::Decoder(ref e) => {
-write!(fmt, "DispatcherError::Decoder({:?})", e)
+DispatcherError::Decoder(ref err) => {
+write!(fmt, "DispatcherError::Decoder({err:?})")
 }
 }
 }

@@ -136,9 +136,9 @@ mod inner {
 {
 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
 match *self {
-DispatcherError::Service(ref e) => write!(fmt, "{}", e),
-DispatcherError::Encoder(ref e) => write!(fmt, "{:?}", e),
-DispatcherError::Decoder(ref e) => write!(fmt, "{:?}", e),
+DispatcherError::Service(ref err) => write!(fmt, "{err}"),
+DispatcherError::Encoder(ref err) => write!(fmt, "{err:?}"),
+DispatcherError::Decoder(ref err) => write!(fmt, "{err:?}"),
 }
 }
 }
@@ -27,43 +27,43 @@ pub use self::{
 #[derive(Debug, Display, Error, From)]
 pub enum ProtocolError {
 /// Received an unmasked frame from client.
-#[display(fmt = "received an unmasked frame from client")]
+#[display("received an unmasked frame from client")]
 UnmaskedFrame,

 /// Received a masked frame from server.
-#[display(fmt = "received a masked frame from server")]
+#[display("received a masked frame from server")]
 MaskedFrame,

 /// Encountered invalid opcode.
-#[display(fmt = "invalid opcode ({})", _0)]
+#[display("invalid opcode ({})", _0)]
 InvalidOpcode(#[error(not(source))] u8),

 /// Invalid control frame length
-#[display(fmt = "invalid control frame length ({})", _0)]
+#[display("invalid control frame length ({})", _0)]
 InvalidLength(#[error(not(source))] usize),

 /// Bad opcode.
-#[display(fmt = "bad opcode")]
+#[display("bad opcode")]
 BadOpCode,

 /// A payload reached size limit.
-#[display(fmt = "payload reached size limit")]
+#[display("payload reached size limit")]
 Overflow,

 /// Continuation has not started.
-#[display(fmt = "continuation has not started")]
+#[display("continuation has not started")]
 ContinuationNotStarted,

 /// Received new continuation but it is already started.
-#[display(fmt = "received new continuation but it has already started")]
+#[display("received new continuation but it has already started")]
 ContinuationStarted,

 /// Unknown continuation fragment.
-#[display(fmt = "unknown continuation fragment: {}", _0)]
+#[display("unknown continuation fragment: {}", _0)]
 ContinuationFragment(#[error(not(source))] OpCode),

 /// I/O error.
-#[display(fmt = "I/O error: {}", _0)]
+#[display("I/O error: {}", _0)]
 Io(io::Error),
 }

@@ -71,27 +71,27 @@ pub enum ProtocolError {
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Display, Error)]
 pub enum HandshakeError {
 /// Only get method is allowed.
-#[display(fmt = "method not allowed")]
+#[display("method not allowed")]
 GetMethodRequired,

 /// Upgrade header if not set to WebSocket.
-#[display(fmt = "WebSocket upgrade is expected")]
+#[display("WebSocket upgrade is expected")]
 NoWebsocketUpgrade,

 /// Connection header is not set to upgrade.
-#[display(fmt = "connection upgrade is expected")]
+#[display("connection upgrade is expected")]
 NoConnectionUpgrade,

 /// WebSocket version header is not set.
-#[display(fmt = "WebSocket version header is required")]
+#[display("WebSocket version header is required")]
 NoVersionHeader,

 /// Unsupported WebSocket version.
-#[display(fmt = "unsupported WebSocket version")]
+#[display("unsupported WebSocket version")]
 UnsupportedVersion,

 /// WebSocket key is not set or wrong.
-#[display(fmt = "unknown WebSocket key")]
+#[display("unknown WebSocket key")]
 BadWebsocketKey,
 }
@@ -94,7 +94,7 @@ async fn with_query_parameter() {
 }

 #[derive(Debug, Display, Error)]
-#[display(fmt = "expect failed")]
+#[display("expect failed")]
 struct ExpectFailed;

 impl From<ExpectFailed> for Response<BoxBody> {
@@ -398,7 +398,7 @@ async fn h2_response_http_error_handling() {
 }

 #[derive(Debug, Display, Error)]
-#[display(fmt = "error")]
+#[display("error")]
 struct BadRequest;

 impl From<BadRequest> for Response<BoxBody> {
@@ -480,7 +480,7 @@ async fn h2_response_http_error_handling() {
 }

 #[derive(Debug, Display, Error)]
-#[display(fmt = "error")]
+#[display("error")]
 struct BadRequest;

 impl From<BadRequest> for Response<BoxBody> {
@@ -16,6 +16,7 @@ use actix_utils::future::{err, ok, ready};
 use bytes::Bytes;
 use derive_more::{Display, Error};
 use futures_util::{stream::once, FutureExt as _, StreamExt as _};
+use rand::Rng as _;
 use regex::Regex;

 #[actix_rt::test]

@@ -62,7 +63,7 @@ async fn h1_2() {
 }

 #[derive(Debug, Display, Error)]
-#[display(fmt = "expect failed")]
+#[display("expect failed")]
 struct ExpectFailed;

 impl From<ExpectFailed> for Response<BoxBody> {

@@ -164,7 +165,10 @@ async fn chunked_payload() {

 for chunk_size in chunk_sizes.iter() {
 let mut bytes = Vec::new();
-let random_bytes: Vec<u8> = (0..*chunk_size).map(|_| rand::random::<u8>()).collect();
+let random_bytes = rand::rng()
+.sample_iter(rand::distr::StandardUniform)
+.take(*chunk_size)
+.collect::<Vec<_>>();

 bytes.extend(format!("{:X}\r\n", chunk_size).as_bytes());
 bytes.extend(&random_bytes[..]);
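The rewrite above follows the rand 0.8 → 0.9 renames used throughout this diff: `rand::thread_rng()` becomes `rand::rng()`, `rand::distributions` becomes `rand::distr`, and the `Standard` distribution is now `StandardUniform`. The same pattern as a standalone sketch:

```rust
use rand::Rng as _;

// Collect `len` uniformly random bytes with the rand 0.9 API.
fn random_chunk(len: usize) -> Vec<u8> {
    rand::rng()
        .sample_iter(rand::distr::StandardUniform)
        .take(len)
        .collect()
}

fn main() {
    assert_eq!(random_chunk(16).len(), 16);
}
```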
@@ -723,7 +727,7 @@ async fn h1_response_http_error_handling() {
 }

 #[derive(Debug, Display, Error)]
-#[display(fmt = "error")]
+#[display("error")]
 struct BadRequest;

 impl From<BadRequest> for Response<BoxBody> {
@@ -37,16 +37,16 @@ impl WsService {

 #[derive(Debug, Display, Error, From)]
 enum WsServiceError {
-#[display(fmt = "HTTP error")]
+#[display("HTTP error")]
 Http(actix_http::Error),

-#[display(fmt = "WS handshake error")]
+#[display("WS handshake error")]
 Ws(actix_http::ws::HandshakeError),

-#[display(fmt = "I/O error")]
+#[display("I/O error")]
 Io(std::io::Error),

-#[display(fmt = "dispatcher error")]
+#[display("dispatcher error")]
 Dispatcher,
 }
@@ -18,8 +18,8 @@ all-features = true
 proc-macro = true

 [dependencies]
+bytesize = "2"
 darling = "0.20"
-parse-size = "1"
 proc-macro2 = "1"
 quote = "1"
 syn = "2"

@@ -27,7 +27,7 @@ syn = "2"
 [dev-dependencies]
 actix-multipart = "0.7"
 actix-web = "4"
-rustversion = "1"
+rustversion-msrv = "0.100"
 trybuild = "1"

 [lints]
@@ -5,11 +5,12 @@
 #![doc(html_logo_url = "https://actix.rs/img/logo.png")]
 #![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
 #![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![allow(clippy::disallowed_names)] // false positives in some macro expansions

 use std::collections::HashSet;

+use bytesize::ByteSize;
 use darling::{FromDeriveInput, FromField, FromMeta};
-use parse_size::parse_size;
 use proc_macro::TokenStream;
 use proc_macro2::Ident;
 use quote::quote;

@@ -35,6 +36,7 @@ struct MultipartFormAttrs {
 duplicate_field: DuplicateField,
 }

+#[allow(clippy::disallowed_names)] // false positive in macro expansion
 #[derive(FromField, Default)]
 #[darling(attributes(multipart), default)]
 struct FieldAttrs {

@@ -101,7 +103,7 @@ struct ParsedField<'t> {
 /// # Field Limits
 ///
 /// You can use the `#[multipart(limit = "<size>")]` attribute to set field level limits. The limit
-/// string is parsed using [parse_size].
+/// string is parsed using [`bytesize`].
 ///
 /// Note: the form is also subject to the global limits configured using `MultipartFormConfig`.
 ///

@@ -148,7 +150,7 @@ struct ParsedField<'t> {
 /// struct Form { }
 /// ```
 ///
-/// [parse_size]: https://docs.rs/parse-size/1/parse_size
+/// [`bytesize`]: https://docs.rs/bytesize/2
 #[proc_macro_derive(MultipartForm, attributes(multipart))]
 pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
 let input: syn::DeriveInput = parse_macro_input!(input);

@@ -189,8 +191,8 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenS
 let attrs = FieldAttrs::from_field(field).map_err(|err| err.write_errors())?;
 let serialization_name = attrs.rename.unwrap_or_else(|| rust_name.to_string());

-let limit = match attrs.limit.map(|limit| match parse_size(&limit) {
-Ok(size) => Ok(usize::try_from(size).unwrap()),
+let limit = match attrs.limit.map(|limit| match limit.parse::<ByteSize>() {
+Ok(ByteSize(size)) => Ok(usize::try_from(size).unwrap()),
 Err(err) => Err(syn::Error::new(
 field.ident.as_ref().unwrap().span(),
 format!("Could not parse size limit `{}`: {}", limit, err),
@@ -1,4 +1,4 @@
-#[rustversion::stable(1.72)] // MSRV
+#[rustversion_msrv::msrv]
 #[test]
 fn compile_macros() {
 let t = trybuild::TestCases::new();
@@ -1,16 +1,16 @@
-error: Could not parse size limit `2 bytes`: invalid digit found in string
+error: Could not parse size limit `2 bytes`: couldn't parse "bytes" into a known SI unit, couldn't parse unit of "bytes"
 --> tests/trybuild/size-limit-parse-fail.rs:6:5
 |
 6 | description: Text<String>,
 | ^^^^^^^^^^^

-error: Could not parse size limit `2 megabytes`: invalid digit found in string
+error: Could not parse size limit `2 megabytes`: couldn't parse "megabytes" into a known SI unit, couldn't parse unit of "megabytes"
 --> tests/trybuild/size-limit-parse-fail.rs:12:5
 |
 12 | description: Text<String>,
 | ^^^^^^^^^^^

-error: Could not parse size limit `four meters`: invalid digit found in string
+error: Could not parse size limit `four meters`: couldn't parse "four meters" into a ByteSize, cannot parse float from empty string
 --> tests/trybuild/size-limit-parse-fail.rs:18:5
 |
 18 | description: Text<String>,
@@ -2,6 +2,8 @@

 ## Unreleased

+- Minimum supported Rust version (MSRV) is now 1.75.
+
 ## 0.7.2

 - Fix re-exported version of `actix-multipart-derive`.
@@ -42,7 +42,7 @@ actix-multipart-derive = { version = "=0.7.0", optional = true }
 actix-utils = "3"
 actix-web = { version = "4", default-features = false }

-derive_more = "0.99.5"
+derive_more = { version = "2", features = ["display", "error", "from"] }
 futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
 futures-util = { version = "0.3.17", default-features = false, features = ["alloc"] }
 httparse = "1.3"

@@ -50,7 +50,7 @@ local-waker = "0.1"
 log = "0.4"
 memchr = "2.5"
 mime = "0.3"
-rand = "0.8"
+rand = "0.9"
 serde = "1"
 serde_json = "1"
 serde_plain = "1"

@@ -59,7 +59,7 @@ tokio = { version = "1.24.2", features = ["sync", "io-util"] }

 [dev-dependencies]
 actix-http = "3"
-actix-multipart-rfc7578 = "0.10"
+actix-multipart-rfc7578 = "0.11"
 actix-rt = "2.2"
 actix-test = "0.1"
 actix-web = "4"
@@ -12,11 +12,11 @@ use derive_more::{Display, Error, From};
 #[non_exhaustive]
 pub enum Error {
 /// Could not find Content-Type header.
-#[display(fmt = "Could not find Content-Type header")]
+#[display("Could not find Content-Type header")]
 ContentTypeMissing,

 /// Could not parse Content-Type header.
-#[display(fmt = "Could not parse Content-Type header")]
+#[display("Could not parse Content-Type header")]
 ContentTypeParse,

 /// Parsed Content-Type did not have "multipart" top-level media type.

@@ -25,11 +25,11 @@ pub enum Error {
 /// "multipart/form-data" media type.
 ///
 /// [`MultipartForm`]: struct@crate::form::MultipartForm
-#[display(fmt = "Parsed Content-Type did not have \"multipart\" top-level media type")]
+#[display("Parsed Content-Type did not have 'multipart' top-level media type")]
 ContentTypeIncompatible,

 /// Multipart boundary is not found.
-#[display(fmt = "Multipart boundary is not found")]
+#[display("Multipart boundary is not found")]
 BoundaryMissing,

 /// Content-Disposition header was not found or not of disposition type "form-data" when parsing

@@ -39,7 +39,7 @@ pub enum Error {
 /// always be present and have a disposition type of "form-data".
 ///
 /// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
-#[display(fmt = "Content-Disposition header was not found when parsing a \"form-data\" field")]
+#[display("Content-Disposition header was not found when parsing a \"form-data\" field")]
 ContentDispositionMissing,

 /// Content-Disposition name parameter was not found when parsing a "form-data" field.

@@ -48,48 +48,48 @@ pub enum Error {
 /// always include a "name" parameter.
 ///
 /// [RFC 7578 §4.2]: https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
-#[display(fmt = "Content-Disposition header was not found when parsing a \"form-data\" field")]
+#[display("Content-Disposition header was not found when parsing a \"form-data\" field")]
 ContentDispositionNameMissing,

 /// Nested multipart is not supported.
-#[display(fmt = "Nested multipart is not supported")]
+#[display("Nested multipart is not supported")]
 Nested,

 /// Multipart stream is incomplete.
-#[display(fmt = "Multipart stream is incomplete")]
+#[display("Multipart stream is incomplete")]
 Incomplete,

 /// Field parsing failed.
-#[display(fmt = "Error during field parsing")]
+#[display("Error during field parsing")]
 Parse(ParseError),

 /// HTTP payload error.
-#[display(fmt = "Payload error")]
+#[display("Payload error")]
 Payload(PayloadError),

 /// Stream is not consumed.
-#[display(fmt = "Stream is not consumed")]
+#[display("Stream is not consumed")]
 NotConsumed,

 /// Form field handler raised error.
-#[display(fmt = "An error occurred processing field: {name}")]
+#[display("An error occurred processing field: {name}")]
 Field {
 name: String,
 source: actix_web::Error,
 },

 /// Duplicate field found (for structure that opted-in to denying duplicate fields).
-#[display(fmt = "Duplicate field found: {_0}")]
+#[display("Duplicate field found: {_0}")]
 #[from(ignore)]
 DuplicateField(#[error(not(source))] String),

 /// Required field is missing.
-#[display(fmt = "Required field is missing: {_0}")]
+#[display("Required field is missing: {_0}")]
 #[from(ignore)]
 MissingField(#[error(not(source))] String),

 /// Unknown field (for structure that opted-in to denying unknown fields).
-#[display(fmt = "Unknown field: {_0}")]
+#[display("Unknown field: {_0}")]
 #[from(ignore)]
 UnknownField(#[error(not(source))] String),
 }
@@ -25,7 +25,7 @@ use crate::{

 /// Error type returned from [`Field::bytes()`] when field data is larger than limit.
 #[derive(Debug, Display, Error)]
-#[display(fmt = "size limit exceeded while collecting field data")]
+#[display("size limit exceeded while collecting field data")]
 #[non_exhaustive]
 pub struct LimitExceeded;
@@ -66,11 +66,11 @@ where
 #[non_exhaustive]
 pub enum JsonFieldError {
 /// Deserialize error.
-#[display(fmt = "Json deserialize error: {}", _0)]
+#[display("Json deserialize error: {}", _0)]
 Deserialize(serde_json::Error),

 /// Content type error.
-#[display(fmt = "Content type error")]
+#[display("Content type error")]
 ContentType,
 }
@ -82,7 +82,7 @@ impl<'t> FieldReader<'t> for TempFile {
|
|||
#[non_exhaustive]
|
||||
pub enum TempFileError {
|
||||
/// File I/O Error
|
||||
#[display(fmt = "File I/O error: {}", _0)]
|
||||
#[display("File I/O error: {}", _0)]
|
||||
FileIo(std::io::Error),
|
||||
}
|
||||
|
||||
|
|
|
@@ -77,15 +77,15 @@ where
 #[non_exhaustive]
 pub enum TextError {
 /// UTF-8 decoding error.
-#[display(fmt = "UTF-8 decoding error: {}", _0)]
+#[display("UTF-8 decoding error: {}", _0)]
 Utf8Error(str::Utf8Error),

 /// Deserialize error.
-#[display(fmt = "Plain text deserialize error: {}", _0)]
+#[display("Plain text deserialize error: {}", _0)]
 Deserialize(serde_plain::Error),

 /// Content type error.
-#[display(fmt = "Content type error")]
+#[display("Content type error")]
 ContentType,
 }
@@ -5,10 +5,7 @@ use actix_web::{
 web::{BufMut as _, Bytes, BytesMut},
 };
 use mime::Mime;
-use rand::{
-distributions::{Alphanumeric, DistString as _},
-thread_rng,
-};
+use rand::distr::{Alphanumeric, SampleString as _};

 const CRLF: &[u8] = b"\r\n";
 const CRLF_CRLF: &[u8] = b"\r\n\r\n";

@@ -64,7 +61,7 @@ pub fn create_form_data_payload_and_headers(
 content_type: Option<Mime>,
 file: Bytes,
 ) -> (Bytes, HeaderMap) {
-let boundary = Alphanumeric.sample_string(&mut thread_rng(), 32);
+let boundary = Alphanumeric.sample_string(&mut rand::rng(), 32);

 create_form_data_payload_and_headers_with_boundary(
 &boundary,
@ -143,9 +143,9 @@ impl<T: ResourcePath> Path<T> {
|
|||
for (seg_name, val) in self.segments.iter() {
|
||||
if name == seg_name {
|
||||
return match val {
|
||||
PathItem::Static(ref s) => Some(s),
|
||||
PathItem::Segment(s, e) => {
|
||||
Some(&self.path.path()[(*s as usize)..(*e as usize)])
|
||||
PathItem::Static(ref seg) => Some(seg),
|
||||
PathItem::Segment(start, end) => {
|
||||
Some(&self.path.path()[(*start as usize)..(*end as usize)])
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -193,8 +193,10 @@ impl<'a, T: ResourcePath> Iterator for PathIter<'a, T> {
|
|||
if self.idx < self.params.segment_count() {
|
||||
let idx = self.idx;
|
||||
let res = match self.params.segments[idx].1 {
|
||||
PathItem::Static(ref s) => s,
|
||||
PathItem::Segment(s, e) => &self.params.path.path()[(s as usize)..(e as usize)],
|
||||
PathItem::Static(ref seg) => seg,
|
||||
PathItem::Segment(start, end) => {
|
||||
&self.params.path.path()[(start as usize)..(end as usize)]
|
||||
}
|
||||
};
|
||||
self.idx += 1;
|
||||
return Some((&self.params.segments[idx].0, res));
|
||||
|
@ -217,8 +219,8 @@ impl<T: ResourcePath> Index<usize> for Path<T> {
|
|||
|
||||
fn index(&self, idx: usize) -> &str {
|
||||
match self.segments[idx].1 {
|
||||
PathItem::Static(ref s) => s,
|
||||
PathItem::Segment(s, e) => &self.path.path()[(s as usize)..(e as usize)],
|
||||
PathItem::Static(ref seg) => seg,
|
||||
PathItem::Segment(start, end) => &self.path.path()[(start as usize)..(end as usize)],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -105,7 +105,7 @@ fn hex_pair_to_char(d1: u8, d2: u8) -> Option<u8> {
    let d_low = char::from(d2).to_digit(16)?;

    // left shift high nibble by 4 bits
    Some((d_high as u8) << 4 | (d_low as u8))
    Some(((d_high as u8) << 4) | (d_low as u8))
}

#[derive(Debug, Default, Clone)]
|
|
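The extra parentheses above only satisfy Clippy's operator-precedence lint; `<<` already binds tighter than `|`, so the result is unchanged. A standalone sketch of the hex-pair decoding this helper performs; the `d_high` line is not visible in the hunk and is assumed symmetric to the `d_low` line:

```rust
/// Decode two ASCII hex digits (e.g. `b'4'`, `b'1'`) into one byte.
fn hex_pair_to_char(d1: u8, d2: u8) -> Option<u8> {
    // assumed symmetric counterpart of the `d_low` line shown in the hunk
    let d_high = char::from(d1).to_digit(16)?;
    let d_low = char::from(d2).to_digit(16)?;

    // Shift the high nibble into the top four bits and OR in the low nibble;
    // the added parentheses only make the precedence explicit.
    Some(((d_high as u8) << 4) | (d_low as u8))
}

fn main() {
    assert_eq!(hex_pair_to_char(b'4', b'1'), Some(0x41)); // 'A'
    assert_eq!(hex_pair_to_char(b'z', b'1'), None); // not a hex digit
}
```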
|
@ -1021,6 +1021,7 @@ impl ResourceDef {
|
|||
panic!("prefix resource definitions should not have tail segments");
|
||||
}
|
||||
|
||||
#[allow(clippy::literal_string_with_formatting_args)]
|
||||
if unprocessed.ends_with('*') {
|
||||
// unnamed tail segment
|
||||
|
||||
|
@ -1369,6 +1370,7 @@ mod tests {
|
|||
assert_eq!(path.unprocessed(), "");
|
||||
}
|
||||
|
||||
#[allow(clippy::literal_string_with_formatting_args)]
|
||||
#[test]
|
||||
fn newline_patterns_and_paths() {
|
||||
let re = ResourceDef::new("/user/a\nb");
|
||||
|
|
|
@ -19,7 +19,7 @@ impl ResourcePath for String {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a> ResourcePath for &'a str {
|
||||
impl ResourcePath for &str {
|
||||
fn path(&self) -> &str {
|
||||
self
|
||||
}
|
||||
|
|
|
@ -145,6 +145,7 @@ mod tests {
|
|||
};
|
||||
|
||||
#[allow(clippy::cognitive_complexity)]
|
||||
#[allow(clippy::literal_string_with_formatting_args)]
|
||||
#[test]
|
||||
fn test_recognizer_1() {
|
||||
let mut router = Router::<usize>::build();
|
||||
|
|
|
@ -796,11 +796,8 @@ where
|
|||
Some(frm) => {
|
||||
let msg = match frm {
|
||||
Frame::Text(data) => {
|
||||
Message::Text(ByteString::try_from(data).map_err(|e| {
|
||||
ProtocolError::Io(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
format!("{}", e),
|
||||
))
|
||||
Message::Text(ByteString::try_from(data).map_err(|err| {
|
||||
ProtocolError::Io(io::Error::new(io::ErrorKind::Other, err))
|
||||
})?)
|
||||
}
|
||||
Frame::Binary(data) => Message::Binary(data),
|
||||
|
|
|
@ -34,7 +34,7 @@ actix-web = "4"
|
|||
|
||||
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||
trybuild = "1"
|
||||
rustversion = "1"
|
||||
rustversion-msrv = "0.100"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
|
|
@ -136,7 +136,7 @@ async fn routes_overlapping_inaccessible_test(req: HttpRequest) -> impl Responde
|
|||
}
|
||||
|
||||
#[get("/custom_resource_name", name = "custom")]
|
||||
async fn custom_resource_name_test<'a>(req: HttpRequest) -> impl Responder {
|
||||
async fn custom_resource_name_test(req: HttpRequest) -> impl Responder {
|
||||
assert!(req.url_for_static("custom").is_ok());
|
||||
assert!(req.url_for_static("custom_resource_name_test").is_err());
|
||||
HttpResponse::Ok()
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
#[rustversion::stable(1.72)] // MSRV
|
||||
#[rustversion_msrv::msrv]
|
||||
#[test]
|
||||
fn compile_macros() {
|
||||
let t = trybuild::TestCases::new();
|
||||
|
|
|
@ -2,6 +2,28 @@
|
|||
|
||||
## Unreleased
|
||||
|
||||
## 4.10.2
|
||||
|
||||
- No significant changes since `4.10.1`.
|
||||
|
||||
## 4.10.1
|
||||
|
||||
- No significant changes since `4.10.0`.
|
||||
|
||||
## 4.10.0
|
||||
|
||||
### Added
|
||||
|
||||
- Implement `Responder` for `Result<(), E: Into<Error>>`. Returning `Ok(())` responds with HTTP 204 No Content.
|
||||
|
||||
### Changed
|
||||
|
||||
- On Windows, an error is now returned from `HttpServer::bind()` (or TLS variants) when binding to a socket that's already in use.
|
||||
- Update `brotli` dependency to `7`.
|
||||
- Minimum supported Rust version (MSRV) is now 1.75.
|
||||
|
||||
## 4.9.0
|
||||
|
||||
### Added
|
||||
|
||||
- Add `middleware::from_fn()` helper.
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "actix-web"
|
||||
version = "4.8.0"
|
||||
version = "4.10.2"
|
||||
description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
|
@ -137,17 +137,17 @@ actix-service = "2"
|
|||
actix-utils = "3"
|
||||
actix-tls = { version = "3.4", default-features = false, optional = true }
|
||||
|
||||
actix-http = { version = "3.7", features = ["ws"] }
|
||||
actix-http = { version = "3.10", features = ["ws"] }
|
||||
actix-router = { version = "0.5.3", default-features = false, features = ["http"] }
|
||||
actix-web-codegen = { version = "4.3", optional = true, default-features = false }
|
||||
|
||||
ahash = "0.8"
|
||||
bytes = "1"
|
||||
bytestring = "1"
|
||||
cfg-if = "1"
|
||||
cookie = { version = "0.16", features = ["percent-encode"], optional = true }
|
||||
derive_more = "0.99.8"
|
||||
derive_more = { version = "2", features = ["as_ref", "deref", "deref_mut", "display", "error", "from"] }
|
||||
encoding_rs = "0.8"
|
||||
foldhash = "0.1"
|
||||
futures-core = { version = "0.3.17", default-features = false }
|
||||
futures-util = { version = "0.3.17", default-features = false }
|
||||
itoa = "1"
|
||||
|
@ -163,6 +163,7 @@ serde = "1.0"
|
|||
serde_json = "1.0"
|
||||
serde_urlencoded = "0.7"
|
||||
smallvec = "1.6.1"
|
||||
tracing = "0.1.30"
|
||||
socket2 = "0.5"
|
||||
time = { version = "0.3", default-features = false, features = ["formatting"] }
|
||||
url = "2.1"
|
||||
|
@ -172,17 +173,17 @@ actix-files = "0.6"
|
|||
actix-test = { version = "0.1", features = ["openssl", "rustls-0_23"] }
|
||||
awc = { version = "3", features = ["openssl"] }
|
||||
|
||||
brotli = "6"
|
||||
brotli = "7"
|
||||
const-str = "0.5"
|
||||
core_affinity = "0.8"
|
||||
criterion = { version = "0.5", features = ["html_reports"] }
|
||||
env_logger = "0.11"
|
||||
flate2 = "1.0.13"
|
||||
futures-util = { version = "0.3.17", default-features = false, features = ["std"] }
|
||||
rand = "0.8"
|
||||
rand = "0.9"
|
||||
rcgen = "0.13"
|
||||
rustls-pemfile = "2"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
static_assertions = "1"
|
||||
tls-openssl = { package = "openssl", version = "0.10.55" }
|
||||
tls-rustls = { package = "rustls", version = "0.23" }
|
||||
|
|
|
@ -8,13 +8,13 @@
|
|||
<!-- prettier-ignore-start -->
|
||||
|
||||
[](https://crates.io/crates/actix-web)
|
||||
[](https://docs.rs/actix-web/4.8.0)
|
||||
[](https://docs.rs/actix-web/4.10.2)
|
||||

|
||||

|
||||
[](https://deps.rs/crate/actix-web/4.8.0)
|
||||
[](https://deps.rs/crate/actix-web/4.10.2)
|
||||
<br />
|
||||
[](https://github.com/actix/actix-web/actions/workflows/ci.yml)
|
||||
[](https://codecov.io/gh/actix/actix-web)
|
||||
[](https://codecov.io/gh/actix/actix-web)
|
||||

|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
|
|
|
@ -0,0 +1,128 @@
|
|||
//! Shows a few of ways to use the `from_fn` middleware.
|
||||
|
||||
use std::{collections::HashMap, io, rc::Rc, time::Duration};
|
||||
|
||||
use actix_web::{
|
||||
body::MessageBody,
|
||||
dev::{Service, ServiceRequest, ServiceResponse, Transform},
|
||||
http::header::{self, HeaderValue, Range},
|
||||
middleware::{from_fn, Logger, Next},
|
||||
web::{self, Header, Query},
|
||||
App, Error, HttpResponse, HttpServer,
|
||||
};
|
||||
use tracing::info;
|
||||
|
||||
async fn noop<B>(req: ServiceRequest, next: Next<B>) -> Result<ServiceResponse<B>, Error> {
|
||||
next.call(req).await
|
||||
}
|
||||
|
||||
async fn print_range_header<B>(
|
||||
range_header: Option<Header<Range>>,
|
||||
req: ServiceRequest,
|
||||
next: Next<B>,
|
||||
) -> Result<ServiceResponse<B>, Error> {
|
||||
if let Some(Header(range)) = range_header {
|
||||
println!("Range: {range}");
|
||||
} else {
|
||||
println!("No Range header");
|
||||
}
|
||||
|
||||
next.call(req).await
|
||||
}
|
||||
|
||||
async fn mutate_body_type(
|
||||
req: ServiceRequest,
|
||||
next: Next<impl MessageBody + 'static>,
|
||||
) -> Result<ServiceResponse<impl MessageBody>, Error> {
|
||||
let res = next.call(req).await?;
|
||||
Ok(res.map_into_left_body::<()>())
|
||||
}
|
||||
|
||||
async fn mutate_body_type_with_extractors(
|
||||
string_body: String,
|
||||
query: Query<HashMap<String, String>>,
|
||||
req: ServiceRequest,
|
||||
next: Next<impl MessageBody + 'static>,
|
||||
) -> Result<ServiceResponse<impl MessageBody>, Error> {
|
||||
println!("body is: {string_body}");
|
||||
println!("query string: {query:?}");
|
||||
|
||||
let res = next.call(req).await?;
|
||||
|
||||
Ok(res.map_body(move |_, _| string_body))
|
||||
}
|
||||
|
||||
async fn timeout_10secs(
|
||||
req: ServiceRequest,
|
||||
next: Next<impl MessageBody + 'static>,
|
||||
) -> Result<ServiceResponse<impl MessageBody>, Error> {
|
||||
match tokio::time::timeout(Duration::from_secs(10), next.call(req)).await {
|
||||
Ok(res) => res,
|
||||
Err(_err) => Err(actix_web::error::ErrorRequestTimeout("")),
|
||||
}
|
||||
}
|
||||
|
||||
struct MyMw(bool);
|
||||
|
||||
impl MyMw {
|
||||
async fn mw_cb(
|
||||
&self,
|
||||
req: ServiceRequest,
|
||||
next: Next<impl MessageBody + 'static>,
|
||||
) -> Result<ServiceResponse<impl MessageBody>, Error> {
|
||||
let mut res = match self.0 {
|
||||
true => req.into_response("short-circuited").map_into_right_body(),
|
||||
false => next.call(req).await?.map_into_left_body(),
|
||||
};
|
||||
|
||||
res.headers_mut()
|
||||
.insert(header::WARNING, HeaderValue::from_static("42"));
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
pub fn into_middleware<S, B>(
|
||||
self,
|
||||
) -> impl Transform<
|
||||
S,
|
||||
ServiceRequest,
|
||||
Response = ServiceResponse<impl MessageBody>,
|
||||
Error = Error,
|
||||
InitError = (),
|
||||
>
|
||||
where
|
||||
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
|
||||
B: MessageBody + 'static,
|
||||
{
|
||||
let this = Rc::new(self);
|
||||
from_fn(move |req, next| {
|
||||
let this = Rc::clone(&this);
|
||||
async move { Self::mw_cb(&this, req, next).await }
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_web::main]
|
||||
async fn main() -> io::Result<()> {
|
||||
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
|
||||
|
||||
let bind = ("127.0.0.1", 8080);
|
||||
info!("staring server at http://{}:{}", &bind.0, &bind.1);
|
||||
|
||||
HttpServer::new(|| {
|
||||
App::new()
|
||||
.wrap(from_fn(noop))
|
||||
.wrap(from_fn(print_range_header))
|
||||
.wrap(from_fn(mutate_body_type))
|
||||
.wrap(from_fn(mutate_body_type_with_extractors))
|
||||
.wrap(from_fn(timeout_10secs))
|
||||
// switch bool to true to observe early response
|
||||
.wrap(MyMw(false).into_middleware())
|
||||
.wrap(Logger::default())
|
||||
.default_service(web::to(HttpResponse::Ok))
|
||||
})
|
||||
.workers(1)
|
||||
.bind(bind)?
|
||||
.run()
|
||||
.await
|
||||
}
|
|
@ -269,9 +269,9 @@ where
|
|||
+ 'static,
|
||||
U::InitError: fmt::Debug,
|
||||
{
|
||||
let svc = svc
|
||||
.into_factory()
|
||||
.map_init_err(|e| log::error!("Can not construct default service: {:?}", e));
|
||||
let svc = svc.into_factory().map_init_err(|err| {
|
||||
log::error!("Can not construct default service: {err:?}");
|
||||
});
|
||||
|
||||
self.default = Some(Rc::new(boxed::factory(svc)));
|
||||
|
||||
|
|
|
@ -29,7 +29,7 @@ pub type Result<T, E = Error> = std::result::Result<T, E>;
|
|||
|
||||
/// An error representing a problem running a blocking task on a thread pool.
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Blocking thread pool is shut down unexpectedly")]
|
||||
#[display("Blocking thread pool is shut down unexpectedly")]
|
||||
#[non_exhaustive]
|
||||
pub struct BlockingError;
|
||||
|
||||
|
@ -40,15 +40,15 @@ impl ResponseError for crate::error::BlockingError {}
|
|||
#[non_exhaustive]
|
||||
pub enum UrlGenerationError {
|
||||
/// Resource not found.
|
||||
#[display(fmt = "Resource not found")]
|
||||
#[display("Resource not found")]
|
||||
ResourceNotFound,
|
||||
|
||||
/// Not all URL parameters covered.
|
||||
#[display(fmt = "Not all URL parameters covered")]
|
||||
#[display("Not all URL parameters covered")]
|
||||
NotEnoughElements,
|
||||
|
||||
/// URL parse error.
|
||||
#[display(fmt = "{}", _0)]
|
||||
#[display("{}", _0)]
|
||||
ParseError(UrlParseError),
|
||||
}
|
||||
|
||||
|
@ -59,39 +59,39 @@ impl ResponseError for UrlGenerationError {}
|
|||
#[non_exhaustive]
|
||||
pub enum UrlencodedError {
|
||||
/// Can not decode chunked transfer encoding.
|
||||
#[display(fmt = "Can not decode chunked transfer encoding.")]
|
||||
#[display("Can not decode chunked transfer encoding.")]
|
||||
Chunked,
|
||||
|
||||
/// Payload size is larger than allowed. (default limit: 256kB).
|
||||
#[display(
|
||||
fmt = "URL encoded payload is larger ({} bytes) than allowed (limit: {} bytes).",
|
||||
"URL encoded payload is larger ({} bytes) than allowed (limit: {} bytes).",
|
||||
size,
|
||||
limit
|
||||
)]
|
||||
Overflow { size: usize, limit: usize },
|
||||
|
||||
/// Payload size is now known.
|
||||
#[display(fmt = "Payload size is now known.")]
|
||||
#[display("Payload size is now known.")]
|
||||
UnknownLength,
|
||||
|
||||
/// Content type error.
|
||||
#[display(fmt = "Content type error.")]
|
||||
#[display("Content type error.")]
|
||||
ContentType,
|
||||
|
||||
/// Parse error.
|
||||
#[display(fmt = "Parse error: {}.", _0)]
|
||||
#[display("Parse error: {}.", _0)]
|
||||
Parse(FormDeError),
|
||||
|
||||
/// Encoding error.
|
||||
#[display(fmt = "Encoding error.")]
|
||||
#[display("Encoding error.")]
|
||||
Encoding,
|
||||
|
||||
/// Serialize error.
|
||||
#[display(fmt = "Serialize error: {}.", _0)]
|
||||
#[display("Serialize error: {}.", _0)]
|
||||
Serialize(FormError),
|
||||
|
||||
/// Payload error.
|
||||
#[display(fmt = "Error that occur during reading payload: {}.", _0)]
|
||||
#[display("Error that occur during reading payload: {}.", _0)]
|
||||
Payload(PayloadError),
|
||||
}
|
||||
|
||||
|
@ -113,30 +113,30 @@ impl ResponseError for UrlencodedError {
|
|||
pub enum JsonPayloadError {
|
||||
/// Payload size is bigger than allowed & content length header set. (default: 2MB)
|
||||
#[display(
|
||||
fmt = "JSON payload ({} bytes) is larger than allowed (limit: {} bytes).",
|
||||
"JSON payload ({} bytes) is larger than allowed (limit: {} bytes).",
|
||||
length,
|
||||
limit
|
||||
)]
|
||||
OverflowKnownLength { length: usize, limit: usize },
|
||||
|
||||
/// Payload size is bigger than allowed but no content length header set. (default: 2MB)
|
||||
#[display(fmt = "JSON payload has exceeded limit ({} bytes).", limit)]
|
||||
#[display("JSON payload has exceeded limit ({} bytes).", limit)]
|
||||
Overflow { limit: usize },
|
||||
|
||||
/// Content type error
|
||||
#[display(fmt = "Content type error")]
|
||||
#[display("Content type error")]
|
||||
ContentType,
|
||||
|
||||
/// Deserialize error
|
||||
#[display(fmt = "Json deserialize error: {}", _0)]
|
||||
#[display("Json deserialize error: {}", _0)]
|
||||
Deserialize(JsonError),
|
||||
|
||||
/// Serialize error
|
||||
#[display(fmt = "Json serialize error: {}", _0)]
|
||||
#[display("Json serialize error: {}", _0)]
|
||||
Serialize(JsonError),
|
||||
|
||||
/// Payload error
|
||||
#[display(fmt = "Error that occur during reading payload: {}", _0)]
|
||||
#[display("Error that occur during reading payload: {}", _0)]
|
||||
Payload(PayloadError),
|
||||
}
|
||||
|
||||
|
@ -166,7 +166,7 @@ impl ResponseError for JsonPayloadError {
|
|||
#[non_exhaustive]
|
||||
pub enum PathError {
|
||||
/// Deserialize error
|
||||
#[display(fmt = "Path deserialize error: {}", _0)]
|
||||
#[display("Path deserialize error: {}", _0)]
|
||||
Deserialize(serde::de::value::Error),
|
||||
}
|
||||
|
||||
|
@ -182,7 +182,7 @@ impl ResponseError for PathError {
|
|||
#[non_exhaustive]
|
||||
pub enum QueryPayloadError {
|
||||
/// Query deserialize error.
|
||||
#[display(fmt = "Query deserialize error: {}", _0)]
|
||||
#[display("Query deserialize error: {}", _0)]
|
||||
Deserialize(serde::de::value::Error),
|
||||
}
|
||||
|
||||
|
@ -196,20 +196,20 @@ impl ResponseError for QueryPayloadError {
|
|||
#[derive(Debug, Display, Error, From)]
|
||||
#[non_exhaustive]
|
||||
pub enum ReadlinesError {
|
||||
#[display(fmt = "Encoding error")]
|
||||
#[display("Encoding error")]
|
||||
/// Payload size is bigger than allowed. (default: 256kB)
|
||||
EncodingError,
|
||||
|
||||
/// Payload error.
|
||||
#[display(fmt = "Error that occur during reading payload: {}", _0)]
|
||||
#[display("Error that occur during reading payload: {}", _0)]
|
||||
Payload(PayloadError),
|
||||
|
||||
/// Line limit exceeded.
|
||||
#[display(fmt = "Line limit exceeded")]
|
||||
#[display("Line limit exceeded")]
|
||||
LimitOverflow,
|
||||
|
||||
/// ContentType error.
|
||||
#[display(fmt = "Content-type error")]
|
||||
#[display("Content-type error")]
|
||||
ContentTypeError(ContentTypeError),
|
||||
}
|
||||
|
||||
|
|
|
@ -10,7 +10,7 @@ use bytes::BufMut;
|
|||
/// perform a remaining length check before writing.
|
||||
pub(crate) struct MutWriter<'a, B>(pub(crate) &'a mut B);
|
||||
|
||||
impl<'a, B> io::Write for MutWriter<'a, B>
|
||||
impl<B> io::Write for MutWriter<'_, B>
|
||||
where
|
||||
B: BufMut,
|
||||
{
|
||||
|
|
|
@ -206,11 +206,11 @@ impl DispositionParam {
|
|||
}
|
||||
}
|
||||
|
||||
/// A *Content-Disposition* header. It is compatible to be used either as
|
||||
/// [a response header for the main body](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Disposition#as_a_response_header_for_the_main_body)
|
||||
/// as (re)defined in [RFC 6266](https://datatracker.ietf.org/doc/html/rfc6266), or as
|
||||
/// [a header for a multipart body](https://mdn.io/Content-Disposition#As_a_header_for_a_multipart_body)
|
||||
/// as (re)defined in [RFC 7587](https://datatracker.ietf.org/doc/html/rfc7578).
|
||||
/// `Content-Disposition` header.
|
||||
///
|
||||
/// It is compatible to be used either as [a response header for the main body][use_main_body]
|
||||
/// as (re)defined in [RFC 6266], or as [a header for a multipart body][use_multipart] as
|
||||
/// (re)defined in [RFC 7587].
|
||||
///
|
||||
/// In a regular HTTP response, the *Content-Disposition* response header is a header indicating if
|
||||
/// the content is expected to be displayed *inline* in the browser, that is, as a Web page or as
|
||||
|
@ -267,7 +267,7 @@ impl DispositionParam {
|
|||
/// parameters: vec![DispositionParam::FilenameExt(ExtendedValue {
|
||||
/// charset: Charset::Iso_8859_1, // The character set for the bytes of the filename
|
||||
/// language_tag: None, // The optional language tag (see `language-tag` crate)
|
||||
/// value: b"\xa9 Copyright 1989.txt".to_vec(), // the actual bytes of the filename
|
||||
/// value: b"\xA9 Ferris 2011.txt".to_vec(), // the actual bytes of the filename
|
||||
/// })],
|
||||
/// };
|
||||
/// assert!(cd1.is_attachment());
|
||||
|
@ -305,6 +305,11 @@ impl DispositionParam {
|
|||
/// change to match local file system conventions if applicable, and do not use directory path
|
||||
/// information that may be present.
|
||||
/// See [RFC 2183 §2.3](https://datatracker.ietf.org/doc/html/rfc2183#section-2.3).
|
||||
///
|
||||
/// [use_main_body]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Disposition#as_a_response_header_for_the_main_body
|
||||
/// [RFC 6266]: https://datatracker.ietf.org/doc/html/rfc6266
|
||||
/// [use_multipart]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Disposition#as_a_header_for_a_multipart_body
|
||||
/// [RFC 7587]: https://datatracker.ietf.org/doc/html/rfc7578
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct ContentDisposition {
|
||||
/// The disposition type
|
||||
|
|
|
@ -235,7 +235,7 @@ impl FromRequest for ConnectionInfo {
|
|||
/// # let _svc = actix_web::web::to(handler);
|
||||
/// ```
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Display)]
|
||||
#[display(fmt = "{}", _0)]
|
||||
#[display("{}", _0)]
|
||||
pub struct PeerAddr(pub SocketAddr);
|
||||
|
||||
impl PeerAddr {
|
||||
|
@ -247,7 +247,7 @@ impl PeerAddr {
|
|||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[non_exhaustive]
|
||||
#[display(fmt = "Missing peer address")]
|
||||
#[display("Missing peer address")]
|
||||
pub struct MissingPeerAddr;
|
||||
|
||||
impl ResponseError for MissingPeerAddr {}
|
||||
|
|
|
@ -8,7 +8,7 @@ use std::{
|
|||
};
|
||||
|
||||
use actix_service::{Service, Transform};
|
||||
use ahash::AHashMap;
|
||||
use foldhash::HashMap as FoldHashMap;
|
||||
use futures_core::{future::LocalBoxFuture, ready};
|
||||
use pin_project_lite::pin_project;
|
||||
|
||||
|
@ -185,7 +185,7 @@ pub struct ErrorHandlers<B> {
|
|||
handlers: Handlers<B>,
|
||||
}
|
||||
|
||||
type Handlers<B> = Rc<AHashMap<StatusCode, Box<ErrorHandler<B>>>>;
|
||||
type Handlers<B> = Rc<FoldHashMap<StatusCode, Box<ErrorHandler<B>>>>;
|
||||
|
||||
impl<B> Default for ErrorHandlers<B> {
|
||||
fn default() -> Self {
|
||||
|
|
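This hunk, together with the resource-map hunks below, replaces `ahash::AHashMap` with `foldhash::HashMap` (imported as `FoldHashMap`). It is an alias for a std `HashMap` parameterised over foldhash's hasher, so it is built with `Default` and then used like any other map. A minimal sketch with made-up keys and values:

```rust
use foldhash::HashMap as FoldHashMap;

fn main() {
    // `foldhash::HashMap` is a std HashMap with foldhash's default hasher,
    // so it is created via `Default` rather than `HashMap::new()`.
    let mut latencies_ms: FoldHashMap<&'static str, u32> = FoldHashMap::default();
    latencies_ms.insert("/", 3);
    latencies_ms.insert("/healthz", 1);

    // All the usual std HashMap methods are available.
    assert_eq!(latencies_ms.get("/"), Some(&3));
    println!("{latencies_ms:?}");
}
```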
|
@ -704,7 +704,7 @@ impl FormatText {
|
|||
/// Converter to get a String from something that writes to a Formatter.
|
||||
pub(crate) struct FormatDisplay<'a>(&'a dyn Fn(&mut fmt::Formatter<'_>) -> Result<(), fmt::Error>);
|
||||
|
||||
impl<'a> fmt::Display for FormatDisplay<'a> {
|
||||
impl fmt::Display for FormatDisplay<'_> {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
|
||||
(self.0)(fmt)
|
||||
}
|
||||
|
|
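This hunk, like the `MutWriter` and `ResourcePath for &str` ones nearby, drops impl lifetimes that the body never names, using the anonymous `'_` lifetime (or full elision) instead, as current rustc/Clippy lints suggest. A tiny illustration with an invented wrapper type:

```rust
use std::fmt;

struct Wrapper<'a>(&'a str);

// Before: `impl<'a> fmt::Display for Wrapper<'a>` — the name `'a` is never
// referenced in the body, so the anonymous lifetime reads cleaner.
impl fmt::Display for Wrapper<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.0)
    }
}

fn main() {
    println!("{}", Wrapper("elided lifetimes"));
}
```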
|
@ -28,9 +28,9 @@ use crate::{
|
|||
///
|
||||
/// Resource in turn has at least one route. Route consists of an handlers objects and list of
|
||||
/// guards (objects that implement `Guard` trait). Resources and routes uses builder-like pattern
|
||||
/// for configuration. During request handling, resource object iterate through all routes and check
|
||||
/// guards for specific route, if request matches all guards, route considered matched and route
|
||||
/// handler get called.
|
||||
/// for configuration. During request handling, the resource object iterates through all routes
|
||||
/// and checks guards for the specific route, if the request matches all the guards, then the route
|
||||
/// is considered matched and the route handler gets called.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
|
@ -358,10 +358,9 @@ where
|
|||
U::InitError: fmt::Debug,
|
||||
{
|
||||
// create and configure default resource
|
||||
self.default = boxed::factory(
|
||||
f.into_factory()
|
||||
.map_init_err(|e| log::error!("Can not construct default service: {:?}", e)),
|
||||
);
|
||||
self.default = boxed::factory(f.into_factory().map_init_err(|err| {
|
||||
log::error!("Can not construct default service: {err:?}");
|
||||
}));
|
||||
|
||||
self
|
||||
}
|
||||
|
|
|
@ -131,6 +131,23 @@ where
    }
}

// Note: see https://github.com/actix/actix-web/issues/1108 for reasoning why Responder is not
// implemented for `()`, and https://github.com/actix/actix-web/pull/3560 for discussion about this
// impl and the decision not to include a similar one for `Option<()>`.
impl<E> Responder for Result<(), E>
where
    E: Into<Error>,
{
    type Body = BoxBody;

    fn respond_to(self, _req: &HttpRequest) -> HttpResponse {
        match self {
            Ok(()) => HttpResponse::new(StatusCode::NO_CONTENT),
            Err(err) => HttpResponse::from_error(err.into()),
        }
    }
}

impl<R: Responder> Responder for (R, StatusCode) {
    type Body = R::Body;
|
||||
|
||||
|
|
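The new `Responder` impl above lets a handler return `Result<(), E>` directly, with `Ok(())` producing 204 No Content (this is the corresponding 4.10.0 changelog entry earlier in the diff). A hedged usage sketch; the route, path parameter, and error are invented for illustration:

```rust
use actix_web::{delete, error, web, App, HttpServer, Result};

/// Hypothetical handler: a successful delete has no body, so returning
/// `Ok(())` now responds with `204 No Content` automatically.
#[delete("/items/{id}")]
async fn delete_item(path: web::Path<u32>) -> Result<()> {
    let id = path.into_inner();
    if id == 0 {
        // Any error convertible into `actix_web::Error` works as the `E`.
        return Err(error::ErrorNotFound("no such item"));
    }
    // ... perform the deletion here ...
    Ok(())
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(delete_item))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```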
|
@ -6,7 +6,7 @@ use std::{
|
|||
};
|
||||
|
||||
use actix_router::ResourceDef;
|
||||
use ahash::AHashMap;
|
||||
use foldhash::HashMap as FoldHashMap;
|
||||
use url::Url;
|
||||
|
||||
use crate::{error::UrlGenerationError, request::HttpRequest};
|
||||
|
@ -19,7 +19,7 @@ pub struct ResourceMap {
|
|||
|
||||
/// Named resources within the tree or, for external resources, it points to isolated nodes
|
||||
/// outside the tree.
|
||||
named: AHashMap<String, Rc<ResourceMap>>,
|
||||
named: FoldHashMap<String, Rc<ResourceMap>>,
|
||||
|
||||
parent: RefCell<Weak<ResourceMap>>,
|
||||
|
||||
|
@ -32,7 +32,7 @@ impl ResourceMap {
|
|||
pub fn new(root: ResourceDef) -> Self {
|
||||
ResourceMap {
|
||||
pattern: root,
|
||||
named: AHashMap::default(),
|
||||
named: FoldHashMap::default(),
|
||||
parent: RefCell::new(Weak::new()),
|
||||
nodes: Some(Vec::new()),
|
||||
}
|
||||
|
@ -86,7 +86,7 @@ impl ResourceMap {
|
|||
} else {
|
||||
let new_node = Rc::new(ResourceMap {
|
||||
pattern: pattern.clone(),
|
||||
named: AHashMap::default(),
|
||||
named: FoldHashMap::default(),
|
||||
parent: RefCell::new(Weak::new()),
|
||||
nodes: None,
|
||||
});
|
||||
|
|
|
@ -278,7 +278,9 @@ where
|
|||
{
|
||||
// create and configure default resource
|
||||
self.default = Some(Rc::new(boxed::factory(f.into_factory().map_init_err(
|
||||
|e| log::error!("Can not construct default service: {:?}", e),
|
||||
|err| {
|
||||
log::error!("Can not construct default service: {err:?}");
|
||||
},
|
||||
))));
|
||||
|
||||
self
|
||||
|
|
|
@ -193,7 +193,7 @@ where
|
|||
///
|
||||
/// One thread pool is set up **per worker**; not shared across workers.
|
||||
///
|
||||
/// By default set to 512 divided by the number of workers.
|
||||
/// By default, set to 512 divided by [available parallelism](std::thread::available_parallelism()).
|
||||
pub fn worker_max_blocking_threads(mut self, num: usize) -> Self {
|
||||
self.builder = self.builder.worker_max_blocking_threads(num);
|
||||
self
|
||||
|
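The doc fix above ties the default blocking-thread budget to available parallelism rather than the configured worker count. A short sketch of overriding it explicitly; the server setup and the value 64 are illustrative:

```rust
use actix_web::{web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().default_service(web::to(HttpResponse::Ok)))
        // Per-worker cap on the blocking thread pool (used by `web::block`);
        // if unset it defaults to 512 divided by available parallelism.
        .worker_max_blocking_threads(64)
        .workers(2)
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}
```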
@ -1085,7 +1085,10 @@ fn create_tcp_listener(addr: net::SocketAddr, backlog: u32) -> io::Result<net::T
|
|||
use socket2::{Domain, Protocol, Socket, Type};
|
||||
let domain = Domain::for_address(addr);
|
||||
let socket = Socket::new(domain, Type::STREAM, Some(Protocol::TCP))?;
|
||||
socket.set_reuse_address(true)?;
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
socket.set_reuse_address(true)?;
|
||||
}
|
||||
socket.bind(&addr.into())?;
|
||||
// clamp backlog to max u32 that fits in i32 range
|
||||
let backlog = cmp::min(backlog, i32::MAX as u32) as i32;
|
||||
|
|
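The cfg-gate above is what makes `HttpServer::bind()` report an address-in-use error on Windows (see the 4.10.0 changelog entry): `SO_REUSEADDR` is still set on Unix, where it only eases rebinding after TIME_WAIT, but is skipped on Windows, where it would allow binding over a live listener. A standalone sketch of the same socket2 pattern, not the actix-web function itself:

```rust
use std::net;

use socket2::{Domain, Protocol, Socket, Type};

fn create_listener(addr: net::SocketAddr, backlog: i32) -> std::io::Result<net::TcpListener> {
    let socket = Socket::new(Domain::for_address(addr), Type::STREAM, Some(Protocol::TCP))?;

    // On Unix, SO_REUSEADDR lets a restarted server rebind a port in
    // TIME_WAIT; on Windows the same flag allows binding over a *live*
    // listener, so it is skipped there and `bind` fails with "in use".
    #[cfg(not(windows))]
    socket.set_reuse_address(true)?;

    socket.bind(&addr.into())?;
    socket.listen(backlog)?;
    Ok(socket.into())
}

fn main() -> std::io::Result<()> {
    let first = create_listener("127.0.0.1:0".parse().unwrap(), 128)?;
    let addr = first.local_addr()?;

    // Binding the same address again should now fail on every platform.
    assert!(create_listener(addr, 128).is_err());
    Ok(())
}
```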
|
@ -662,6 +662,7 @@ where
|
|||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! services {
|
||||
() => {()};
|
||||
($($x:expr),+ $(,)?) => {
|
||||
($($x,)+)
|
||||
}
|
||||
|
@ -870,4 +871,40 @@ mod tests {
|
|||
let req = test::TestRequest::default().to_request();
|
||||
let _res = test::call_service(&app, req).await;
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn define_services_macro_with_multiple_arguments() {
|
||||
let result = services!(1, 2, 3);
|
||||
assert_eq!(result, (1, 2, 3));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn define_services_macro_with_single_argument() {
|
||||
let result = services!(1);
|
||||
assert_eq!(result, (1,));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn define_services_macro_with_no_arguments() {
|
||||
let result = services!();
|
||||
let () = result;
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn define_services_macro_with_trailing_comma() {
|
||||
let result = services!(1, 2, 3,);
|
||||
assert_eq!(result, (1, 2, 3));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn define_services_macro_with_comments_in_arguments() {
|
||||
let result = services!(
|
||||
1, // First comment
|
||||
2, // Second comment
|
||||
3 // Third comment
|
||||
);
|
||||
|
||||
// Assert that comments are ignored and it correctly returns a tuple.
|
||||
assert_eq!(result, (1, 2, 3));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
//! Various helpers for Actix applications to use during testing.
|
||||
//!
|
||||
//! # Creating A Test Service
|
||||
//! # Initializing A Test Service
|
||||
//! - [`init_service`]
|
||||
//!
|
||||
//! # Off-The-Shelf Test Services
|
||||
|
@ -49,6 +49,7 @@ pub use self::{
|
|||
/// Must be used inside an async test. Works for both `ServiceRequest` and `HttpRequest`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use actix_web::{http::StatusCode, HttpResponse};
|
||||
///
|
||||
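The `Handle::try_current()` guard above avoids calling `actix_rt::spawn` when no Tokio runtime is running any more (for example when a client's connection pool is dropped after the runtime has shut down), which is the "prevent panics on connection pool drop" fix in the awc 3.6.0 changelog. A standalone sketch of the same guard pattern, outside awc:

```rust
async fn graceful_close() {
    // stand-in for the real shutdown I/O work
    tokio::time::sleep(std::time::Duration::from_millis(10)).await;
}

fn close_connection() {
    // Only spawn the shutdown task if a Tokio runtime is still running;
    // `tokio::spawn` would panic otherwise (e.g. during late drops at exit).
    if tokio::runtime::Handle::try_current().is_ok() {
        tokio::spawn(graceful_close());
    }
}

fn main() {
    // Outside any runtime: the guard makes this a no-op instead of a panic.
    close_connection();

    // Inside a runtime: the task is spawned as usual.
    let rt = tokio::runtime::Runtime::new().unwrap();
    rt.block_on(async {
        close_connection();
        tokio::time::sleep(std::time::Duration::from_millis(20)).await;
    });
}
```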
|
|
|
@ -238,7 +238,7 @@ where
|
|||
match res {
|
||||
Ok(bytes) => {
|
||||
let fallback = bytes.clone();
|
||||
let left = L::from_request(this.req, &mut payload_from_bytes(bytes));
|
||||
let left = L::from_request(this.req, &mut dev::Payload::from(bytes));
|
||||
EitherExtractState::Left { left, fallback }
|
||||
}
|
||||
Err(err) => break Err(EitherExtractError::Bytes(err)),
|
||||
|
@ -251,7 +251,7 @@ where
|
|||
Err(left_err) => {
|
||||
let right = R::from_request(
|
||||
this.req,
|
||||
&mut payload_from_bytes(mem::take(fallback)),
|
||||
&mut dev::Payload::from(mem::take(fallback)),
|
||||
);
|
||||
EitherExtractState::Right {
|
||||
left_err: Some(left_err),
|
||||
|
@ -276,12 +276,6 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
fn payload_from_bytes(bytes: Bytes) -> dev::Payload {
|
||||
let (_, mut h1_payload) = actix_http::h1::Payload::create(true);
|
||||
h1_payload.unread_data(bytes);
|
||||
dev::Payload::from(h1_payload)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
|
|
@ -332,7 +332,7 @@ impl<T: DeserializeOwned> JsonBody<T> {
|
|||
(true, Ok(Some(mime))) => {
|
||||
mime.subtype() == mime::JSON
|
||||
|| mime.suffix() == Some(mime::JSON)
|
||||
|| ctype_fn.map_or(false, |predicate| predicate(mime))
|
||||
|| ctype_fn.is_some_and(|predicate| predicate(mime))
|
||||
}
|
||||
|
||||
// if content-type is expected but not parsable as mime type, bail
|
||||
|
@ -398,7 +398,7 @@ impl<T: DeserializeOwned> JsonBody<T> {
|
|||
_res: PhantomData,
|
||||
}
|
||||
}
|
||||
JsonBody::Error(e) => JsonBody::Error(e),
|
||||
JsonBody::Error(err) => JsonBody::Error(err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -434,7 +434,7 @@ impl<T: DeserializeOwned> Future for JsonBody<T> {
|
|||
}
|
||||
}
|
||||
},
|
||||
JsonBody::Error(e) => Poll::Ready(Err(e.take().unwrap())),
|
||||
JsonBody::Error(err) => Poll::Ready(Err(err.take().unwrap())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -89,8 +89,8 @@ where
|
|||
);
|
||||
|
||||
if let Some(error_handler) = error_handler {
|
||||
let e = PathError::Deserialize(err);
|
||||
(error_handler)(e, req)
|
||||
let err = PathError::Deserialize(err);
|
||||
(error_handler)(err, req)
|
||||
} else {
|
||||
ErrorNotFound(err)
|
||||
}
|
||||
|
@ -159,7 +159,7 @@ mod tests {
|
|||
use crate::{error, http, test::TestRequest, HttpResponse};
|
||||
|
||||
#[derive(Deserialize, Debug, Display)]
|
||||
#[display(fmt = "MyStruct({}, {})", key, value)]
|
||||
#[display("MyStruct({}, {})", key, value)]
|
||||
struct MyStruct {
|
||||
key: String,
|
||||
value: String,
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
use std::{fmt, ops, sync::Arc};
|
||||
|
||||
use actix_utils::future::{err, ok, Ready};
|
||||
use actix_utils::future::{ok, ready, Ready};
|
||||
use serde::de::DeserializeOwned;
|
||||
|
||||
use crate::{dev::Payload, error::QueryPayloadError, Error, FromRequest, HttpRequest};
|
||||
|
@ -118,8 +118,8 @@ impl<T: DeserializeOwned> FromRequest for Query<T> {
|
|||
|
||||
serde_urlencoded::from_str::<T>(req.query_string())
|
||||
.map(|val| ok(Query(val)))
|
||||
.unwrap_or_else(move |e| {
|
||||
let e = QueryPayloadError::Deserialize(e);
|
||||
.unwrap_or_else(move |err| {
|
||||
let err = QueryPayloadError::Deserialize(err);
|
||||
|
||||
log::debug!(
|
||||
"Failed during Query extractor deserialization. \
|
||||
|
@ -127,13 +127,13 @@ impl<T: DeserializeOwned> FromRequest for Query<T> {
|
|||
req.path()
|
||||
);
|
||||
|
||||
let e = if let Some(error_handler) = error_handler {
|
||||
(error_handler)(e, req)
|
||||
let err = if let Some(error_handler) = error_handler {
|
||||
(error_handler)(err, req)
|
||||
} else {
|
||||
e.into()
|
||||
err.into()
|
||||
};
|
||||
|
||||
err(e)
|
||||
ready(Err(err))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,13 +1,10 @@
|
|||
#[cfg(feature = "openssl")]
|
||||
extern crate tls_openssl as openssl;
|
||||
|
||||
#[cfg(any(unix, feature = "openssl"))]
|
||||
use {
|
||||
actix_web::{web, App, HttpResponse, HttpServer},
|
||||
std::{sync::mpsc, thread, time::Duration},
|
||||
};
|
||||
use std::{sync::mpsc, thread, time::Duration};
|
||||
|
||||
use actix_web::{web, App, HttpResponse, HttpServer};
|
||||
|
||||
#[cfg(unix)]
|
||||
#[actix_rt::test]
|
||||
async fn test_start() {
|
||||
let addr = actix_test::unused_addr();
|
||||
|
@ -53,6 +50,27 @@ async fn test_start() {
|
|||
let response = client.get(host.clone()).send().await.unwrap();
|
||||
assert!(response.status().is_success());
|
||||
|
||||
// Attempt to start a second server using the same address.
|
||||
let result = HttpServer::new(|| {
|
||||
App::new().service(
|
||||
web::resource("/").route(web::to(|| async { HttpResponse::Ok().body("test") })),
|
||||
)
|
||||
})
|
||||
.workers(1)
|
||||
.backlog(1)
|
||||
.max_connections(10)
|
||||
.max_connection_rate(10)
|
||||
.keep_alive(Duration::from_secs(10))
|
||||
.client_request_timeout(Duration::from_secs(5))
|
||||
.client_disconnect_timeout(Duration::ZERO)
|
||||
.server_hostname("localhost")
|
||||
.system_exit()
|
||||
.disable_signals()
|
||||
.bind(format!("{}", addr));
|
||||
|
||||
// This should fail: the address is in use.
|
||||
assert!(result.is_err());
|
||||
|
||||
srv.stop(false).await;
|
||||
}
|
||||
|
||||
|
|
|
@ -25,7 +25,7 @@ use openssl::{
|
|||
ssl::{SslAcceptor, SslMethod},
|
||||
x509::X509,
|
||||
};
|
||||
use rand::{distributions::Alphanumeric, Rng as _};
|
||||
use rand::distr::{Alphanumeric, SampleString as _};
|
||||
|
||||
mod utils;
|
||||
|
||||
|
@ -188,11 +188,7 @@ async fn body_gzip_large() {
|
|||
|
||||
#[actix_rt::test]
|
||||
async fn test_body_gzip_large_random() {
|
||||
let data = rand::thread_rng()
|
||||
.sample_iter(&Alphanumeric)
|
||||
.take(70_000)
|
||||
.map(char::from)
|
||||
.collect::<String>();
|
||||
let data = Alphanumeric.sample_string(&mut rand::rng(), 70_000);
|
||||
let srv_data = data.clone();
|
||||
|
||||
let srv = actix_test::start_with(actix_test::config().h1(), move || {
|
||||
|
@ -432,11 +428,7 @@ async fn test_zstd_encoding() {
|
|||
|
||||
#[actix_rt::test]
|
||||
async fn test_zstd_encoding_large() {
|
||||
let data = rand::thread_rng()
|
||||
.sample_iter(&Alphanumeric)
|
||||
.take(320_000)
|
||||
.map(char::from)
|
||||
.collect::<String>();
|
||||
let data = Alphanumeric.sample_string(&mut rand::rng(), 320_000);
|
||||
|
||||
let srv = actix_test::start_with(actix_test::config().h1(), || {
|
||||
App::new().service(
|
||||
|
@ -529,11 +521,7 @@ async fn test_gzip_encoding_large() {
|
|||
|
||||
#[actix_rt::test]
|
||||
async fn test_reading_gzip_encoding_large_random() {
|
||||
let data = rand::thread_rng()
|
||||
.sample_iter(&Alphanumeric)
|
||||
.take(60_000)
|
||||
.map(char::from)
|
||||
.collect::<String>();
|
||||
let data = Alphanumeric.sample_string(&mut rand::rng(), 60_000);
|
||||
|
||||
let srv = actix_test::start_with(actix_test::config().h1(), || {
|
||||
App::new().service(web::resource("/").route(web::to(move |body: Bytes| async {
|
||||
|
@ -599,11 +587,7 @@ async fn test_reading_deflate_encoding_large() {
|
|||
|
||||
#[actix_rt::test]
|
||||
async fn test_reading_deflate_encoding_large_random() {
|
||||
let data = rand::thread_rng()
|
||||
.sample_iter(&Alphanumeric)
|
||||
.take(160_000)
|
||||
.map(char::from)
|
||||
.collect::<String>();
|
||||
let data = Alphanumeric.sample_string(&mut rand::rng(), 160_000);
|
||||
|
||||
let srv = actix_test::start_with(actix_test::config().h1(), || {
|
||||
App::new().service(web::resource("/").route(web::to(move |body: Bytes| async {
|
||||
|
@ -648,11 +632,7 @@ async fn test_brotli_encoding() {
|
|||
|
||||
#[actix_rt::test]
|
||||
async fn test_brotli_encoding_large() {
|
||||
let data = rand::thread_rng()
|
||||
.sample_iter(&Alphanumeric)
|
||||
.take(320_000)
|
||||
.map(char::from)
|
||||
.collect::<String>();
|
||||
let data = Alphanumeric.sample_string(&mut rand::rng(), 320_000);
|
||||
|
||||
let srv = actix_test::start_with(actix_test::config().h1(), || {
|
||||
App::new().service(
|
||||
|
@ -737,11 +717,7 @@ mod plus_rustls {
|
|||
|
||||
#[actix_rt::test]
|
||||
async fn test_reading_deflate_encoding_large_random_rustls() {
|
||||
let data = rand::thread_rng()
|
||||
.sample_iter(&Alphanumeric)
|
||||
.take(160_000)
|
||||
.map(char::from)
|
||||
.collect::<String>();
|
||||
let data = Alphanumeric.sample_string(&mut rand::rng(), 160_000);
|
||||
|
||||
let srv = actix_test::start_with(actix_test::config().rustls_0_23(tls_config()), || {
|
||||
App::new().service(web::resource("/").route(web::to(|bytes: Bytes| async {
|
||||
|
|
|
@ -2,11 +2,22 @@
|
|||
|
||||
## Unreleased
|
||||
|
||||
## 3.6.0
|
||||
|
||||
- Prevent panics on connection pool drop when Tokio runtime is shutdown early.
|
||||
- Do not send `Host` header on HTTP/2 requests, as it is not required, and some web servers may reject it.
|
||||
- Update `brotli` dependency to `7`.
|
||||
- Minimum supported Rust version (MSRV) is now 1.75.
|
||||
|
||||
## 3.5.1
|
||||
|
||||
- Fix WebSocket `Host` request header value when using a non-default port.
|
||||
|
||||
## 3.5.0
|
||||
|
||||
- Add `rustls-0_23`, `rustls-0_23-webpki-roots`, and `rustls-0_23-native-roots` crate features.
|
||||
- Add `awc::Connector::rustls_0_23()` constructor.
|
||||
- Fix `rustls-0_22-native-roots` root store lookup
|
||||
- Fix `rustls-0_22-native-roots` root store lookup.
|
||||
- Update `brotli` dependency to `6`.
|
||||
- Minimum supported Rust version (MSRV) is now 1.72.
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "awc"
|
||||
version = "3.5.0"
|
||||
version = "3.6.0"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Async HTTP and WebSocket client library"
|
||||
keywords = ["actix", "http", "framework", "async", "web"]
|
||||
|
@ -98,7 +98,7 @@ dangerous-h2c = []
|
|||
[dependencies]
|
||||
actix-codec = "0.5"
|
||||
actix-service = "2"
|
||||
actix-http = { version = "3.7", features = ["http2", "ws"] }
|
||||
actix-http = { version = "3.10", features = ["http2", "ws"] }
|
||||
actix-rt = { version = "2.1", default-features = false }
|
||||
actix-tls = { version = "3.4", features = ["connect", "uri"] }
|
||||
actix-utils = "3"
|
||||
|
@ -106,7 +106,7 @@ actix-utils = "3"
|
|||
base64 = "0.22"
|
||||
bytes = "1"
|
||||
cfg-if = "1"
|
||||
derive_more = "0.99.5"
|
||||
derive_more = { version = "2", features = ["display", "error", "from"] }
|
||||
futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
|
||||
futures-util = { version = "0.3.17", default-features = false, features = ["alloc", "sink"] }
|
||||
h2 = "0.3.26"
|
||||
|
@ -116,7 +116,7 @@ log =" 0.4"
|
|||
mime = "0.3"
|
||||
percent-encoding = "2.1"
|
||||
pin-project-lite = "0.2"
|
||||
rand = "0.8"
|
||||
rand = "0.9"
|
||||
serde = "1.0"
|
||||
serde_json = "1.0"
|
||||
serde_urlencoded = "0.7"
|
||||
|
@ -141,7 +141,7 @@ actix-tls = { version = "3.4", features = ["openssl", "rustls-0_23"] }
|
|||
actix-utils = "3"
|
||||
actix-web = { version = "4", features = ["openssl"] }
|
||||
|
||||
brotli = "6"
|
||||
brotli = "7"
|
||||
const-str = "0.5"
|
||||
env_logger = "0.11"
|
||||
flate2 = "1.0.13"
|
||||
|
@ -153,9 +153,9 @@ tokio = { version = "1.24.2", features = ["rt-multi-thread", "macros"] }
|
|||
zstd = "0.13"
|
||||
tls-rustls-0_23 = { package = "rustls", version = "0.23" } # add rustls 0.23 with default features to make aws_lc_rs work in tests
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[[example]]
|
||||
name = "client"
|
||||
required-features = ["rustls-0_23-webpki-roots"]
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
|
|
@ -5,9 +5,9 @@
|
|||
<!-- prettier-ignore-start -->
|
||||
|
||||
[](https://crates.io/crates/awc)
|
||||
[](https://docs.rs/awc/3.5.0)
|
||||
[](https://docs.rs/awc/3.6.0)
|
||||

|
||||
[](https://deps.rs/crate/awc/3.5.0)
|
||||
[](https://deps.rs/crate/awc/3.6.0)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
|
|
@ -1,25 +1,39 @@
|
|||
use std::error::Error as StdError;
|
||||
//! Demonstrates construction and usage of a TLS-capable HTTP client.
|
||||
|
||||
extern crate tls_rustls_0_23 as rustls;
|
||||
|
||||
use std::{error::Error as StdError, sync::Arc};
|
||||
|
||||
use actix_tls::connect::rustls_0_23::webpki_roots_cert_store;
|
||||
use rustls::ClientConfig;
|
||||
|
||||
/// If we want to make requests to addresses starting with `https`, we need to enable the rustls feature of awc
|
||||
/// `awc = { version = "3.5.0", features = ["rustls"] }`
|
||||
#[actix_rt::main]
|
||||
async fn main() -> Result<(), Box<dyn StdError>> {
|
||||
env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
|
||||
|
||||
// construct request builder
|
||||
let client = awc::Client::new();
|
||||
let mut config = ClientConfig::builder()
|
||||
.with_root_certificates(webpki_roots_cert_store())
|
||||
.with_no_client_auth();
|
||||
|
||||
let protos = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
|
||||
config.alpn_protocols = protos;
|
||||
|
||||
// construct request builder with TLS support
|
||||
let client = awc::Client::builder()
|
||||
.connector(awc::Connector::new().rustls_0_23(Arc::new(config)))
|
||||
.finish();
|
||||
|
||||
// configure request
|
||||
let request = client
|
||||
.get("https://www.rust-lang.org/")
|
||||
.append_header(("User-Agent", "Actix-web"));
|
||||
.append_header(("User-Agent", "awc/3.0"));
|
||||
|
||||
println!("Request: {:?}", request);
|
||||
println!("Request: {request:?}");
|
||||
|
||||
let mut response = request.send().await?;
|
||||
|
||||
// server response head
|
||||
println!("Response: {:?}", response);
|
||||
println!("Response: {response:?}");
|
||||
|
||||
// read response body
|
||||
let body = response.body().await?;
|
||||
|
|
|
@ -89,9 +89,9 @@ impl Connector<()> {
|
|||
/// # Panics
|
||||
///
|
||||
/// - When the `rustls-0_23-webpki-roots` or `rustls-0_23-native-roots` features are enabled
|
||||
/// and no default crypto provider has been loaded, this method will panic.
|
||||
/// and no default crypto provider has been loaded, this method will panic.
|
||||
/// - When the `rustls-0_23-native-roots` or `rustls-0_22-native-roots` features are enabled
|
||||
/// and the runtime system has no native root certificates, this method will panic.
|
||||
/// and the runtime system has no native root certificates, this method will panic.
|
||||
#[allow(clippy::new_ret_no_self, clippy::let_unit_value)]
|
||||
pub fn new() -> Connector<
|
||||
impl Service<
|
||||
|
@ -511,7 +511,8 @@ where
|
|||
let h2 = sock
|
||||
.ssl()
|
||||
.selected_alpn_protocol()
|
||||
.map_or(false, |protos| protos.windows(2).any(|w| w == H2));
|
||||
.is_some_and(|protos| protos.windows(2).any(|w| w == H2));
|
||||
|
||||
if h2 {
|
||||
(Box::new(sock), Protocol::Http2)
|
||||
} else {
|
||||
|
@ -550,7 +551,8 @@ where
|
|||
.get_ref()
|
||||
.1
|
||||
.alpn_protocol()
|
||||
.map_or(false, |protos| protos.windows(2).any(|w| w == H2));
|
||||
.is_some_and(|protos| protos.windows(2).any(|w| w == H2));
|
||||
|
||||
if h2 {
|
||||
(Box::new(sock), Protocol::Http2)
|
||||
} else {
|
||||
|
@ -584,7 +586,8 @@ where
|
|||
.get_ref()
|
||||
.1
|
||||
.alpn_protocol()
|
||||
.map_or(false, |protos| protos.windows(2).any(|w| w == H2));
|
||||
.is_some_and(|protos| protos.windows(2).any(|w| w == H2));
|
||||
|
||||
if h2 {
|
||||
(Box::new(sock), Protocol::Http2)
|
||||
} else {
|
||||
|
@ -621,7 +624,8 @@ where
|
|||
.get_ref()
|
||||
.1
|
||||
.alpn_protocol()
|
||||
.map_or(false, |protos| protos.windows(2).any(|w| w == H2));
|
||||
.is_some_and(|protos| protos.windows(2).any(|w| w == H2));
|
||||
|
||||
if h2 {
|
||||
(Box::new(sock), Protocol::Http2)
|
||||
} else {
|
||||
|
@ -655,7 +659,8 @@ where
|
|||
.get_ref()
|
||||
.1
|
||||
.alpn_protocol()
|
||||
.map_or(false, |protos| protos.windows(2).any(|w| w == H2));
|
||||
.is_some_and(|protos| protos.windows(2).any(|w| w == H2));
|
||||
|
||||
if h2 {
|
||||
(Box::new(sock), Protocol::Http2)
|
||||
} else {
|
||||
|
|
|
@ -12,40 +12,40 @@ use crate::BoxError;
|
|||
#[non_exhaustive]
|
||||
pub enum ConnectError {
|
||||
/// SSL feature is not enabled
|
||||
#[display(fmt = "SSL is not supported")]
|
||||
#[display("SSL is not supported")]
|
||||
SslIsNotSupported,
|
||||
|
||||
/// SSL error
|
||||
#[cfg(feature = "openssl")]
|
||||
#[display(fmt = "{}", _0)]
|
||||
#[display("{}", _0)]
|
||||
SslError(OpensslError),
|
||||
|
||||
/// Failed to resolve the hostname
|
||||
#[display(fmt = "Failed resolving hostname: {}", _0)]
|
||||
#[display("Failed resolving hostname: {}", _0)]
|
||||
Resolver(Box<dyn std::error::Error>),
|
||||
|
||||
/// No dns records
|
||||
#[display(fmt = "No DNS records found for the input")]
|
||||
#[display("No DNS records found for the input")]
|
||||
NoRecords,
|
||||
|
||||
/// Http2 error
|
||||
#[display(fmt = "{}", _0)]
|
||||
#[display("{}", _0)]
|
||||
H2(h2::Error),
|
||||
|
||||
/// Connecting took too long
|
||||
#[display(fmt = "Timeout while establishing connection")]
|
||||
#[display("Timeout while establishing connection")]
|
||||
Timeout,
|
||||
|
||||
/// Connector has been disconnected
|
||||
#[display(fmt = "Internal error: connector has been disconnected")]
|
||||
#[display("Internal error: connector has been disconnected")]
|
||||
Disconnected,
|
||||
|
||||
/// Unresolved host name
|
||||
#[display(fmt = "Connector received `Connect` method with unresolved host")]
|
||||
#[display("Connector received `Connect` method with unresolved host")]
|
||||
Unresolved,
|
||||
|
||||
/// Connection io error
|
||||
#[display(fmt = "{}", _0)]
|
||||
#[display("{}", _0)]
|
||||
Io(io::Error),
|
||||
}
|
||||
|
||||
|
@ -54,11 +54,11 @@ impl std::error::Error for ConnectError {}
|
|||
impl From<actix_tls::connect::ConnectError> for ConnectError {
|
||||
fn from(err: actix_tls::connect::ConnectError) -> ConnectError {
|
||||
match err {
|
||||
actix_tls::connect::ConnectError::Resolver(e) => ConnectError::Resolver(e),
|
||||
actix_tls::connect::ConnectError::Resolver(err) => ConnectError::Resolver(err),
|
||||
actix_tls::connect::ConnectError::NoRecords => ConnectError::NoRecords,
|
||||
actix_tls::connect::ConnectError::InvalidInput => panic!(),
|
||||
actix_tls::connect::ConnectError::Unresolved => ConnectError::Unresolved,
|
||||
actix_tls::connect::ConnectError::Io(e) => ConnectError::Io(e),
|
||||
actix_tls::connect::ConnectError::Io(err) => ConnectError::Io(err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -66,16 +66,16 @@ impl From<actix_tls::connect::ConnectError> for ConnectError {
|
|||
#[derive(Debug, Display, From)]
|
||||
#[non_exhaustive]
|
||||
pub enum InvalidUrl {
|
||||
#[display(fmt = "Missing URL scheme")]
|
||||
#[display("Missing URL scheme")]
|
||||
MissingScheme,
|
||||
|
||||
#[display(fmt = "Unknown URL scheme")]
|
||||
#[display("Unknown URL scheme")]
|
||||
UnknownScheme,
|
||||
|
||||
#[display(fmt = "Missing host name")]
|
||||
#[display("Missing host name")]
|
||||
MissingHost,
|
||||
|
||||
#[display(fmt = "URL parse error: {}", _0)]
|
||||
#[display("URL parse error: {}", _0)]
|
||||
HttpError(http::Error),
|
||||
}
|
||||
|
||||
|
@ -86,11 +86,11 @@ impl std::error::Error for InvalidUrl {}
|
|||
#[non_exhaustive]
|
||||
pub enum SendRequestError {
|
||||
/// Invalid URL
|
||||
#[display(fmt = "Invalid URL: {}", _0)]
|
||||
#[display("Invalid URL: {}", _0)]
|
||||
Url(InvalidUrl),
|
||||
|
||||
/// Failed to connect to host
|
||||
#[display(fmt = "Failed to connect to host: {}", _0)]
|
||||
#[display("Failed to connect to host: {}", _0)]
|
||||
Connect(ConnectError),
|
||||
|
||||
/// Error sending request
|
||||
|
@ -100,26 +100,26 @@ pub enum SendRequestError {
|
|||
Response(ParseError),
|
||||
|
||||
/// Http error
|
||||
#[display(fmt = "{}", _0)]
|
||||
#[display("{}", _0)]
|
||||
Http(HttpError),
|
||||
|
||||
/// Http2 error
|
||||
#[display(fmt = "{}", _0)]
|
||||
#[display("{}", _0)]
|
||||
H2(h2::Error),
|
||||
|
||||
/// Response took too long
|
||||
#[display(fmt = "Timeout while waiting for response")]
|
||||
#[display("Timeout while waiting for response")]
|
||||
Timeout,
|
||||
|
||||
/// Tunnels are not supported for HTTP/2 connection
|
||||
#[display(fmt = "Tunnels are not supported for http2 connection")]
|
||||
#[display("Tunnels are not supported for http2 connection")]
|
||||
TunnelNotSupported,
|
||||
|
||||
/// Error sending request body
|
||||
Body(BoxError),
|
||||
|
||||
/// Other errors that can occur after submitting a request.
|
||||
#[display(fmt = "{:?}: {}", _1, _0)]
|
||||
#[display("{:?}: {}", _1, _0)]
|
||||
Custom(BoxError, Box<dyn fmt::Debug>),
|
||||
}
|
||||
|
||||
|
@ -130,15 +130,15 @@ impl std::error::Error for SendRequestError {}
|
|||
#[non_exhaustive]
|
||||
pub enum FreezeRequestError {
|
||||
/// Invalid URL
|
||||
#[display(fmt = "Invalid URL: {}", _0)]
|
||||
#[display("Invalid URL: {}", _0)]
|
||||
Url(InvalidUrl),
|
||||
|
||||
/// HTTP error
|
||||
#[display(fmt = "{}", _0)]
|
||||
#[display("{}", _0)]
|
||||
Http(HttpError),
|
||||
|
||||
/// Other errors that can occur after submitting a request.
|
||||
#[display(fmt = "{:?}: {}", _1, _0)]
|
||||
#[display("{:?}: {}", _1, _0)]
|
||||
Custom(BoxError, Box<dyn fmt::Debug>),
|
||||
}
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@ use h2::{
|
|||
SendStream,
|
||||
};
|
||||
use http::{
|
||||
header::{HeaderValue, CONNECTION, CONTENT_LENGTH, TRANSFER_ENCODING},
|
||||
header::{HeaderValue, CONNECTION, CONTENT_LENGTH, HOST, TRANSFER_ENCODING},
|
||||
request::Request,
|
||||
Method, Version,
|
||||
};
|
||||
|
@ -97,7 +97,7 @@ where
                // TODO: consider skipping other headers according to:
                // https://datatracker.ietf.org/doc/html/rfc7540#section-8.1.2.2
                // omit HTTP/1.x only headers
                CONNECTION | TRANSFER_ENCODING => continue,
                CONNECTION | TRANSFER_ENCODING | HOST => continue,
                CONTENT_LENGTH if skip_len => continue,
                // DATE => has_date = true,
                _ => {}
|
||||
|
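The hunk above adds `HOST` to the headers skipped when a request is written out over HTTP/2, where the authority travels in the `:authority` pseudo-header instead (the "do not send Host header on HTTP/2 requests" changelog entry). A small sketch of the same filtering idea using the `http` crate directly; the helper and its loop are illustrative, not awc's actual code:

```rust
use http::header::{HeaderMap, HeaderValue, CONNECTION, CONTENT_LENGTH, HOST, TRANSFER_ENCODING};

/// Copy headers onto an HTTP/2 request, skipping HTTP/1.x-only ones
/// (illustrative helper).
fn h2_headers(h1: &HeaderMap) -> HeaderMap {
    let mut out = HeaderMap::new();

    for (name, value) in h1 {
        // Connection-specific and message-framing headers have no meaning on
        // HTTP/2, and `Host` is superseded by the `:authority` pseudo-header.
        if *name == CONNECTION || *name == TRANSFER_ENCODING || *name == HOST {
            continue;
        }
        out.append(name.clone(), value.clone());
    }

    out
}

fn main() {
    let mut h1 = HeaderMap::new();
    h1.insert(HOST, HeaderValue::from_static("example.com"));
    h1.insert(CONTENT_LENGTH, HeaderValue::from_static("0"));

    let h2 = h2_headers(&h1);
    assert!(!h2.contains_key(HOST));
    assert!(h2.contains_key(CONTENT_LENGTH));
}
```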
|
|
@ -31,7 +31,7 @@ use super::{
|
|||
Connect,
|
||||
};
|
||||
|
||||
#[derive(Hash, Eq, PartialEq, Clone, Debug)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct Key {
|
||||
authority: Authority,
|
||||
}
|
||||
|
@ -42,8 +42,8 @@ impl From<Authority> for Key {
|
|||
}
|
||||
}
|
||||
|
||||
/// Connections pool to reuse I/O per [`Authority`].
|
||||
#[doc(hidden)]
|
||||
/// Connections pool for reuse Io type for certain [`http::uri::Authority`] as key.
|
||||
pub struct ConnectionPool<S, Io>
|
||||
where
|
||||
Io: AsyncWrite + Unpin + 'static,
|
||||
|
@ -52,7 +52,7 @@ where
|
|||
inner: ConnectionPoolInner<Io>,
|
||||
}
|
||||
|
||||
/// wrapper type for check the ref count of Rc.
|
||||
/// Wrapper type for check the ref count of Rc.
|
||||
pub struct ConnectionPoolInner<Io>(Rc<ConnectionPoolInnerPriv<Io>>)
|
||||
where
|
||||
Io: AsyncWrite + Unpin + 'static;
|
||||
|
@ -63,7 +63,7 @@ where
|
|||
{
|
||||
fn new(config: ConnectorConfig) -> Self {
|
||||
let permits = Arc::new(Semaphore::new(config.limit));
|
||||
let available = RefCell::new(HashMap::default());
|
||||
let available = RefCell::new(HashMap::new());
|
||||
|
||||
Self(Rc::new(ConnectionPoolInnerPriv {
|
||||
config,
|
||||
|
@ -72,11 +72,13 @@ where
|
|||
}))
|
||||
}
|
||||
|
||||
/// spawn a async for graceful shutdown h1 Io type with a timeout.
|
||||
/// Spawns a graceful shutdown task for the underlying I/O with a timeout.
|
||||
fn close(&self, conn: ConnectionInnerType<Io>) {
|
||||
if let Some(timeout) = self.config.disconnect_timeout {
|
||||
if let ConnectionInnerType::H1(io) = conn {
|
||||
actix_rt::spawn(CloseConnection::new(io, timeout));
|
||||
if tokio::runtime::Handle::try_current().is_ok() {
|
||||
actix_rt::spawn(CloseConnection::new(io, timeout));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,35 +18,35 @@ pub use crate::client::{ConnectError, FreezeRequestError, InvalidUrl, SendReques
|
|||
#[derive(Debug, Display, From)]
|
||||
pub enum WsClientError {
|
||||
/// Invalid response status
|
||||
#[display(fmt = "Invalid response status")]
|
||||
#[display("Invalid response status")]
|
||||
InvalidResponseStatus(StatusCode),
|
||||
|
||||
/// Invalid upgrade header
|
||||
#[display(fmt = "Invalid upgrade header")]
|
||||
#[display("Invalid upgrade header")]
|
||||
InvalidUpgradeHeader,
|
||||
|
||||
/// Invalid connection header
|
||||
#[display(fmt = "Invalid connection header")]
|
||||
#[display("Invalid connection header")]
|
||||
InvalidConnectionHeader(HeaderValue),
|
||||
|
||||
/// Missing Connection header
|
||||
#[display(fmt = "Missing Connection header")]
|
||||
#[display("Missing Connection header")]
|
||||
MissingConnectionHeader,
|
||||
|
||||
/// Missing Sec-Websocket-Accept header
|
||||
#[display(fmt = "Missing Sec-Websocket-Accept header")]
|
||||
#[display("Missing Sec-Websocket-Accept header")]
|
||||
MissingWebSocketAcceptHeader,
|
||||
|
||||
/// Invalid challenge response
|
||||
#[display(fmt = "Invalid challenge response")]
|
||||
#[display("Invalid challenge response")]
|
||||
InvalidChallengeResponse([u8; 28], HeaderValue),
|
||||
|
||||
/// Protocol error
|
||||
#[display(fmt = "{}", _0)]
|
||||
#[display("{}", _0)]
|
||||
Protocol(WsProtocolError),
|
||||
|
||||
/// Send request error
|
||||
#[display(fmt = "{}", _0)]
|
||||
#[display("{}", _0)]
|
||||
SendRequest(SendRequestError),
|
||||
}
|
||||
|
||||
|
@ -68,13 +68,13 @@ impl From<HttpError> for WsClientError {
|
|||
#[derive(Debug, Display, From)]
|
||||
pub enum JsonPayloadError {
|
||||
/// Content type error
|
||||
#[display(fmt = "Content type error")]
|
||||
#[display("Content type error")]
|
||||
ContentType,
|
||||
/// Deserialize error
|
||||
#[display(fmt = "Json deserialize error: {}", _0)]
|
||||
#[display("Json deserialize error: {}", _0)]
|
||||
Deserialize(JsonError),
|
||||
/// Payload error
|
||||
#[display(fmt = "Error that occur during reading payload: {}", _0)]
|
||||
#[display("Error that occur during reading payload: {}", _0)]
|
||||
Payload(PayloadError),
|
||||
}
|
||||
|
||||
|
|
|
@ -147,8 +147,8 @@ impl FrozenSendBuilder {
|
|||
|
||||
/// Complete request construction and send a body.
|
||||
pub fn send_body(self, body: impl MessageBody + 'static) -> SendClientRequest {
|
||||
if let Some(e) = self.err {
|
||||
return e.into();
|
||||
if let Some(err) = self.err {
|
||||
return err.into();
|
||||
}
|
||||
|
||||
RequestSender::Rc(self.req.head, Some(self.extra_headers)).send_body(
|
||||
|
@ -177,8 +177,8 @@ impl FrozenSendBuilder {
|
|||
|
||||
/// Complete request construction and send an urlencoded body.
|
||||
pub fn send_form(self, value: impl Serialize) -> SendClientRequest {
|
||||
if let Some(e) = self.err {
|
||||
return e.into();
|
||||
if let Some(err) = self.err {
|
||||
return err.into();
|
||||
}
|
||||
|
||||
RequestSender::Rc(self.req.head, Some(self.extra_headers)).send_form(
|
||||
|
@ -196,8 +196,8 @@ impl FrozenSendBuilder {
|
|||
S: Stream<Item = Result<Bytes, E>> + 'static,
|
||||
E: Into<BoxError> + 'static,
|
||||
{
|
||||
if let Some(e) = self.err {
|
||||
return e.into();
|
||||
if let Some(err) = self.err {
|
||||
return err.into();
|
||||
}
|
||||
|
||||
RequestSender::Rc(self.req.head, Some(self.extra_headers)).send_stream(
|
||||
|
@ -211,8 +211,8 @@ impl FrozenSendBuilder {
|
|||
|
||||
/// Complete request construction and send an empty body.
|
||||
pub fn send(self) -> SendClientRequest {
|
||||
if let Some(e) = self.err {
|
||||
return e.into();
|
||||
if let Some(err) = self.err {
|
||||
return err.into();
|
||||
}
|
||||
|
||||
RequestSender::Rc(self.req.head, Some(self.extra_headers)).send(
|
||||
|
|
Some files were not shown because too many files have changed in this diff.