mirror of https://github.com/fafhrd91/actix-web
Merge branch 'master' into rm-config
This commit is contained in: commit d65f3aaf70
@@ -6,4 +6,4 @@ ci-min-test = "hack check --workspace --no-default-features --tests --examples"
 ci-default = "check --workspace --bins --tests --examples"
 ci-full = "check --workspace --all-features --bins --tests --examples"
 ci-test = "test --workspace --all-features --lib --tests --no-fail-fast -- --nocapture"
-ci-doctest = "hack test --workspace --all-features --doc --no-fail-fast -- --nocapture"
+ci-doctest = "test --workspace --all-features --doc --no-fail-fast -- --nocapture"
@ -24,6 +24,8 @@ jobs:
|
|||
runs-on: ${{ matrix.target.os }}
|
||||
|
||||
env:
|
||||
CI: 1
|
||||
CARGO_INCREMENTAL: 0
|
||||
VCPKGRS_DYNAMIC: 1
|
||||
|
||||
steps:
|
||||
|
@ -80,13 +82,6 @@ jobs:
|
|||
command: ci-test
|
||||
args: --skip=test_reading_deflate_encoding_large_random_rustls
|
||||
|
||||
- name: doc tests
|
||||
# due to unknown issue with running doc tests on macOS
|
||||
if: matrix.target.os == 'ubuntu-latest'
|
||||
uses: actions-rs/cargo@v1
|
||||
timeout-minutes: 40
|
||||
with: { command: ci-doctest }
|
||||
|
||||
- name: Generate coverage file
|
||||
if: >
|
||||
matrix.target.os == 'ubuntu-latest'
|
||||
|
@ -106,5 +101,36 @@ jobs:
|
|||
|
||||
- name: Clear the cargo caches
|
||||
run: |
|
||||
cargo install cargo-cache --version 0.6.2 --no-default-features --features ci-autoclean
|
||||
cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean
|
||||
cargo-cache
|
||||
|
||||
rustdoc:
|
||||
name: rustdoc
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Install Rust (nightly)
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: nightly-x86_64-unknown-linux-gnu
|
||||
profile: minimal
|
||||
override: true
|
||||
|
||||
- name: Generate Cargo.lock
|
||||
uses: actions-rs/cargo@v1
|
||||
with: { command: generate-lockfile }
|
||||
- name: Cache Dependencies
|
||||
uses: Swatinem/rust-cache@v1.3.0
|
||||
|
||||
- name: Install cargo-hack
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: install
|
||||
args: cargo-hack
|
||||
|
||||
- name: doc tests
|
||||
uses: actions-rs/cargo@v1
|
||||
timeout-minutes: 40
|
||||
with: { command: ci-doctest }
|
||||
|
|
CHANGES.md (20 changes)
@ -1,17 +1,29 @@
|
|||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
### Changed
|
||||
* Associated type `FromRequest::Config` was removed. [#2233]
|
||||
|
||||
[#2233]: https://github.com/actix/actix-web/pull/2233
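A minimal sketch (not part of the diff; the extractor name is made up) of what the `FromRequest::Config` removal means for downstream extractors after this change lands: only `Error` and `Future` remain as associated types, and extractor configuration is supplied through `app_data` instead. The `futures_util` ready-future is used here for brevity.

```rust
use actix_web::{dev::Payload, Error, FromRequest, HttpRequest};
use futures_util::future::{ready, Ready};

struct ClientIp(String);

impl FromRequest for ClientIp {
    type Error = Error;
    type Future = Ready<Result<Self, Self::Error>>;
    // `type Config` no longer exists on the trait.

    fn from_request(req: &HttpRequest, _payload: &mut Payload) -> Self::Future {
        let ip = req
            .connection_info()
            .realip_remote_addr()
            .unwrap_or("unknown")
            .to_owned();
        ready(Ok(ClientIp(ip)))
    }
}
```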
|
||||
|
||||
## 4.0.0-beta.9 - 2021-09-09
|
||||
### Added
|
||||
* Re-export actix-service `ServiceFactory` in `dev` module. [#2325]
|
||||
|
||||
### Changes
|
||||
### Changed
|
||||
* Compress middleware will return 406 Not Acceptable when no content encoding is acceptable to the client. [#2344]
|
||||
* Move `BaseHttpResponse` to `dev::Response`. [#2379]
|
||||
* Enable `TestRequest::param` to accept more than just static strings. [#2172]
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
### Removed
|
||||
* `FromRequest::Config` was removed. [#2233]
|
||||
### Fixed
|
||||
* Fix quality parse error in Accept-Encoding header. [#2344]
|
||||
* Re-export correct type at `web::HttpResponse`. [#2379]
|
||||
|
||||
[#2233]: https://github.com/actix/actix-web/pull/2233
|
||||
[#2172]: https://github.com/actix/actix-web/pull/2172
|
||||
[#2325]: https://github.com/actix/actix-web/pull/2325
|
||||
[#2344]: https://github.com/actix/actix-web/pull/2344
|
||||
[#2379]: https://github.com/actix/actix-web/pull/2379
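A hedged sketch of the Compress change listed above (handler, route, and address are illustrative): with 4.0.0-beta.9, a request whose `Accept-Encoding` lists only encodings the server cannot produce is answered with 406 Not Acceptable instead of an unencoded body.

```rust
use actix_web::{middleware::Compress, web, App, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // Negotiates Content-Encoding; an impossible Accept-Encoding
            // now results in 406 Not Acceptable.
            .wrap(Compress::default())
            .route("/", web::get().to(|| async { "hello world" }))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```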
|
||||
|
||||
|
||||
## 4.0.0-beta.8 - 2021-06-26
|
||||
|
|
Cargo.toml (18 changes)
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "actix-web"
|
||||
version = "4.0.0-beta.8"
|
||||
version = "4.0.0-beta.9"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
|
||||
keywords = ["actix", "http", "web", "framework", "async"]
|
||||
|
@ -70,15 +70,15 @@ __compress = []
|
|||
[dependencies]
|
||||
actix-codec = "0.4.0"
|
||||
actix-macros = "0.2.1"
|
||||
actix-router = "0.5.0-beta.1"
|
||||
actix-router = "0.5.0-beta.2"
|
||||
actix-rt = "2.2"
|
||||
actix-server = "2.0.0-beta.3"
|
||||
actix-service = "2.0.0"
|
||||
actix-utils = "3.0.0"
|
||||
actix-tls = { version = "3.0.0-beta.5", default-features = false, optional = true }
|
||||
|
||||
actix-web-codegen = "0.5.0-beta.2"
|
||||
actix-http = "3.0.0-beta.9"
|
||||
actix-web-codegen = "0.5.0-beta.4"
|
||||
actix-http = "3.0.0-beta.10"
|
||||
|
||||
ahash = "0.7"
|
||||
bytes = "1"
|
||||
|
@ -100,14 +100,14 @@ regex = "1.4"
|
|||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
serde_urlencoded = "0.7"
|
||||
smallvec = "1.6"
|
||||
smallvec = "1.6.1"
|
||||
socket2 = "0.4.0"
|
||||
time = { version = "0.2.23", default-features = false, features = ["std"] }
|
||||
time = { version = "0.3", default-features = false, features = ["formatting"] }
|
||||
url = "2.1"
|
||||
|
||||
[dev-dependencies]
|
||||
actix-test = { version = "0.1.0-beta.3", features = ["openssl", "rustls"] }
|
||||
awc = { version = "3.0.0-beta.7", features = ["openssl"] }
|
||||
awc = { version = "3.0.0-beta.8", features = ["openssl"] }
|
||||
|
||||
brotli2 = "0.3.2"
|
||||
criterion = { version = "0.3", features = ["html_reports"] }
|
||||
|
@ -119,6 +119,10 @@ rcgen = "0.8"
|
|||
tls-openssl = { package = "openssl", version = "0.10.9" }
|
||||
tls-rustls = { package = "rustls", version = "0.19.0" }
|
||||
|
||||
[profile.dev]
|
||||
# Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.
|
||||
debug = 0
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
opt-level = 3
|
||||
|
|
|
@@ -3,7 +3,8 @@
 * The default `NormalizePath` behavior now strips trailing slashes by default. This was
   previously documented to be the case in v3 but the behavior now matches. The effect is that
   routes defined with trailing slashes will become inaccessible when
-  using `NormalizePath::default()`.
+  using `NormalizePath::default()`. As such, calling `NormalizePath::default()` will log a warning.
+  It is advised that the `new` method be used instead.
 
   Before: `#[get("/test/")]`
   After: `#[get("/test")]`
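A hedged sketch of the migration note above, using actix-web's test utilities (the route and assertion are illustrative, not from the diff): with trailing-slash trimming, `/test/` is normalized to `/test` before routing, so handlers should be registered without the trailing slash.

```rust
use actix_web::middleware::{NormalizePath, TrailingSlash};
use actix_web::{test, web, App, HttpResponse};

#[actix_rt::test]
async fn trailing_slash_is_trimmed() {
    let app = test::init_service(
        App::new()
            // Explicit form of the default behavior described above.
            .wrap(NormalizePath::new(TrailingSlash::Trim))
            .route("/test", web::get().to(|| async { HttpResponse::Ok().finish() })),
    )
    .await;

    // The trailing slash is stripped before route matching, so this hits "/test".
    let req = test::TestRequest::get().uri("/test/").to_request();
    let res = test::call_service(&app, req).await;
    assert!(res.status().is_success());
}
```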
@ -6,10 +6,10 @@
|
|||
<p>
|
||||
|
||||
[](https://crates.io/crates/actix-web)
|
||||
[](https://docs.rs/actix-web/4.0.0-beta.8)
|
||||
[](https://docs.rs/actix-web/4.0.0-beta.9)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||

|
||||
[](https://deps.rs/crate/actix-web/4.0.0-beta.8)
|
||||
[](https://deps.rs/crate/actix-web/4.0.0-beta.9)
|
||||
<br />
|
||||
[](https://github.com/actix/actix-web/actions)
|
||||
[](https://codecov.io/gh/actix/actix-web)
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 0.6.0-beta.7 - 2021-09-09
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "actix-files"
|
||||
version = "0.6.0-beta.6"
|
||||
version = "0.6.0-beta.7"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Static file serving for Actix Web"
|
||||
keywords = ["actix", "http", "async", "futures"]
|
||||
|
@ -15,8 +15,8 @@ name = "actix_files"
|
|||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
actix-web = { version = "4.0.0-beta.8", default-features = false }
|
||||
actix-http = "3.0.0-beta.8"
|
||||
actix-web = { version = "4.0.0-beta.9", default-features = false }
|
||||
actix-http = "3.0.0-beta.10"
|
||||
actix-service = "2.0.0"
|
||||
actix-utils = "3.0.0"
|
||||
|
||||
|
@ -33,5 +33,5 @@ percent-encoding = "2.1"
|
|||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.2"
|
||||
actix-web = "4.0.0-beta.8"
|
||||
actix-web = "4.0.0-beta.9"
|
||||
actix-test = "0.1.0-beta.3"
|
||||
|
|
|
@ -3,11 +3,11 @@
|
|||
> Static file serving for Actix Web
|
||||
|
||||
[](https://crates.io/crates/actix-files)
|
||||
[](https://docs.rs/actix-files/0.6.0-beta.6)
|
||||
[](https://docs.rs/actix-files/0.6.0-beta.7)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-files/0.6.0-beta.6)
|
||||
[](https://deps.rs/crate/actix-files/0.6.0-beta.7)
|
||||
[](https://crates.io/crates/actix-files)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 3.0.0-beta.5 - 2021-09-09
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
|
||||
|
|
|
@ -1,18 +1,18 @@
|
|||
[package]
|
||||
name = "actix-http-test"
|
||||
version = "3.0.0-beta.4"
|
||||
version = "3.0.0-beta.5"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Various helpers for Actix applications to use during testing"
|
||||
readme = "README.md"
|
||||
keywords = ["http", "web", "framework", "async", "futures"]
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
documentation = "https://docs.rs/actix-http-test/"
|
||||
categories = ["network-programming", "asynchronous",
|
||||
categories = [
|
||||
"network-programming",
|
||||
"asynchronous",
|
||||
"web-programming::http-server",
|
||||
"web-programming::websocket"]
|
||||
"web-programming::websocket",
|
||||
]
|
||||
license = "MIT OR Apache-2.0"
|
||||
exclude = [".gitignore", ".cargo/config"]
|
||||
edition = "2018"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
|
@ -35,7 +35,7 @@ actix-tls = "3.0.0-beta.5"
|
|||
actix-utils = "3.0.0"
|
||||
actix-rt = "2.2"
|
||||
actix-server = "2.0.0-beta.3"
|
||||
awc = { version = "3.0.0-beta.7", default-features = false }
|
||||
awc = { version = "3.0.0-beta.8", default-features = false }
|
||||
|
||||
base64 = "0.13"
|
||||
bytes = "1"
|
||||
|
@ -47,9 +47,8 @@ serde = "1.0"
|
|||
serde_json = "1.0"
|
||||
slab = "0.4"
|
||||
serde_urlencoded = "0.7"
|
||||
time = { version = "0.2.23", default-features = false, features = ["std"] }
|
||||
tls-openssl = { version = "0.10.9", package = "openssl", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-web = { version = "4.0.0-beta.8", default-features = false, features = ["cookies"] }
|
||||
actix-http = "3.0.0-beta.8"
|
||||
actix-web = { version = "4.0.0-beta.9", default-features = false, features = ["cookies"] }
|
||||
actix-http = "3.0.0-beta.10"
|
||||
|
|
|
@ -3,11 +3,11 @@
|
|||
> Various helpers for Actix applications to use during testing.
|
||||
|
||||
[](https://crates.io/crates/actix-http-test)
|
||||
[](https://docs.rs/actix-http-test/3.0.0-beta.4)
|
||||
[](https://docs.rs/actix-http-test/3.0.0-beta.5)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||

|
||||
<br>
|
||||
[](https://deps.rs/crate/actix-http-test/3.0.0-beta.4)
|
||||
[](https://deps.rs/crate/actix-http-test/3.0.0-beta.5)
|
||||
[](https://crates.io/crates/actix-http-test)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
|
|
|
@ -7,8 +7,7 @@
|
|||
#[cfg(feature = "openssl")]
|
||||
extern crate tls_openssl as openssl;
|
||||
|
||||
use std::sync::mpsc;
|
||||
use std::{net, thread, time};
|
||||
use std::{net, sync::mpsc, thread, time::Duration};
|
||||
|
||||
use actix_codec::{AsyncRead, AsyncWrite, Framed};
|
||||
use actix_rt::{net::TcpStream, System};
|
||||
|
@ -95,15 +94,15 @@ pub async fn test_server_with_addr<F: ServiceFactory<TcpStream>>(
|
|||
.set_alpn_protos(b"\x02h2\x08http/1.1")
|
||||
.map_err(|e| log::error!("Can not set alpn protocol: {:?}", e));
|
||||
Connector::new()
|
||||
.conn_lifetime(time::Duration::from_secs(0))
|
||||
.timeout(time::Duration::from_millis(30000))
|
||||
.conn_lifetime(Duration::from_secs(0))
|
||||
.timeout(Duration::from_millis(30000))
|
||||
.ssl(builder.build())
|
||||
}
|
||||
#[cfg(not(feature = "openssl"))]
|
||||
{
|
||||
Connector::new()
|
||||
.conn_lifetime(time::Duration::from_secs(0))
|
||||
.timeout(time::Duration::from_millis(30000))
|
||||
.conn_lifetime(Duration::from_secs(0))
|
||||
.timeout(Duration::from_millis(30000))
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -1,16 +1,25 @@
|
|||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
### Changes
|
||||
|
||||
|
||||
## 3.0.0-beta.10 - 2021-09-09
|
||||
### Changed
|
||||
* `ContentEncoding` is now marked `#[non_exhaustive]`. [#2377]
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
### Fixed
|
||||
* Remove slice creation pointing to potential uninitialized data on h1 encoder. [#2364]
|
||||
* Remove `Into<Error>` bound on `Encoder` body types. [#2375]
|
||||
* Fix quality parse error in Accept-Encoding header. [#2344]
|
||||
|
||||
[#2364]: https://github.com/actix/actix-web/pull/2364
|
||||
[#2375]: https://github.com/actix/actix-web/pull/2375
|
||||
[#2344]: https://github.com/actix/actix-web/pull/2344
|
||||
[#2377]: https://github.com/actix/actix-web/pull/2377
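A small illustrative sketch (the function and the `actix_web` import path are assumptions, not from the diff) of what `#[non_exhaustive]` on `ContentEncoding` means for downstream code: matches now need a wildcard arm to stay source-compatible with future variants.

```rust
use actix_web::http::header::ContentEncoding;

fn file_extension(enc: ContentEncoding) -> &'static str {
    match enc {
        ContentEncoding::Gzip => "gz",
        ContentEncoding::Br => "br",
        ContentEncoding::Zstd => "zst",
        // Required: a non_exhaustive enum may gain variants without a breaking release.
        _ => "",
    }
}
```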
|
||||
|
||||
|
||||
## 3.0.0-beta.8 - 2021-08-09
|
||||
## 3.0.0-beta.9 - 2021-08-09
|
||||
### Fixed
|
||||
* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977)
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "actix-http"
|
||||
version = "3.0.0-beta.9"
|
||||
version = "3.0.0-beta.10"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "HTTP primitives for the Actix ecosystem"
|
||||
keywords = ["actix", "http", "framework", "async", "futures"]
|
||||
|
@ -60,6 +60,7 @@ futures-util = { version = "0.3.7", default-features = false, features = ["alloc
|
|||
h2 = "0.3.1"
|
||||
http = "0.2.2"
|
||||
httparse = "1.5.1"
|
||||
httpdate = "1.0.1"
|
||||
itoa = "0.4"
|
||||
language-tags = "0.3"
|
||||
local-channel = "0.1"
|
||||
|
@ -70,11 +71,8 @@ percent-encoding = "2.1"
|
|||
pin-project = "1.0.0"
|
||||
pin-project-lite = "0.2"
|
||||
rand = "0.8"
|
||||
regex = "1.3"
|
||||
serde = "1.0"
|
||||
sha-1 = "0.9"
|
||||
smallvec = "1.6"
|
||||
time = { version = "0.2.23", default-features = false, features = ["std"] }
|
||||
smallvec = "1.6.1"
|
||||
tokio = { version = "1.2", features = ["sync"] }
|
||||
|
||||
# compression
|
||||
|
@ -86,17 +84,18 @@ trust-dns-resolver = { version = "0.20.0", optional = true }
|
|||
|
||||
[dev-dependencies]
|
||||
actix-server = "2.0.0-beta.3"
|
||||
actix-http-test = { version = "3.0.0-beta.4", features = ["openssl"] }
|
||||
actix-http-test = { version = "3.0.0-beta.5", features = ["openssl"] }
|
||||
actix-tls = { version = "3.0.0-beta.5", features = ["openssl"] }
|
||||
async-stream = "0.3"
|
||||
criterion = { version = "0.3", features = ["html_reports"] }
|
||||
env_logger = "0.8"
|
||||
rcgen = "0.8"
|
||||
regex = "1.3"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
tls-openssl = { version = "0.10", package = "openssl" }
|
||||
tls-rustls = { version = "0.19", package = "rustls" }
|
||||
webpki = { version = "0.21.0" }
|
||||
webpki = { version = "0.21" }
|
||||
|
||||
[[example]]
|
||||
name = "ws"
|
||||
|
|
|
@ -3,11 +3,11 @@
|
|||
> HTTP primitives for the Actix ecosystem.
|
||||
|
||||
[](https://crates.io/crates/actix-http)
|
||||
[](https://docs.rs/actix-http/3.0.0-beta.9)
|
||||
[](https://docs.rs/actix-http/3.0.0-beta.10)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-http/3.0.0-beta.9)
|
||||
[](https://deps.rs/crate/actix-http/3.0.0-beta.10)
|
||||
[](https://crates.io/crates/actix-http)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
|
|
|
@ -11,8 +11,6 @@ use bytes::{Bytes, BytesMut};
|
|||
use futures_core::ready;
|
||||
use pin_project_lite::pin_project;
|
||||
|
||||
use crate::error::Error;
|
||||
|
||||
use super::BodySize;
|
||||
|
||||
/// An interface for response bodies.
|
||||
|
@ -47,7 +45,6 @@ impl MessageBody for () {
|
|||
impl<B> MessageBody for Box<B>
|
||||
where
|
||||
B: MessageBody + Unpin,
|
||||
B::Error: Into<Error>,
|
||||
{
|
||||
type Error = B::Error;
|
||||
|
||||
|
@ -66,7 +63,6 @@ where
|
|||
impl<B> MessageBody for Pin<Box<B>>
|
||||
where
|
||||
B: MessageBody,
|
||||
B::Error: Into<Error>,
|
||||
{
|
||||
type Error = B::Error;
|
||||
|
||||
|
|
|
@ -1,18 +1,19 @@
|
|||
use std::cell::Cell;
|
||||
use std::fmt::Write;
|
||||
use std::rc::Rc;
|
||||
use std::time::Duration;
|
||||
use std::{fmt, net};
|
||||
use std::{
|
||||
cell::Cell,
|
||||
fmt::{self, Write},
|
||||
net,
|
||||
rc::Rc,
|
||||
time::{Duration, SystemTime},
|
||||
};
|
||||
|
||||
use actix_rt::{
|
||||
task::JoinHandle,
|
||||
time::{interval, sleep_until, Instant, Sleep},
|
||||
};
|
||||
use bytes::BytesMut;
|
||||
use time::OffsetDateTime;
|
||||
|
||||
/// "Sun, 06 Nov 1994 08:49:37 GMT".len()
|
||||
const DATE_VALUE_LENGTH: usize = 29;
|
||||
pub(crate) const DATE_VALUE_LENGTH: usize = 29;
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Copy)]
|
||||
/// Server keep-alive setting
|
||||
|
@ -206,12 +207,7 @@ impl Date {
|
|||
|
||||
fn update(&mut self) {
|
||||
self.pos = 0;
|
||||
write!(
|
||||
self,
|
||||
"{}",
|
||||
OffsetDateTime::now_utc().format("%a, %d %b %Y %H:%M:%S GMT")
|
||||
)
|
||||
.unwrap();
|
||||
write!(self, "{}", httpdate::fmt_http_date(SystemTime::now())).unwrap();
|
||||
}
|
||||
}
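For context on the `update` change above, a standalone hedged sketch (not part of the diff) of what the `httpdate` crate produces: `fmt_http_date` renders a `SystemTime` as an IMF-fixdate string of exactly `DATE_VALUE_LENGTH` (29) bytes, matching the format the old `time`-based format string produced.

```rust
use std::time::SystemTime;

fn main() {
    // e.g. "Sun, 06 Nov 1994 08:49:37 GMT"
    let now = httpdate::fmt_http_date(SystemTime::now());
    assert_eq!(now.len(), 29);
    println!("{}", now);
}
```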
|
||||
|
||||
|
@ -269,11 +265,11 @@ impl DateService {
|
|||
}
|
||||
|
||||
// TODO: move to a util module for testing all spawn handle drop style tasks.
|
||||
#[cfg(test)]
|
||||
/// Test Module for checking the drop state of certain async tasks that are spawned
|
||||
/// with `actix_rt::spawn`
|
||||
///
|
||||
/// The target task must explicitly generate `NotifyOnDrop` when spawning the task
|
||||
#[cfg(test)]
|
||||
mod notify_on_drop {
|
||||
use std::cell::RefCell;
|
||||
|
||||
|
@ -283,9 +279,8 @@ mod notify_on_drop {
|
|||
|
||||
/// Check if the spawned task is dropped.
|
||||
///
|
||||
/// # Panic:
|
||||
///
|
||||
/// When there was no `NotifyOnDrop` instance on current thread
|
||||
/// # Panics
|
||||
/// Panics when there was no `NotifyOnDrop` instance on current thread.
|
||||
pub(crate) fn is_dropped() -> bool {
|
||||
NOTIFY_DROPPED.with(|bool| {
|
||||
bool.borrow()
|
||||
|
|
|
@ -80,7 +80,7 @@ where
|
|||
let encoding = headers
|
||||
.get(&CONTENT_ENCODING)
|
||||
.and_then(|val| val.to_str().ok())
|
||||
.map(ContentEncoding::from)
|
||||
.and_then(|x| x.parse().ok())
|
||||
.unwrap_or(ContentEncoding::Identity);
|
||||
|
||||
Self::new(stream, encoding)
|
||||
|
|
|
@ -29,7 +29,7 @@ use crate::{
|
|||
header::{ContentEncoding, CONTENT_ENCODING},
|
||||
HeaderValue, StatusCode,
|
||||
},
|
||||
Error, ResponseHead,
|
||||
ResponseHead,
|
||||
};
|
||||
|
||||
use super::Writer;
|
||||
|
@ -107,7 +107,6 @@ enum EncoderBody<B> {
|
|||
impl<B> MessageBody for EncoderBody<B>
|
||||
where
|
||||
B: MessageBody,
|
||||
B::Error: Into<Error>,
|
||||
{
|
||||
type Error = EncoderError<B::Error>;
|
||||
|
||||
|
@ -142,7 +141,6 @@ where
|
|||
impl<B> MessageBody for Encoder<B>
|
||||
where
|
||||
B: MessageBody,
|
||||
B::Error: Into<Error>,
|
||||
{
|
||||
type Error = EncoderError<B::Error>;
|
||||
|
||||
|
|
|
@ -65,7 +65,9 @@ where
|
|||
let next =
|
||||
match this.body.as_mut().as_pin_mut().unwrap().poll_next(cx) {
|
||||
Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)),
|
||||
Poll::Ready(Some(Err(err))) => return Poll::Ready(Err(err.into())),
|
||||
Poll::Ready(Some(Err(err))) => {
|
||||
return Poll::Ready(Err(err.into()))
|
||||
}
|
||||
Poll::Ready(None) => Poll::Ready(None),
|
||||
Poll::Pending => Poll::Pending,
|
||||
};
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
use std::{convert::Infallible, str::FromStr};
|
||||
use std::{convert::TryFrom, str::FromStr};
|
||||
|
||||
use derive_more::{Display, Error};
|
||||
use http::header::InvalidHeaderValue;
|
||||
|
||||
use crate::{
|
||||
|
@ -8,8 +9,16 @@ use crate::{
|
|||
HttpMessage,
|
||||
};
|
||||
|
||||
/// Error returned when a content encoding is unknown.
|
||||
///
|
||||
/// Example: 'compress'
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "unsupported content encoding")]
|
||||
pub struct ContentEncodingParseError;
|
||||
|
||||
/// Represents a supported content encoding.
|
||||
#[derive(Copy, Clone, PartialEq, Debug)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
#[non_exhaustive]
|
||||
pub enum ContentEncoding {
|
||||
/// Automatically select encoding based on encoding negotiation.
|
||||
Auto,
|
||||
|
@ -37,7 +46,7 @@ impl ContentEncoding {
|
|||
matches!(self, ContentEncoding::Identity | ContentEncoding::Auto)
|
||||
}
|
||||
|
||||
/// Convert content encoding to string
|
||||
/// Convert content encoding to string.
|
||||
#[inline]
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
|
@ -48,18 +57,6 @@ impl ContentEncoding {
|
|||
ContentEncoding::Identity | ContentEncoding::Auto => "identity",
|
||||
}
|
||||
}
|
||||
|
||||
/// Default Q-factor (quality) value.
|
||||
#[inline]
|
||||
pub fn quality(self) -> f64 {
|
||||
match self {
|
||||
ContentEncoding::Br => 1.1,
|
||||
ContentEncoding::Gzip => 1.0,
|
||||
ContentEncoding::Deflate => 0.9,
|
||||
ContentEncoding::Identity | ContentEncoding::Auto => 0.1,
|
||||
ContentEncoding::Zstd => 0.0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for ContentEncoding {
|
||||
|
@ -69,31 +66,33 @@ impl Default for ContentEncoding {
|
|||
}
|
||||
|
||||
impl FromStr for ContentEncoding {
|
||||
type Err = Infallible;
|
||||
type Err = ContentEncodingParseError;
|
||||
|
||||
fn from_str(val: &str) -> Result<Self, Self::Err> {
|
||||
Ok(Self::from(val))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for ContentEncoding {
|
||||
fn from(val: &str) -> ContentEncoding {
|
||||
let val = val.trim();
|
||||
|
||||
if val.eq_ignore_ascii_case("br") {
|
||||
ContentEncoding::Br
|
||||
Ok(ContentEncoding::Br)
|
||||
} else if val.eq_ignore_ascii_case("gzip") {
|
||||
ContentEncoding::Gzip
|
||||
Ok(ContentEncoding::Gzip)
|
||||
} else if val.eq_ignore_ascii_case("deflate") {
|
||||
ContentEncoding::Deflate
|
||||
Ok(ContentEncoding::Deflate)
|
||||
} else if val.eq_ignore_ascii_case("zstd") {
|
||||
ContentEncoding::Zstd
|
||||
Ok(ContentEncoding::Zstd)
|
||||
} else {
|
||||
ContentEncoding::default()
|
||||
Err(ContentEncodingParseError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for ContentEncoding {
|
||||
type Error = ContentEncodingParseError;
|
||||
|
||||
fn try_from(val: &str) -> Result<Self, Self::Error> {
|
||||
val.parse()
|
||||
}
|
||||
}
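A hedged usage sketch for the parsing change above (assumed to run in a module where `ContentEncoding` is in scope): unknown encodings now produce `ContentEncodingParseError` instead of silently falling back to the default encoding.

```rust
use std::convert::TryFrom;

fn demo() {
    assert_eq!("br".parse::<ContentEncoding>().unwrap(), ContentEncoding::Br);
    assert!(ContentEncoding::try_from("zstd").is_ok());

    // Previously this yielded the default encoding; it is now a hard error.
    assert!("compress".parse::<ContentEncoding>().is_err());
}
```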
|
||||
|
||||
impl IntoHeaderValue for ContentEncoding {
|
||||
type Error = InvalidHeaderValue;
|
||||
|
||||
|
|
|
@ -0,0 +1,82 @@
|
|||
use std::{fmt, io::Write, str::FromStr, time::SystemTime};
|
||||
|
||||
use bytes::BytesMut;
|
||||
use http::header::{HeaderValue, InvalidHeaderValue};
|
||||
|
||||
use crate::{
|
||||
config::DATE_VALUE_LENGTH, error::ParseError, header::IntoHeaderValue,
|
||||
helpers::MutWriter,
|
||||
};
|
||||
|
||||
/// A timestamp with HTTP formatting and parsing.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct HttpDate(SystemTime);
|
||||
|
||||
impl FromStr for HttpDate {
|
||||
type Err = ParseError;
|
||||
|
||||
fn from_str(s: &str) -> Result<HttpDate, ParseError> {
|
||||
match httpdate::parse_http_date(s) {
|
||||
Ok(sys_time) => Ok(HttpDate(sys_time)),
|
||||
Err(_) => Err(ParseError::Header),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for HttpDate {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let date_str = httpdate::fmt_http_date(self.0);
|
||||
f.write_str(&date_str)
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoHeaderValue for HttpDate {
|
||||
type Error = InvalidHeaderValue;
|
||||
|
||||
fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
|
||||
let mut buf = BytesMut::with_capacity(DATE_VALUE_LENGTH);
|
||||
let mut wrt = MutWriter(&mut buf);
|
||||
|
||||
// unwrap: date output is known to be well formed and of known length
|
||||
write!(wrt, "{}", httpdate::fmt_http_date(self.0)).unwrap();
|
||||
|
||||
HeaderValue::from_maybe_shared(buf.split().freeze())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SystemTime> for HttpDate {
|
||||
fn from(sys_time: SystemTime) -> HttpDate {
|
||||
HttpDate(sys_time)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<HttpDate> for SystemTime {
|
||||
fn from(HttpDate(sys_time): HttpDate) -> SystemTime {
|
||||
sys_time
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::time::Duration;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn date_header() {
|
||||
macro_rules! assert_parsed_date {
|
||||
($case:expr, $exp:expr) => {
|
||||
assert_eq!($case.parse::<HttpDate>().unwrap(), $exp);
|
||||
};
|
||||
}
|
||||
|
||||
// 784198117 = SystemTime::from(datetime!(1994-11-07 08:48:37).assume_utc()).duration_since(SystemTime::UNIX_EPOCH));
|
||||
let nov_07 = HttpDate(SystemTime::UNIX_EPOCH + Duration::from_secs(784198117));
|
||||
|
||||
assert_parsed_date!("Mon, 07 Nov 1994 08:48:37 GMT", nov_07);
|
||||
assert_parsed_date!("Monday, 07-Nov-94 08:48:37 GMT", nov_07);
|
||||
assert_parsed_date!("Mon Nov 7 08:48:37 1994", nov_07);
|
||||
|
||||
assert!("this-is-no-date".parse::<HttpDate>().is_err());
|
||||
}
|
||||
}
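A brief hedged sketch (assumed to sit alongside the new `HttpDate` above): the type round-trips through `SystemTime`, and its `Display` output is the IMF-fixdate form produced by `httpdate`.

```rust
use std::time::SystemTime;

fn demo() {
    let date: HttpDate = "Mon, 07 Nov 1994 08:48:37 GMT".parse().unwrap();
    let sys: SystemTime = date.into();
    let roundtrip = HttpDate::from(sys);
    assert_eq!(date, roundtrip);
    assert_eq!(roundtrip.to_string(), "Mon, 07 Nov 1994 08:48:37 GMT");
}
```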
|
|
@ -1,97 +0,0 @@
|
|||
use std::{
|
||||
fmt,
|
||||
io::Write,
|
||||
str::FromStr,
|
||||
time::{SystemTime, UNIX_EPOCH},
|
||||
};
|
||||
|
||||
use bytes::buf::BufMut;
|
||||
use bytes::BytesMut;
|
||||
use http::header::{HeaderValue, InvalidHeaderValue};
|
||||
use time::{OffsetDateTime, PrimitiveDateTime, UtcOffset};
|
||||
|
||||
use crate::error::ParseError;
|
||||
use crate::header::IntoHeaderValue;
|
||||
use crate::time_parser;
|
||||
|
||||
/// A timestamp with HTTP formatting and parsing.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct HttpDate(OffsetDateTime);
|
||||
|
||||
impl FromStr for HttpDate {
|
||||
type Err = ParseError;
|
||||
|
||||
fn from_str(s: &str) -> Result<HttpDate, ParseError> {
|
||||
match time_parser::parse_http_date(s) {
|
||||
Some(t) => Ok(HttpDate(t.assume_utc())),
|
||||
None => Err(ParseError::Header),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for HttpDate {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::Display::fmt(&self.0.format("%a, %d %b %Y %H:%M:%S GMT"), f)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SystemTime> for HttpDate {
|
||||
fn from(sys: SystemTime) -> HttpDate {
|
||||
HttpDate(PrimitiveDateTime::from(sys).assume_utc())
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoHeaderValue for HttpDate {
|
||||
type Error = InvalidHeaderValue;
|
||||
|
||||
fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
|
||||
let mut wrt = BytesMut::with_capacity(29).writer();
|
||||
write!(
|
||||
wrt,
|
||||
"{}",
|
||||
self.0
|
||||
.to_offset(UtcOffset::UTC)
|
||||
.format("%a, %d %b %Y %H:%M:%S GMT")
|
||||
)
|
||||
.unwrap();
|
||||
HeaderValue::from_maybe_shared(wrt.get_mut().split().freeze())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<HttpDate> for SystemTime {
|
||||
fn from(date: HttpDate) -> SystemTime {
|
||||
let dt = date.0;
|
||||
let epoch = OffsetDateTime::unix_epoch();
|
||||
|
||||
UNIX_EPOCH + (dt - epoch)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::HttpDate;
|
||||
use time::{date, time, PrimitiveDateTime};
|
||||
|
||||
#[test]
|
||||
fn test_date() {
|
||||
let nov_07 = HttpDate(
|
||||
PrimitiveDateTime::new(date!(1994 - 11 - 07), time!(8:48:37)).assume_utc(),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
"Sun, 07 Nov 1994 08:48:37 GMT".parse::<HttpDate>().unwrap(),
|
||||
nov_07
|
||||
);
|
||||
assert_eq!(
|
||||
"Sunday, 07-Nov-94 08:48:37 GMT"
|
||||
.parse::<HttpDate>()
|
||||
.unwrap(),
|
||||
nov_07
|
||||
);
|
||||
assert_eq!(
|
||||
"Sun Nov 7 08:48:37 1994".parse::<HttpDate>().unwrap(),
|
||||
nov_07
|
||||
);
|
||||
assert!("this-is-no-date".parse::<HttpDate>().is_err());
|
||||
}
|
||||
}
|
|
@ -3,12 +3,12 @@
|
|||
mod charset;
|
||||
mod content_encoding;
|
||||
mod extended;
|
||||
mod httpdate;
|
||||
mod http_date;
|
||||
mod quality_item;
|
||||
|
||||
pub use self::charset::Charset;
|
||||
pub use self::content_encoding::ContentEncoding;
|
||||
pub use self::extended::{parse_extended_value, ExtendedValue};
|
||||
pub use self::httpdate::HttpDate;
|
||||
pub use self::http_date::HttpDate;
|
||||
pub use self::quality_item::{q, qitem, Quality, QualityItem};
|
||||
pub use language_tags::LanguageTag;
|
||||
|
|
|
@ -1,11 +1,14 @@
|
|||
use std::{
|
||||
cmp,
|
||||
convert::{TryFrom, TryInto},
|
||||
fmt, str,
|
||||
fmt,
|
||||
str::{self, FromStr},
|
||||
};
|
||||
|
||||
use derive_more::{Display, Error};
|
||||
|
||||
use crate::error::ParseError;
|
||||
|
||||
const MAX_QUALITY: u16 = 1000;
|
||||
const MAX_FLOAT_QUALITY: f32 = 1.0;
|
||||
|
||||
|
@ -113,12 +116,12 @@ impl<T: fmt::Display> fmt::Display for QualityItem<T> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: str::FromStr> str::FromStr for QualityItem<T> {
|
||||
type Err = crate::error::ParseError;
|
||||
impl<T: FromStr> FromStr for QualityItem<T> {
|
||||
type Err = ParseError;
|
||||
|
||||
fn from_str(qitem_str: &str) -> Result<QualityItem<T>, crate::error::ParseError> {
|
||||
fn from_str(qitem_str: &str) -> Result<Self, Self::Err> {
|
||||
if !qitem_str.is_ascii() {
|
||||
return Err(crate::error::ParseError::Header);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
// Set defaults used if parsing fails.
|
||||
|
@ -139,7 +142,7 @@ impl<T: str::FromStr> str::FromStr for QualityItem<T> {
|
|||
if parts[0].len() < 2 {
|
||||
// Can't possibly be an attribute since an attribute needs at least a name followed
|
||||
// by an equals sign. And bare identifiers are forbidden.
|
||||
return Err(crate::error::ParseError::Header);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
let start = &parts[0][0..2];
|
||||
|
@ -148,25 +151,21 @@ impl<T: str::FromStr> str::FromStr for QualityItem<T> {
|
|||
let q_val = &parts[0][2..];
|
||||
if q_val.len() > 5 {
|
||||
// longer than 5 indicates an over-precise q-factor
|
||||
return Err(crate::error::ParseError::Header);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
let q_value = q_val
|
||||
.parse::<f32>()
|
||||
.map_err(|_| crate::error::ParseError::Header)?;
|
||||
let q_value = q_val.parse::<f32>().map_err(|_| ParseError::Header)?;
|
||||
|
||||
if (0f32..=1f32).contains(&q_value) {
|
||||
quality = q_value;
|
||||
raw_item = parts[1];
|
||||
} else {
|
||||
return Err(crate::error::ParseError::Header);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let item = raw_item
|
||||
.parse::<T>()
|
||||
.map_err(|_| crate::error::ParseError::Header)?;
|
||||
let item = raw_item.parse::<T>().map_err(|_| ParseError::Header)?;
|
||||
|
||||
// we already checked above that the quality is within range
|
||||
Ok(QualityItem::new(item, Quality::from_f32(quality)))
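A hedged usage sketch for the parser above (assumed to run where `QualityItem` and `ParseError` are in scope): a quality-item string splits into the value and its q-factor, and over-precise q-factors are rejected.

```rust
fn demo() -> Result<(), ParseError> {
    let item: QualityItem<String> = "gzip; q=0.8".parse()?;
    assert_eq!(item.item, "gzip");

    // More than three decimal places in the q-factor is treated as malformed.
    assert!("gzip; q=0.12345".parse::<QualityItem<String>>().is_err());
    Ok(())
}
```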
|
||||
|
@ -224,7 +223,7 @@ mod tests {
|
|||
}
|
||||
}
|
||||
|
||||
impl str::FromStr for Encoding {
|
||||
impl FromStr for Encoding {
|
||||
type Err = crate::error::ParseError;
|
||||
fn from_str(s: &str) -> Result<Encoding, crate::error::ParseError> {
|
||||
use Encoding::*;
|
||||
|
|
|
@ -44,7 +44,6 @@ mod request;
|
|||
mod response;
|
||||
mod response_builder;
|
||||
mod service;
|
||||
mod time_parser;
|
||||
|
||||
pub mod error;
|
||||
pub mod h1;
|
||||
|
|
|
@ -1,72 +0,0 @@
|
|||
use time::{Date, OffsetDateTime, PrimitiveDateTime};
|
||||
|
||||
/// Attempt to parse a `time` string as one of either RFC 1123, RFC 850, or asctime.
|
||||
pub(crate) fn parse_http_date(time: &str) -> Option<PrimitiveDateTime> {
|
||||
try_parse_rfc_1123(time)
|
||||
.or_else(|| try_parse_rfc_850(time))
|
||||
.or_else(|| try_parse_asctime(time))
|
||||
}
|
||||
|
||||
/// Attempt to parse a `time` string as a RFC 1123 formatted date time string.
|
||||
///
|
||||
/// Eg: `Fri, 12 Feb 2021 00:14:29 GMT`
|
||||
fn try_parse_rfc_1123(time: &str) -> Option<PrimitiveDateTime> {
|
||||
time::parse(time, "%a, %d %b %Y %H:%M:%S").ok()
|
||||
}
|
||||
|
||||
/// Attempt to parse a `time` string as a RFC 850 formatted date time string.
|
||||
///
|
||||
/// Eg: `Wednesday, 11-Jan-21 13:37:41 UTC`
|
||||
fn try_parse_rfc_850(time: &str) -> Option<PrimitiveDateTime> {
|
||||
let dt = PrimitiveDateTime::parse(time, "%A, %d-%b-%y %H:%M:%S").ok()?;
|
||||
|
||||
// If the `time` string contains a two-digit year, then as per RFC 2616 § 19.3,
|
||||
// we consider the year as part of this century if it's within the next 50 years,
|
||||
// otherwise we consider as part of the previous century.
|
||||
|
||||
let now = OffsetDateTime::now_utc();
|
||||
let century_start_year = (now.year() / 100) * 100;
|
||||
let mut expanded_year = century_start_year + dt.year();
|
||||
|
||||
if expanded_year > now.year() + 50 {
|
||||
expanded_year -= 100;
|
||||
}
|
||||
|
||||
let date = Date::try_from_ymd(expanded_year, dt.month(), dt.day()).ok()?;
|
||||
Some(PrimitiveDateTime::new(date, dt.time()))
|
||||
}
|
||||
|
||||
/// Attempt to parse a `time` string using ANSI C's `asctime` format.
|
||||
///
|
||||
/// Eg: `Wed Feb 13 15:46:11 2013`
|
||||
fn try_parse_asctime(time: &str) -> Option<PrimitiveDateTime> {
|
||||
time::parse(time, "%a %b %_d %H:%M:%S %Y").ok()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use time::{date, time};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_rfc_850_year_shift() {
|
||||
let date = try_parse_rfc_850("Friday, 19-Nov-82 16:14:55 EST").unwrap();
|
||||
assert_eq!(date, date!(1982 - 11 - 19).with_time(time!(16:14:55)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-62 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(2062 - 01 - 11).with_time(time!(13:37:41)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-21 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(2021 - 01 - 11).with_time(time!(13:37:41)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-23 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(2023 - 01 - 11).with_time(time!(13:37:41)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-99 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(1999 - 01 - 11).with_time(time!(13:37:41)));
|
||||
|
||||
let date = try_parse_rfc_850("Wednesday, 11-Jan-00 13:37:41 EST").unwrap();
|
||||
assert_eq!(date, date!(2000 - 01 - 11).with_time(time!(13:37:41)));
|
||||
}
|
||||
}
|
|
@ -183,6 +183,7 @@ async fn test_chunked_payload() {
|
|||
Some(caps) => caps.get(1).unwrap().as_str().parse().unwrap(),
|
||||
None => panic!("Failed to find size in HTTP Response: {}", data),
|
||||
};
|
||||
|
||||
size
|
||||
};
|
||||
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 0.4.0-beta.6 - 2021-09-09
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
|
||||
|
|
|
@ -1,13 +1,11 @@
|
|||
[package]
|
||||
name = "actix-multipart"
|
||||
version = "0.4.0-beta.5"
|
||||
version = "0.4.0-beta.6"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Multipart form support for Actix Web"
|
||||
readme = "README.md"
|
||||
keywords = ["http", "web", "framework", "async", "futures"]
|
||||
homepage = "https://actix.rs"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
documentation = "https://docs.rs/actix-multipart"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
|
@ -16,7 +14,7 @@ name = "actix_multipart"
|
|||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
actix-web = { version = "4.0.0-beta.8", default-features = false }
|
||||
actix-web = { version = "4.0.0-beta.9", default-features = false }
|
||||
actix-utils = "3.0.0"
|
||||
|
||||
bytes = "1"
|
||||
|
@ -31,6 +29,6 @@ twoway = "0.2"
|
|||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.2"
|
||||
actix-http = "3.0.0-beta.8"
|
||||
actix-http = "3.0.0-beta.10"
|
||||
tokio = { version = "1", features = ["sync"] }
|
||||
tokio-stream = "0.1"
|
||||
|
|
|
@ -3,11 +3,11 @@
|
|||
> Multipart form support for Actix Web.
|
||||
|
||||
[](https://crates.io/crates/actix-multipart)
|
||||
[](https://docs.rs/actix-multipart/0.4.0-beta.5)
|
||||
[](https://docs.rs/actix-multipart/0.4.0-beta.6)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-multipart/0.4.0-beta.5)
|
||||
[](https://deps.rs/crate/actix-multipart/0.4.0-beta.6)
|
||||
[](https://crates.io/crates/actix-multipart)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
|
|
|
@ -1,12 +1,18 @@
|
|||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 0.5.0-beta.2 - 2021-09-09
|
||||
* Introduce `ResourceDef::join`. [#380]
|
||||
* Disallow prefix routes with tail segments. [#379]
|
||||
* Enforce path separators on dynamic prefixes. [#378]
|
||||
* Improve malformed path error message. [#384]
|
||||
* Prefix segments now always end with a segment delimiter or end-of-input. [#2355]
|
||||
* Prefix segments with trailing slashes define a trailing empty segment. [#2355]
|
||||
* Support multi-pattern prefixes and joins. [#2356]
|
||||
* `ResourceDef::pattern` now returns the first pattern in multi-pattern resources. [#2356]
|
||||
* Support `build_resource_path` on multi-pattern resources. [#2356]
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
[#378]: https://github.com/actix/actix-net/pull/378
|
||||
|
@ -14,6 +20,7 @@
|
|||
[#380]: https://github.com/actix/actix-net/pull/380
|
||||
[#384]: https://github.com/actix/actix-net/pull/384
|
||||
[#2355]: https://github.com/actix/actix-web/pull/2355
|
||||
[#2356]: https://github.com/actix/actix-web/pull/2356
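A hedged sketch of `ResourceDef::join` from the list above (the borrowed-argument signature and the paths are assumptions for illustration): joining a prefix definition with another definition yields a combined resource.

```rust
use actix_router::ResourceDef;

fn demo() {
    let scope = ResourceDef::prefix("/api");
    let joined = scope.join(&ResourceDef::new("/user/{id}"));

    assert!(joined.is_match("/api/user/123"));
    assert!(!joined.is_match("/user/123"));
}
```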
|
||||
|
||||
|
||||
## 0.5.0-beta.1 - 2021-07-20
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "actix-router"
|
||||
version = "0.5.0-beta.1"
|
||||
version = "0.5.0-beta.2"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"Ali MJ Al-Nasrawy <alimjalnasrawy@gmail.com>",
|
||||
|
@ -8,7 +8,7 @@ authors = [
|
|||
]
|
||||
description = "Resource path matching and router"
|
||||
keywords = ["actix", "router", "routing"]
|
||||
repository = "https://github.com/actix/actix-net.git"
|
||||
repository = "https://github.com/actix/actix-web.git"
|
||||
license = "MIT OR Apache-2.0"
|
||||
edition = "2018"
|
||||
|
||||
|
|
|
@ -31,13 +31,13 @@ const REGEX_FLAGS: &str = "(?s-m)";
|
|||
/// # Pattern Format and Matching Behavior
|
||||
///
|
||||
/// Resource pattern is defined as a string of zero or more _segments_ where each segment is
|
||||
/// preceeded by a slash `/`.
|
||||
/// preceded by a slash `/`.
|
||||
///
|
||||
/// This means that pattern string __must__ either be empty or begin with a slash (`/`).
|
||||
/// This also implies that a trailing slash in pattern defines an empty segment.
|
||||
/// For example, the pattern `"/user/"` has two segments: `["user", ""]`
|
||||
///
|
||||
/// A key point to undertand is that `ResourceDef` matches segments, not strings.
|
||||
/// A key point to understand is that `ResourceDef` matches segments, not strings.
|
||||
/// It matches segments individually.
|
||||
/// For example, the pattern `/user/` is not considered a prefix for the path `/user/123/456`,
|
||||
/// because the second segment doesn't match: `["user", ""]` vs `["user", "123", "456"]`.
|
||||
|
@ -220,17 +220,15 @@ pub struct ResourceDef {
|
|||
name: Option<String>,
|
||||
|
||||
/// Pattern that generated the resource definition.
|
||||
///
|
||||
/// `None` when pattern type is `DynamicSet`.
|
||||
patterns: Patterns,
|
||||
|
||||
is_prefix: bool,
|
||||
|
||||
/// Pattern type.
|
||||
pat_type: PatternType,
|
||||
|
||||
/// List of segments that compose the pattern, in order.
|
||||
///
|
||||
/// `None` when pattern type is `DynamicSet`.
|
||||
segments: Option<Vec<PatternSegment>>,
|
||||
segments: Vec<PatternSegment>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
|
@ -248,9 +246,6 @@ enum PatternType {
|
|||
/// Single constant/literal segment.
|
||||
Static(String),
|
||||
|
||||
/// Single constant/literal prefix segment.
|
||||
Prefix(String),
|
||||
|
||||
/// Single regular expression and list of dynamic segment names.
|
||||
Dynamic(Regex, Vec<&'static str>),
|
||||
|
||||
|
@ -284,45 +279,7 @@ impl ResourceDef {
|
|||
/// ```
|
||||
pub fn new<T: IntoPatterns>(paths: T) -> Self {
|
||||
profile_method!(new);
|
||||
|
||||
match paths.patterns() {
|
||||
Patterns::Single(pattern) => ResourceDef::from_single_pattern(&pattern, false),
|
||||
|
||||
// since zero length pattern sets are possible
|
||||
// just return a useless `ResourceDef`
|
||||
Patterns::List(patterns) if patterns.is_empty() => ResourceDef {
|
||||
id: 0,
|
||||
name: None,
|
||||
patterns: Patterns::List(patterns),
|
||||
pat_type: PatternType::DynamicSet(RegexSet::empty(), Vec::new()),
|
||||
segments: None,
|
||||
},
|
||||
|
||||
Patterns::List(patterns) => {
|
||||
let mut re_set = Vec::with_capacity(patterns.len());
|
||||
let mut pattern_data = Vec::new();
|
||||
|
||||
for pattern in &patterns {
|
||||
match ResourceDef::parse(pattern, false, true) {
|
||||
(PatternType::Dynamic(re, names), _) => {
|
||||
re_set.push(re.as_str().to_owned());
|
||||
pattern_data.push((re, names));
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
let pattern_re_set = RegexSet::new(re_set).unwrap();
|
||||
|
||||
ResourceDef {
|
||||
id: 0,
|
||||
name: None,
|
||||
patterns: Patterns::List(patterns),
|
||||
pat_type: PatternType::DynamicSet(pattern_re_set, pattern_data),
|
||||
segments: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
Self::new2(paths, false)
|
||||
}
|
||||
|
||||
/// Constructs a new resource definition using a pattern that performs prefix matching.
|
||||
|
@ -348,9 +305,9 @@ impl ResourceDef {
|
|||
/// assert!(!resource.is_match("user/123/stars"));
|
||||
/// assert!(!resource.is_match("/foo"));
|
||||
/// ```
|
||||
pub fn prefix(path: &str) -> Self {
|
||||
pub fn prefix<T: IntoPatterns>(paths: T) -> Self {
|
||||
profile_method!(prefix);
|
||||
ResourceDef::from_single_pattern(path, true)
|
||||
ResourceDef::new2(paths, true)
|
||||
}
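A hedged usage sketch for the new `prefix` signature above (the paths are illustrative): `prefix` now takes the same `IntoPatterns` input as `new`, so a prefix can be defined from several alternative patterns at once.

```rust
use actix_router::ResourceDef;

fn demo() {
    let re = ResourceDef::prefix(vec!["/api/{ver}", "/internal"]);

    assert!(re.is_match("/api/v1/users"));
    assert!(re.is_match("/internal/health"));
    assert!(!re.is_match("/apix/v1"));
}
```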
|
||||
|
||||
/// Constructs a new resource definition using a string pattern that performs prefix matching,
|
||||
|
@ -375,7 +332,7 @@ impl ResourceDef {
|
|||
/// ```
|
||||
pub fn root_prefix(path: &str) -> Self {
|
||||
profile_method!(root_prefix);
|
||||
ResourceDef::prefix(&insert_slash(path))
|
||||
ResourceDef::prefix(insert_slash(path).into_owned())
|
||||
}
|
||||
|
||||
/// Returns a numeric resource ID.
|
||||
|
@ -453,17 +410,14 @@ impl ResourceDef {
|
|||
/// assert!(!ResourceDef::new("/user").is_prefix());
|
||||
/// ```
|
||||
pub fn is_prefix(&self) -> bool {
|
||||
match &self.pat_type {
|
||||
PatternType::Prefix(_) => true,
|
||||
PatternType::Dynamic(re, _) if !re.as_str().ends_with('$') => true,
|
||||
_ => false,
|
||||
}
|
||||
self.is_prefix
|
||||
}
|
||||
|
||||
/// Returns the pattern string that generated the resource definition.
|
||||
///
|
||||
/// Returns `None` if definition was constructed with multiple patterns.
|
||||
/// See [`patterns_iter`][Self::pattern_iter].
|
||||
/// If definition is constructed with multiple patterns, the first pattern is returned. To get
|
||||
/// all patterns, use [`patterns_iter`][Self::pattern_iter]. If resource has 0 patterns,
|
||||
/// returns `None`.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
|
@ -472,11 +426,11 @@ impl ResourceDef {
|
|||
/// assert_eq!(resource.pattern().unwrap(), "/user/{id}");
|
||||
///
|
||||
/// let mut resource = ResourceDef::new(["/profile", "/user/{id}"]);
|
||||
/// assert!(resource.pattern().is_none());
|
||||
/// assert_eq!(resource.pattern(), Some("/profile"));
|
||||
pub fn pattern(&self) -> Option<&str> {
|
||||
match &self.patterns {
|
||||
Patterns::Single(pattern) => Some(pattern.as_str()),
|
||||
Patterns::List(_) => None,
|
||||
Patterns::List(patterns) => patterns.first().map(AsRef::as_ref),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -563,8 +517,8 @@ impl ResourceDef {
|
|||
.collect::<Vec<_>>();
|
||||
|
||||
match patterns.len() {
|
||||
1 => ResourceDef::from_single_pattern(&patterns[0], other.is_prefix()),
|
||||
_ => ResourceDef::new(patterns),
|
||||
1 => ResourceDef::new2(&patterns[0], other.is_prefix()),
|
||||
_ => ResourceDef::new2(patterns, other.is_prefix()),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -609,11 +563,10 @@ impl ResourceDef {
|
|||
// `self.find_match(path).is_some()`
|
||||
// but this skips some checks and uses potentially faster regex methods
|
||||
|
||||
match self.pat_type {
|
||||
PatternType::Static(ref s) => s == path,
|
||||
PatternType::Prefix(ref prefix) => is_prefix(prefix, path),
|
||||
PatternType::Dynamic(ref re, _) => re.is_match(path),
|
||||
PatternType::DynamicSet(ref re, _) => re.is_match(path),
|
||||
match &self.pat_type {
|
||||
PatternType::Static(pattern) => self.static_match(pattern, path).is_some(),
|
||||
PatternType::Dynamic(re, _) => re.is_match(path),
|
||||
PatternType::DynamicSet(re, _) => re.is_match(path),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -656,11 +609,7 @@ impl ResourceDef {
|
|||
profile_method!(find_match);
|
||||
|
||||
match &self.pat_type {
|
||||
PatternType::Static(segment) if path == segment => Some(segment.len()),
|
||||
PatternType::Static(_) => None,
|
||||
|
||||
PatternType::Prefix(prefix) if is_prefix(prefix, path) => Some(prefix.len()),
|
||||
PatternType::Prefix(_) => None,
|
||||
PatternType::Static(pattern) => self.static_match(pattern, path),
|
||||
|
||||
PatternType::Dynamic(re, _) => Some(re.captures(path)?[1].len()),
|
||||
|
||||
|
@ -753,10 +702,10 @@ impl ResourceDef {
|
|||
let path_str = path.path();
|
||||
|
||||
let (matched_len, matched_vars) = match &self.pat_type {
|
||||
PatternType::Static(_) | PatternType::Prefix(_) => {
|
||||
PatternType::Static(pattern) => {
|
||||
profile_section!(pattern_static_or_prefix);
|
||||
|
||||
match self.find_match(path_str) {
|
||||
match self.static_match(pattern, path_str) {
|
||||
Some(len) => (len, None),
|
||||
None => return false,
|
||||
}
|
||||
|
@ -844,13 +793,10 @@ impl ResourceDef {
|
|||
F: FnMut(&str) -> Option<I>,
|
||||
I: AsRef<str>,
|
||||
{
|
||||
for el in match self.segments {
|
||||
Some(ref segments) => segments,
|
||||
None => return false,
|
||||
} {
|
||||
match *el {
|
||||
PatternSegment::Const(ref val) => path.push_str(val),
|
||||
PatternSegment::Var(ref name) => match vars(name) {
|
||||
for segment in &self.segments {
|
||||
match segment {
|
||||
PatternSegment::Const(val) => path.push_str(val),
|
||||
PatternSegment::Var(name) => match vars(name) {
|
||||
Some(val) => path.push_str(val.as_ref()),
|
||||
_ => return false,
|
||||
},
|
||||
|
@ -864,8 +810,8 @@ impl ResourceDef {
|
|||
///
|
||||
/// Returns `true` on success.
|
||||
///
|
||||
/// Resource paths can not be built from multi-pattern resources; this call will always return
|
||||
/// false and will not add anything to the string buffer.
|
||||
/// For multi-pattern resources, the first pattern is used under the assumption that it would be
|
||||
/// equivalent to any other choice.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
|
@ -890,8 +836,8 @@ impl ResourceDef {
|
|||
///
|
||||
/// Returns `true` on success.
|
||||
///
|
||||
/// Resource paths can not be built from multi-pattern resources; this call will always return
|
||||
/// false and will not add anything to the string buffer.
|
||||
/// For multi-pattern resources, the first pattern is used under the assumption that it would be
|
||||
/// equivalent to any other choice.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
|
@ -921,19 +867,69 @@ impl ResourceDef {
|
|||
self.build_resource_path(path, |name| values.get(name).map(AsRef::<str>::as_ref))
|
||||
}
|
||||
|
||||
/// Parse path pattern and create a new instance.
|
||||
fn from_single_pattern(pattern: &str, is_prefix: bool) -> Self {
|
||||
profile_method!(from_single_pattern);
|
||||
/// Tries to match a static `pattern` against `path`, returning the matched length when the match ends at a segment boundary (end of input or a `/`).
|
||||
fn static_match(&self, pattern: &str, path: &str) -> Option<usize> {
|
||||
let rem = path.strip_prefix(pattern)?;
|
||||
|
||||
let pattern = pattern.to_owned();
|
||||
let (pat_type, segments) = ResourceDef::parse(&pattern, is_prefix, false);
|
||||
match self.is_prefix {
|
||||
// resource is not a prefix so an exact match is needed
|
||||
false if rem.is_empty() => Some(pattern.len()),
|
||||
|
||||
// resource is a prefix so rem should start with a path delimiter
|
||||
true if rem.is_empty() || rem.starts_with('/') => Some(pattern.len()),
|
||||
|
||||
// otherwise, no match
|
||||
_ => None,
|
||||
}
|
||||
}
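A short hedged sketch of the boundary rule implemented above (paths are illustrative): a static prefix only matches when the remainder of the path is empty or begins with a path separator.

```rust
use actix_router::ResourceDef;

fn demo() {
    let re = ResourceDef::prefix("/user");

    assert_eq!(re.find_match("/user"), Some(5));
    assert_eq!(re.find_match("/user/123"), Some(5));
    // Shares the bytes but not the segment boundary, so it does not match.
    assert_eq!(re.find_match("/users"), None);
}
```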
|
||||
|
||||
fn new2<T: IntoPatterns>(paths: T, is_prefix: bool) -> Self {
|
||||
profile_method!(new2);
|
||||
|
||||
let patterns = paths.patterns();
|
||||
let (pat_type, segments) = match &patterns {
|
||||
Patterns::Single(pattern) => ResourceDef::parse(pattern, is_prefix, false),
|
||||
|
||||
// since zero length pattern sets are possible
|
||||
// just return a useless `ResourceDef`
|
||||
Patterns::List(patterns) if patterns.is_empty() => (
|
||||
PatternType::DynamicSet(RegexSet::empty(), Vec::new()),
|
||||
Vec::new(),
|
||||
),
|
||||
|
||||
Patterns::List(patterns) => {
|
||||
let mut re_set = Vec::with_capacity(patterns.len());
|
||||
let mut pattern_data = Vec::new();
|
||||
let mut segments = None;
|
||||
|
||||
for pattern in patterns {
|
||||
match ResourceDef::parse(pattern, is_prefix, true) {
|
||||
(PatternType::Dynamic(re, names), segs) => {
|
||||
re_set.push(re.as_str().to_owned());
|
||||
pattern_data.push((re, names));
|
||||
segments.get_or_insert(segs);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
let pattern_re_set = RegexSet::new(re_set).unwrap();
|
||||
let segments = segments.unwrap_or_else(Vec::new);
|
||||
|
||||
(
|
||||
PatternType::DynamicSet(pattern_re_set, pattern_data),
|
||||
segments,
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
ResourceDef {
|
||||
id: 0,
|
||||
name: None,
|
||||
patterns: Patterns::Single(pattern),
|
||||
patterns,
|
||||
is_prefix,
|
||||
pat_type,
|
||||
segments: Some(segments),
|
||||
segments,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1023,20 +1019,15 @@ impl ResourceDef {
|
|||
) -> (PatternType, Vec<PatternSegment>) {
|
||||
profile_method!(parse);
|
||||
|
||||
let mut unprocessed = pattern;
|
||||
|
||||
if !force_dynamic && unprocessed.find('{').is_none() && !unprocessed.ends_with('*') {
|
||||
if !force_dynamic && pattern.find('{').is_none() && !pattern.ends_with('*') {
|
||||
// pattern is static
|
||||
|
||||
let tp = if is_prefix {
|
||||
PatternType::Prefix(unprocessed.to_owned())
|
||||
} else {
|
||||
PatternType::Static(unprocessed.to_owned())
|
||||
};
|
||||
|
||||
return (tp, vec![PatternSegment::Const(unprocessed.to_owned())]);
|
||||
return (
|
||||
PatternType::Static(pattern.to_owned()),
|
||||
vec![PatternSegment::Const(pattern.to_owned())],
|
||||
);
|
||||
}
|
||||
|
||||
let mut unprocessed = pattern;
|
||||
let mut segments = Vec::new();
|
||||
let mut re = format!("{}^", REGEX_FLAGS);
|
||||
let mut dyn_segment_count = 0;
|
||||
|
@ -1137,18 +1128,7 @@ impl Eq for ResourceDef {}
|
|||
|
||||
impl PartialEq for ResourceDef {
|
||||
fn eq(&self, other: &ResourceDef) -> bool {
|
||||
self.patterns == other.patterns
|
||||
&& match &self.pat_type {
|
||||
PatternType::Static(_) => matches!(&other.pat_type, PatternType::Static(_)),
|
||||
PatternType::Prefix(_) => matches!(&other.pat_type, PatternType::Prefix(_)),
|
||||
PatternType::Dynamic(re, _) => match &other.pat_type {
|
||||
PatternType::Dynamic(other_re, _) => re.as_str() == other_re.as_str(),
|
||||
_ => false,
|
||||
},
|
||||
PatternType::DynamicSet(_, _) => {
|
||||
matches!(&other.pat_type, PatternType::DynamicSet(..))
|
||||
}
|
||||
}
|
||||
self.patterns == other.patterns && self.is_prefix == other.is_prefix
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1183,15 +1163,6 @@ pub(crate) fn insert_slash(path: &str) -> Cow<'_, str> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns true if `prefix` acts as a proper prefix (i.e., separated by a slash) in `path`.
|
||||
fn is_prefix(prefix: &str, path: &str) -> bool {
|
||||
match path.strip_prefix(prefix) {
|
||||
// Ensure the match ends at segment boundary
|
||||
Some(rem) if rem.is_empty() || rem.starts_with('/') => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
@ -1376,6 +1347,24 @@ mod tests {
|
|||
assert!(!re.is_match("/user/2345/sdg"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dynamic_set_prefix() {
|
||||
let re = ResourceDef::prefix(vec!["/u/{id}", "/{id:[[:digit:]]{3}}"]);
|
||||
|
||||
assert_eq!(re.find_match("/u/abc"), Some(6));
|
||||
assert_eq!(re.find_match("/u/abc/123"), Some(6));
|
||||
assert_eq!(re.find_match("/s/user/profile"), None);
|
||||
|
||||
assert_eq!(re.find_match("/123"), Some(4));
|
||||
assert_eq!(re.find_match("/123/456"), Some(4));
|
||||
assert_eq!(re.find_match("/12345"), None);
|
||||
|
||||
let mut path = Path::new("/151/res");
|
||||
assert!(re.capture_match_info(&mut path));
|
||||
assert_eq!(path.get("id").unwrap(), "151");
|
||||
assert_eq!(path.unprocessed(), "/res");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_tail() {
|
||||
let re = ResourceDef::new("/user/-{id}*");
|
||||
|
@ -1602,10 +1591,11 @@ mod tests {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn multi_pattern_cannot_build_path() {
|
||||
fn multi_pattern_build_path() {
|
||||
let resource = ResourceDef::new(["/user/{id}", "/profile/{id}"]);
|
||||
let mut s = String::new();
|
||||
assert!(!resource.resource_path_from_iter(&mut s, &mut ["123"].iter()));
|
||||
assert!(resource.resource_path_from_iter(&mut s, &mut ["123"].iter()));
|
||||
assert_eq!(s, "/user/123");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -1738,8 +1728,12 @@ mod tests {
|
|||
|
||||
join_test!("", "" => "", "/hello", "/");
|
||||
join_test!("/user", "" => "", "/user", "/user/123", "/user11", "user", "user/123");
|
||||
join_test!("", "/user"=> "", "/user", "foo", "/user11", "user", "user/123");
|
||||
join_test!("/user", "/xx"=> "", "", "/", "/user", "/xx", "/userxx", "/user/xx");
|
||||
join_test!("", "/user" => "", "/user", "foo", "/user11", "user", "user/123");
|
||||
join_test!("/user", "/xx" => "", "", "/", "/user", "/xx", "/userxx", "/user/xx");
|
||||
|
||||
join_test!(["/ver/{v}", "/v{v}"], ["/req/{req}", "/{req}"] => "/v1/abc",
|
||||
"/ver/1/abc", "/v1/req/abc", "/ver/1/req/abc", "/v1/abc/def",
|
||||
"/ver1/req/abc/def", "", "/", "/v1/");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -1777,6 +1771,7 @@ mod tests {
|
|||
match_methods_agree!(prefix "" => "", "/", "/foo");
|
||||
match_methods_agree!(prefix "/user" => "user", "/user", "/users", "/user/123", "/foo");
|
||||
match_methods_agree!(prefix r"/id/{id:\d{3}}" => "/id/123", "/id/1234");
|
||||
match_methods_agree!(["/v{v}", "/ver/{v}"] => "", "s/v", "/v1", "/v1/xx", "/ver/i3/5", "/ver/1");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@@ -1,6 +1,9 @@
# Changes

## Unreleased - 2021-xx-xx


## 0.1.0-beta.4 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.


@@ -1,6 +1,6 @@
[package]
name = "actix-test"
version = "0.1.0-beta.3"
version = "0.1.0-beta.4"
authors = [
  "Nikolay Kim <fafhrd91@gmail.com>",
  "Rob Ede <robjtede@icloud.com>",

@@ -20,13 +20,13 @@ openssl = ["tls-openssl", "actix-http/openssl"]

[dependencies]
actix-codec = "0.4.0"
actix-http = "3.0.0-beta.8"
actix-http-test = { version = "3.0.0-beta.4", features = [] }
actix-http = "3.0.0-beta.10"
actix-http-test = "3.0.0-beta.5"
actix-service = "2.0.0"
actix-utils = "3.0.0"
actix-web = { version = "4.0.0-beta.8", default-features = false, features = ["cookies"] }
actix-web = { version = "4.0.0-beta.9", default-features = false, features = ["cookies"] }
actix-rt = "2.1"
awc = { version = "3.0.0-beta.7", default-features = false, features = ["cookies"] }
awc = { version = "3.0.0-beta.8", default-features = false, features = ["cookies"] }

futures-core = { version = "0.3.7", default-features = false, features = ["std"] }
futures-util = { version = "0.3.7", default-features = false, features = [] }
@ -1,6 +1,9 @@
|
|||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 4.0.0-beta.7 - 2021-09-09
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "actix-web-actors"
|
||||
version = "4.0.0-beta.6"
|
||||
version = "4.0.0-beta.7"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "Actix actors support for Actix Web"
|
||||
keywords = ["actix", "http", "web", "framework", "async"]
|
||||
|
@ -16,8 +16,8 @@ path = "src/lib.rs"
|
|||
[dependencies]
|
||||
actix = { version = "0.12.0", default-features = false }
|
||||
actix-codec = "0.4.0"
|
||||
actix-http = "3.0.0-beta.8"
|
||||
actix-web = { version = "4.0.0-beta.8", default-features = false }
|
||||
actix-http = "3.0.0-beta.10"
|
||||
actix-web = { version = "4.0.0-beta.9", default-features = false }
|
||||
|
||||
bytes = "1"
|
||||
bytestring = "1"
|
||||
|
@ -29,6 +29,6 @@ tokio = { version = "1", features = ["sync"] }
|
|||
actix-rt = "2.2"
|
||||
actix-test = "0.1.0-beta.3"
|
||||
|
||||
awc = { version = "3.0.0-beta.7", default-features = false }
|
||||
awc = { version = "3.0.0-beta.8", default-features = false }
|
||||
env_logger = "0.8"
|
||||
futures-util = { version = "0.3.7", default-features = false }
|
||||
|
|
|
@ -3,11 +3,11 @@
|
|||
> Actix actors support for Actix Web.
|
||||
|
||||
[](https://crates.io/crates/actix-web-actors)
|
||||
[](https://docs.rs/actix-web-actors/4.0.0-beta.6)
|
||||
[](https://docs.rs/actix-web-actors/4.0.0-beta.7)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-web-actors/4.0.0-beta.6)
|
||||
[](https://deps.rs/crate/actix-web-actors/4.0.0-beta.7)
|
||||
[](https://crates.io/crates/actix-web-actors)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
# Changes
|
||||
|
||||
## Unreleased - 2021-xx-xx
|
||||
|
||||
|
||||
## 0.5.0-beta.4 - 2021-09-09
|
||||
* In routing macros, paths are now validated at compile time. [#2350]
|
||||
* Minimum supported Rust version (MSRV) is now 1.51.
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "actix-web-codegen"
|
||||
version = "0.5.0-beta.3"
|
||||
version = "0.5.0-beta.4"
|
||||
description = "Routing and runtime macros for Actix Web"
|
||||
readme = "README.md"
|
||||
homepage = "https://actix.rs"
|
||||
|
@ -17,13 +17,13 @@ proc-macro = true
|
|||
quote = "1"
|
||||
syn = { version = "1", features = ["full", "parsing"] }
|
||||
proc-macro2 = "1"
|
||||
actix-router = "0.5.0-beta.1"
|
||||
actix-router = "0.5.0-beta.2"
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.2"
|
||||
actix-test = "0.1.0-beta.3"
|
||||
actix-utils = "3.0.0"
|
||||
actix-web = "4.0.0-beta.8"
|
||||
actix-web = "4.0.0-beta.9"
|
||||
|
||||
futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] }
|
||||
trybuild = "1"
|
||||
|
|
|
@ -3,11 +3,11 @@
|
|||
> Routing and runtime macros for Actix Web.
|
||||
|
||||
[](https://crates.io/crates/actix-web-codegen)
|
||||
[](https://docs.rs/actix-web-codegen/0.5.0-beta.3)
|
||||
[](https://docs.rs/actix-web-codegen/0.5.0-beta.4)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.3)
|
||||
[](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.4)
|
||||
[](https://crates.io/crates/actix-web-codegen)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
|
|
|
@@ -3,6 +3,13 @@
## Unreleased - 2021-xx-xx


## 3.0.0-beta.8 - 2021-09-09
### Changed
* Send headers within the redirect requests. [#2310]

[#2310]: https://github.com/actix/actix-web/pull/2310


## 3.0.0-beta.7 - 2021-06-26
### Changed
* Change compression algorithm features flags. [#2250]
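As a rough usage sketch of #2310 (the URL and header value here are placeholders, not part of the commit): headers configured on the client are now re-sent on redirected requests, while `Cookie`, `Authorization`, and `Proxy-Authorization` are dropped when the redirect crosses to a different origin.

```rust
use awc::ClientBuilder;

#[actix_rt::main]
async fn main() {
    // A client-wide header, e.g. an auth token (placeholder value).
    let client = ClientBuilder::new()
        .header("Authorization", "Bearer example-token")
        .finish();

    // If "/start" answers 302 -> "/final" on the same origin, the follow-up
    // request now carries the Authorization header as well.
    let res = client.get("http://localhost:8080/start").send().await;
    println!("final status: {:?}", res.map(|r| r.status()));
}
```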
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "awc"
|
||||
version = "3.0.0-beta.7"
|
||||
version = "3.0.0-beta.8"
|
||||
authors = [
|
||||
"Nikolay Kim <fafhrd91@gmail.com>",
|
||||
"fakeshadow <24548779@qq.com>",
|
||||
|
@ -55,7 +55,7 @@ __compress = []
|
|||
[dependencies]
|
||||
actix-codec = "0.4.0"
|
||||
actix-service = "2.0.0"
|
||||
actix-http = "3.0.0-beta.8"
|
||||
actix-http = "3.0.0-beta.10"
|
||||
actix-rt = { version = "2.1", default-features = false }
|
||||
|
||||
base64 = "0.13"
|
||||
|
@ -77,9 +77,9 @@ tls-openssl = { version = "0.10.9", package = "openssl", optional = true }
|
|||
tls-rustls = { version = "0.19.0", package = "rustls", optional = true, features = ["dangerous_configuration"] }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-web = { version = "4.0.0-beta.8", features = ["openssl"] }
|
||||
actix-http = { version = "3.0.0-beta.8", features = ["openssl"] }
|
||||
actix-http-test = { version = "3.0.0-beta.4", features = ["openssl"] }
|
||||
actix-web = { version = "4.0.0-beta.9", features = ["openssl"] }
|
||||
actix-http = { version = "3.0.0-beta.10", features = ["openssl"] }
|
||||
actix-http-test = { version = "3.0.0-beta.5", features = ["openssl"] }
|
||||
actix-utils = "3.0.0"
|
||||
actix-server = "2.0.0-beta.3"
|
||||
actix-tls = { version = "3.0.0-beta.5", features = ["openssl", "rustls"] }
|
||||
|
|
|
@ -3,9 +3,9 @@
|
|||
> Async HTTP and WebSocket client library.
|
||||
|
||||
[](https://crates.io/crates/awc)
|
||||
[](https://docs.rs/awc/3.0.0-beta.7)
|
||||
[](https://docs.rs/awc/3.0.0-beta.8)
|
||||

|
||||
[](https://deps.rs/crate/awc/3.0.0-beta.7)
|
||||
[](https://deps.rs/crate/awc/3.0.0-beta.8)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
## Documentation & Resources
|
||||
|
|
|
@@ -85,10 +85,12 @@ where
        let max_redirect_times = self.max_redirect_times;

        // backup the uri and method for reuse schema and authority.
        let (uri, method) = match head {
            RequestHeadType::Owned(ref head) => (head.uri.clone(), head.method.clone()),
        let (uri, method, headers) = match head {
            RequestHeadType::Owned(ref head) => {
                (head.uri.clone(), head.method.clone(), head.headers.clone())
            }
            RequestHeadType::Rc(ref head, ..) => {
                (head.uri.clone(), head.method.clone())
                (head.uri.clone(), head.method.clone(), head.headers.clone())
            }
        };

@@ -104,6 +106,7 @@ where
            max_redirect_times,
            uri: Some(uri),
            method: Some(method),
            headers: Some(headers),
            body: body_opt,
            addr,
            connector: Some(connector),

@@ -127,9 +130,10 @@ pin_project_lite::pin_project! {
            max_redirect_times: u8,
            uri: Option<Uri>,
            method: Option<Method>,
            headers: Option<header::HeaderMap>,
            body: Option<Bytes>,
            addr: Option<SocketAddr>,
            connector: Option<Rc<S>>
            connector: Option<Rc<S>>,
        }
    }
}

@@ -148,6 +152,7 @@ where
            max_redirect_times,
            uri,
            method,
            headers,
            body,
            addr,
            connector,
@ -156,79 +161,60 @@ where
|
|||
StatusCode::MOVED_PERMANENTLY
|
||||
| StatusCode::FOUND
|
||||
| StatusCode::SEE_OTHER
|
||||
| StatusCode::TEMPORARY_REDIRECT
|
||||
| StatusCode::PERMANENT_REDIRECT
|
||||
if *max_redirect_times > 0 =>
|
||||
{
|
||||
let org_uri = uri.take().unwrap();
|
||||
// rebuild uri from the location header value.
|
||||
let uri = rebuild_uri(&res, org_uri)?;
|
||||
let is_redirect = res.head().status == StatusCode::TEMPORARY_REDIRECT
|
||||
|| res.head().status == StatusCode::PERMANENT_REDIRECT;
|
||||
|
||||
// reset method
|
||||
let method = method.take().unwrap();
|
||||
let method = match method {
|
||||
Method::GET | Method::HEAD => method,
|
||||
_ => Method::GET,
|
||||
};
|
||||
let prev_uri = uri.take().unwrap();
|
||||
|
||||
// rebuild uri from the location header value.
|
||||
let next_uri = build_next_uri(&res, &prev_uri)?;
|
||||
|
||||
// take ownership of states that could be reused
|
||||
let addr = addr.take();
|
||||
let connector = connector.take();
|
||||
let mut max_redirect_times = *max_redirect_times;
|
||||
|
||||
// use a new request head.
|
||||
let mut head = RequestHead::default();
|
||||
head.uri = uri.clone();
|
||||
head.method = method.clone();
|
||||
|
||||
let head = RequestHeadType::Owned(head);
|
||||
|
||||
max_redirect_times -= 1;
|
||||
|
||||
let fut = connector
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
// remove body
|
||||
.call(ConnectRequest::Client(head, Body::None, addr));
|
||||
|
||||
self.set(RedirectServiceFuture::Client {
|
||||
fut,
|
||||
max_redirect_times,
|
||||
uri: Some(uri),
|
||||
method: Some(method),
|
||||
// body is dropped on 301,302,303
|
||||
body: None,
|
||||
addr,
|
||||
connector,
|
||||
});
|
||||
|
||||
self.poll(cx)
|
||||
// reset method
|
||||
let method = if is_redirect {
|
||||
method.take().unwrap()
|
||||
} else {
|
||||
let method = method.take().unwrap();
|
||||
match method {
|
||||
Method::GET | Method::HEAD => method,
|
||||
_ => Method::GET,
|
||||
}
|
||||
StatusCode::TEMPORARY_REDIRECT | StatusCode::PERMANENT_REDIRECT
|
||||
if *max_redirect_times > 0 =>
|
||||
{
|
||||
let org_uri = uri.take().unwrap();
|
||||
// rebuild uri from the location header value.
|
||||
let uri = rebuild_uri(&res, org_uri)?;
|
||||
};
|
||||
|
||||
let mut body = body.take();
|
||||
let body_new = if is_redirect {
|
||||
// try to reuse body
|
||||
let body = body.take();
|
||||
let body_new = match body {
|
||||
match body {
|
||||
Some(ref bytes) => Body::Bytes(bytes.clone()),
|
||||
// TODO: should this be Body::Empty or Body::None.
|
||||
_ => Body::Empty,
|
||||
}
|
||||
} else {
|
||||
body = None;
|
||||
// remove body
|
||||
Body::None
|
||||
};
|
||||
|
||||
let addr = addr.take();
|
||||
let method = method.take().unwrap();
|
||||
let connector = connector.take();
|
||||
let mut max_redirect_times = *max_redirect_times;
|
||||
let mut headers = headers.take().unwrap();
|
||||
|
||||
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
|
||||
|
||||
// use a new request head.
|
||||
let mut head = RequestHead::default();
|
||||
head.uri = uri.clone();
|
||||
head.uri = next_uri.clone();
|
||||
head.method = method.clone();
|
||||
head.headers = headers.clone();
|
||||
|
||||
let head = RequestHeadType::Owned(head);
|
||||
|
||||
let mut max_redirect_times = *max_redirect_times;
|
||||
max_redirect_times -= 1;
|
||||
|
||||
let fut = connector
|
||||
|
@ -239,8 +225,9 @@ where
|
|||
self.set(RedirectServiceFuture::Client {
|
||||
fut,
|
||||
max_redirect_times,
|
||||
uri: Some(uri),
|
||||
uri: Some(next_uri),
|
||||
method: Some(method),
|
||||
headers: Some(headers),
|
||||
body,
|
||||
addr,
|
||||
connector,
|
||||
|
@@ -256,7 +243,7 @@ where
    }
}

fn rebuild_uri(res: &ClientResponse, org_uri: Uri) -> Result<Uri, SendRequestError> {
fn build_next_uri(res: &ClientResponse, prev_uri: &Uri) -> Result<Uri, SendRequestError> {
    let uri = res
        .headers()
        .get(header::LOCATION)

@@ -266,8 +253,8 @@ fn rebuild_uri(res: &ClientResponse, org_uri: Uri) -> Result<Uri, SendRequestErr
        .map_err(|e| SendRequestError::Url(InvalidUrl::HttpError(e.into())))?;
    if uri.scheme().is_none() || uri.authority().is_none() {
        let uri = Uri::builder()
            .scheme(org_uri.scheme().cloned().unwrap())
            .authority(org_uri.authority().cloned().unwrap())
            .scheme(prev_uri.scheme().cloned().unwrap())
            .authority(prev_uri.authority().cloned().unwrap())
            .path_and_query(value.as_bytes())
            .build()?;
        Ok::<_, SendRequestError>(uri)

@@ -281,12 +268,25 @@ fn rebuild_uri(res: &ClientResponse, org_uri: Uri) -> Result<Uri, SendRequestErr
    Ok(uri)
}

fn remove_sensitive_headers(headers: &mut header::HeaderMap, prev_uri: &Uri, next_uri: &Uri) {
    if next_uri.host() != prev_uri.host()
        || next_uri.port() != prev_uri.port()
        || next_uri.scheme() != prev_uri.scheme()
    {
        headers.remove(header::COOKIE);
        headers.remove(header::AUTHORIZATION);
        headers.remove(header::PROXY_AUTHORIZATION);
    }
}

#[cfg(test)]
mod tests {
    use actix_web::{web, App, Error, HttpResponse};
    use actix_web::{web, App, Error, HttpRequest, HttpResponse};

    use super::*;
    use crate::http::HeaderValue;
    use crate::ClientBuilder;
    use std::str::FromStr;

    #[actix_rt::test]
    async fn test_basic_redirect() {
@ -347,4 +347,239 @@ mod tests {
|
|||
|
||||
assert_eq!(res.status().as_u16(), 302);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_redirect_status_kind_307_308() {
|
||||
let srv = actix_test::start(|| {
|
||||
async fn root() -> HttpResponse {
|
||||
HttpResponse::TemporaryRedirect()
|
||||
.append_header(("location", "/test"))
|
||||
.finish()
|
||||
}
|
||||
|
||||
async fn test(req: HttpRequest, body: Bytes) -> HttpResponse {
|
||||
if req.method() == Method::POST && !body.is_empty() {
|
||||
HttpResponse::Ok().finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
App::new()
|
||||
.service(web::resource("/").route(web::to(root)))
|
||||
.service(web::resource("/test").route(web::to(test)))
|
||||
});
|
||||
|
||||
let res = srv.post("/").send_body("Hello").await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_redirect_status_kind_301_302_303() {
|
||||
let srv = actix_test::start(|| {
|
||||
async fn root() -> HttpResponse {
|
||||
HttpResponse::Found()
|
||||
.append_header(("location", "/test"))
|
||||
.finish()
|
||||
}
|
||||
|
||||
async fn test(req: HttpRequest, body: Bytes) -> HttpResponse {
|
||||
if (req.method() == Method::GET || req.method() == Method::HEAD)
|
||||
&& body.is_empty()
|
||||
{
|
||||
HttpResponse::Ok().finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
App::new()
|
||||
.service(web::resource("/").route(web::to(root)))
|
||||
.service(web::resource("/test").route(web::to(test)))
|
||||
});
|
||||
|
||||
let res = srv.post("/").send_body("Hello").await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
|
||||
let res = srv.post("/").send().await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_redirect_headers() {
|
||||
let srv = actix_test::start(|| {
|
||||
async fn root(req: HttpRequest) -> HttpResponse {
|
||||
if req
|
||||
.headers()
|
||||
.get("custom")
|
||||
.unwrap_or(&HeaderValue::from_str("").unwrap())
|
||||
== "value"
|
||||
{
|
||||
HttpResponse::Found()
|
||||
.append_header(("location", "/test"))
|
||||
.finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
async fn test(req: HttpRequest) -> HttpResponse {
|
||||
if req
|
||||
.headers()
|
||||
.get("custom")
|
||||
.unwrap_or(&HeaderValue::from_str("").unwrap())
|
||||
== "value"
|
||||
{
|
||||
HttpResponse::Ok().finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
App::new()
|
||||
.service(web::resource("/").route(web::to(root)))
|
||||
.service(web::resource("/test").route(web::to(test)))
|
||||
});
|
||||
|
||||
let client = ClientBuilder::new()
|
||||
.header("custom", "value")
|
||||
.disable_redirects()
|
||||
.finish();
|
||||
let res = client.get(srv.url("/")).send().await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 302);
|
||||
|
||||
let client = ClientBuilder::new().header("custom", "value").finish();
|
||||
let res = client.get(srv.url("/")).send().await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
|
||||
let client = ClientBuilder::new().finish();
|
||||
let res = client
|
||||
.get(srv.url("/"))
|
||||
.insert_header(("custom", "value"))
|
||||
.send()
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_redirect_cross_origin_headers() {
|
||||
// defining two services to have two different origins
|
||||
let srv2 = actix_test::start(|| {
|
||||
async fn root(req: HttpRequest) -> HttpResponse {
|
||||
if req.headers().get(header::AUTHORIZATION).is_none() {
|
||||
HttpResponse::Ok().finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
App::new().service(web::resource("/").route(web::to(root)))
|
||||
});
|
||||
let srv2_port: u16 = srv2.addr().port();
|
||||
|
||||
let srv1 = actix_test::start(move || {
|
||||
async fn root(req: HttpRequest) -> HttpResponse {
|
||||
let port = *req.app_data::<u16>().unwrap();
|
||||
if req.headers().get(header::AUTHORIZATION).is_some() {
|
||||
HttpResponse::Found()
|
||||
.append_header((
|
||||
"location",
|
||||
format!("http://localhost:{}/", port).as_str(),
|
||||
))
|
||||
.finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
async fn test1(req: HttpRequest) -> HttpResponse {
|
||||
if req.headers().get(header::AUTHORIZATION).is_some() {
|
||||
HttpResponse::Found()
|
||||
.append_header(("location", "/test2"))
|
||||
.finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
async fn test2(req: HttpRequest) -> HttpResponse {
|
||||
if req.headers().get(header::AUTHORIZATION).is_some() {
|
||||
HttpResponse::Ok().finish()
|
||||
} else {
|
||||
HttpResponse::InternalServerError().finish()
|
||||
}
|
||||
}
|
||||
|
||||
App::new()
|
||||
.app_data(srv2_port)
|
||||
.service(web::resource("/").route(web::to(root)))
|
||||
.service(web::resource("/test1").route(web::to(test1)))
|
||||
.service(web::resource("/test2").route(web::to(test2)))
|
||||
});
|
||||
|
||||
// send a request to different origins, http://srv1/ then http://srv2/. So it should remove the header
|
||||
let client = ClientBuilder::new()
|
||||
.header(header::AUTHORIZATION, "auth_key_value")
|
||||
.finish();
|
||||
let res = client.get(srv1.url("/")).send().await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
|
||||
// send a request to same origin, http://srv1/test1 then http://srv1/test2. So it should NOT remove any header
|
||||
let res = client.get(srv1.url("/test1")).send().await.unwrap();
|
||||
assert_eq!(res.status().as_u16(), 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_remove_sensitive_headers() {
|
||||
fn gen_headers() -> header::HeaderMap {
|
||||
let mut headers = header::HeaderMap::new();
|
||||
headers.insert(header::USER_AGENT, HeaderValue::from_str("value").unwrap());
|
||||
headers.insert(
|
||||
header::AUTHORIZATION,
|
||||
HeaderValue::from_str("value").unwrap(),
|
||||
);
|
||||
headers.insert(
|
||||
header::PROXY_AUTHORIZATION,
|
||||
HeaderValue::from_str("value").unwrap(),
|
||||
);
|
||||
headers.insert(header::COOKIE, HeaderValue::from_str("value").unwrap());
|
||||
headers
|
||||
}
|
||||
|
||||
// Same origin
|
||||
let prev_uri = Uri::from_str("https://host/path1").unwrap();
|
||||
let next_uri = Uri::from_str("https://host/path2").unwrap();
|
||||
let mut headers = gen_headers();
|
||||
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
|
||||
assert_eq!(headers.len(), 4);
|
||||
|
||||
// different schema
|
||||
let prev_uri = Uri::from_str("http://host/").unwrap();
|
||||
let next_uri = Uri::from_str("https://host/").unwrap();
|
||||
let mut headers = gen_headers();
|
||||
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
|
||||
assert_eq!(headers.len(), 1);
|
||||
|
||||
// different host
|
||||
let prev_uri = Uri::from_str("https://host1/").unwrap();
|
||||
let next_uri = Uri::from_str("https://host2/").unwrap();
|
||||
let mut headers = gen_headers();
|
||||
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
|
||||
assert_eq!(headers.len(), 1);
|
||||
|
||||
// different port
|
||||
let prev_uri = Uri::from_str("https://host:12/").unwrap();
|
||||
let next_uri = Uri::from_str("https://host:23/").unwrap();
|
||||
let mut headers = gen_headers();
|
||||
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
|
||||
assert_eq!(headers.len(), 1);
|
||||
|
||||
// different everything!
|
||||
let prev_uri = Uri::from_str("http://host1:12/path1").unwrap();
|
||||
let next_uri = Uri::from_str("https://host2:23/path2").unwrap();
|
||||
let mut headers = gen_headers();
|
||||
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
|
||||
assert_eq!(headers.len(), 1);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -79,7 +79,7 @@ where
            .into_iter()
            .for_each(|mut srv| srv.register(&mut config));

        let mut rmap = ResourceMap::new(ResourceDef::new(""));
        let mut rmap = ResourceMap::new(ResourceDef::prefix(""));

        let (config, services) = config.into_services();


@@ -104,7 +104,7 @@ where

        // complete ResourceMap tree creation
        let rmap = Rc::new(rmap);
        rmap.finish(rmap.clone());
        ResourceMap::finish(&rmap);

        // construct all async data factory futures
        let factory_futs = join_all(self.async_data_factories.iter().map(|f| f()));

@@ -18,7 +18,7 @@ pub use actix_http::body::{AnyBody, Body, BodySize, MessageBody, ResponseBody, S

#[cfg(feature = "__compress")]
pub use actix_http::encoding::Decoder as Decompress;
pub use actix_http::{Extensions, Payload, PayloadStream, RequestHead, ResponseHead};
pub use actix_http::{Extensions, Payload, PayloadStream, RequestHead, Response, ResponseHead};
pub use actix_router::{Path, ResourceDef, ResourcePath, Url};
pub use actix_server::Server;
pub use actix_service::{

@@ -26,7 +26,7 @@ pub use actix_service::{
};

use crate::http::header::ContentEncoding;
use actix_http::{Response, ResponseBuilder};
use actix_http::ResponseBuilder;

use actix_router::Patterns;
@@ -1,10 +1,10 @@
//! # References
//!
//! "The Content-Disposition Header Field" https://www.ietf.org/rfc/rfc2183.txt
//! "The Content-Disposition Header Field in the Hypertext Transfer Protocol (HTTP)" https://www.ietf.org/rfc/rfc6266.txt
//! "Returning Values from Forms: multipart/form-data" https://www.ietf.org/rfc/rfc7578.txt
//! Browser conformance tests at: http://greenbytes.de/tech/tc2231/
//! IANA assignment: http://www.iana.org/assignments/cont-disp/cont-disp.xhtml
//! "The Content-Disposition Header Field" <https://www.ietf.org/rfc/rfc2183.txt>
//! "The Content-Disposition Header Field in the Hypertext Transfer Protocol (HTTP)" <https://www.ietf.org/rfc/rfc6266.txt>
//! "Returning Values from Forms: multipart/form-data" <https://www.ietf.org/rfc/rfc7578.txt>
//! Browser conformance tests at: <http://greenbytes.de/tech/tc2231/>
//! IANA assignment: <http://www.iana.org/assignments/cont-disp/cont-disp.xhtml>

use once_cell::sync::Lazy;
use regex::Regex;

@@ -96,7 +96,6 @@ pub mod test;
pub(crate) mod types;
pub mod web;

pub use actix_http::Response as BaseHttpResponse;
pub use actix_http::{body, HttpMessage};
#[doc(inline)]
pub use actix_rt as rt;
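A small migration sketch for code affected by the re-export changes above (the alias name comes from the removed line; the type alias below is only illustrative):

```rust
// Before this commit the raw actix-http response type was re-exported at the
// crate root:
//
//     use actix_web::BaseHttpResponse;
//
// After it, the same type is reachable through the `dev` module instead:
use actix_web::dev::Response;

// Signatures that previously named `BaseHttpResponse<B>` can now be written
// against `dev::Response<B>`.
type RawResponse<B> = Response<B>;
```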
@ -2,10 +2,10 @@
|
|||
|
||||
use std::{
|
||||
cmp,
|
||||
convert::TryFrom,
|
||||
future::Future,
|
||||
marker::PhantomData,
|
||||
pin::Pin,
|
||||
str::FromStr,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
|
||||
|
@ -13,16 +13,18 @@ use actix_http::{
|
|||
body::{MessageBody, ResponseBody},
|
||||
encoding::Encoder,
|
||||
http::header::{ContentEncoding, ACCEPT_ENCODING},
|
||||
StatusCode,
|
||||
};
|
||||
use actix_service::{Service, Transform};
|
||||
use actix_utils::future::{ok, Ready};
|
||||
use actix_utils::future::{ok, Either, Ready};
|
||||
use futures_core::ready;
|
||||
use once_cell::sync::Lazy;
|
||||
use pin_project::pin_project;
|
||||
|
||||
use crate::{
|
||||
dev::BodyEncoding,
|
||||
service::{ServiceRequest, ServiceResponse},
|
||||
Error,
|
||||
Error, HttpResponse,
|
||||
};
|
||||
|
||||
/// Middleware for compressing response payloads.
|
||||
|
@@ -78,34 +80,78 @@ pub struct CompressMiddleware<S> {
    encoding: ContentEncoding,
}

static SUPPORTED_ALGORITHM_NAMES: Lazy<String> = Lazy::new(|| {
    let mut encoding = vec![];

    #[cfg(feature = "compress-brotli")]
    {
        encoding.push("br");
    }

    #[cfg(feature = "compress-gzip")]
    {
        encoding.push("gzip");
        encoding.push("deflate");
    }

    #[cfg(feature = "compress-zstd")]
    encoding.push("zstd");

    assert!(
        !encoding.is_empty(),
        "encoding can not be empty unless __compress feature has been explicitly enabled by itself"
    );

    encoding.join(", ")
});
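A hedged sketch of the new negotiation outcome (the route and handler below are illustrative, not from this diff): when a client rules out every encoding the build supports, the middleware now short-circuits with 406 Not Acceptable and uses the list built above as the response body.

```rust
use actix_web::{middleware::Compress, web, App, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            .wrap(Compress::default())
            .route("/", web::get().to(|| async { "hello" }))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}

// A request such as
//     GET / HTTP/1.1
//     Accept-Encoding: compress, identity;q=0
// cannot be satisfied (identity is disabled and `compress` is unsupported), so the
// middleware replies `406 Not Acceptable` with a body like "br, gzip, deflate, zstd".
```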
impl<S, B> Service<ServiceRequest> for CompressMiddleware<S>
|
||||
where
|
||||
B: MessageBody,
|
||||
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
|
||||
B: MessageBody,
|
||||
{
|
||||
type Response = ServiceResponse<ResponseBody<Encoder<B>>>;
|
||||
type Error = Error;
|
||||
type Future = CompressResponse<S, B>;
|
||||
type Future = Either<CompressResponse<S, B>, Ready<Result<Self::Response, Self::Error>>>;
|
||||
|
||||
actix_service::forward_ready!(service);
|
||||
|
||||
#[allow(clippy::borrow_interior_mutable_const)]
|
||||
fn call(&self, req: ServiceRequest) -> Self::Future {
|
||||
// negotiate content-encoding
|
||||
let encoding = if let Some(val) = req.headers().get(&ACCEPT_ENCODING) {
|
||||
if let Ok(enc) = val.to_str() {
|
||||
AcceptEncoding::parse(enc, self.encoding)
|
||||
} else {
|
||||
ContentEncoding::Identity
|
||||
}
|
||||
} else {
|
||||
ContentEncoding::Identity
|
||||
};
|
||||
let encoding_result = req
|
||||
.headers()
|
||||
.get(&ACCEPT_ENCODING)
|
||||
.and_then(|val| val.to_str().ok())
|
||||
.map(|enc| AcceptEncoding::try_parse(enc, self.encoding));
|
||||
|
||||
CompressResponse {
|
||||
match encoding_result {
|
||||
// Missing header => fallback to identity
|
||||
None => Either::left(CompressResponse {
|
||||
encoding: ContentEncoding::Identity,
|
||||
fut: self.service.call(req),
|
||||
_phantom: PhantomData,
|
||||
}),
|
||||
|
||||
// Valid encoding
|
||||
Some(Ok(encoding)) => Either::left(CompressResponse {
|
||||
encoding,
|
||||
fut: self.service.call(req),
|
||||
_phantom: PhantomData,
|
||||
}),
|
||||
|
||||
// There is an HTTP header but we cannot match what client as asked for
|
||||
Some(Err(_)) => {
|
||||
let res = HttpResponse::with_body(
|
||||
StatusCode::NOT_ACCEPTABLE,
|
||||
SUPPORTED_ALGORITHM_NAMES.as_str(),
|
||||
);
|
||||
let enc = ContentEncoding::Identity;
|
||||
|
||||
Either::right(ok(req.into_response(res.map_body(move |head, body| {
|
||||
Encoder::response(enc, head, ResponseBody::Other(body.into()))
|
||||
}))))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -114,7 +160,6 @@ where
|
|||
pub struct CompressResponse<S, B>
|
||||
where
|
||||
S: Service<ServiceRequest>,
|
||||
B: MessageBody,
|
||||
{
|
||||
#[pin]
|
||||
fut: S::Future,
|
||||
|
@ -151,6 +196,7 @@ where
|
|||
|
||||
struct AcceptEncoding {
|
||||
encoding: ContentEncoding,
|
||||
// TODO: use Quality or QualityItem<ContentEncoding>
|
||||
quality: f64,
|
||||
}
|
||||
|
||||
|
@@ -177,26 +223,56 @@ impl PartialOrd for AcceptEncoding {

impl PartialEq for AcceptEncoding {
    fn eq(&self, other: &AcceptEncoding) -> bool {
        self.quality == other.quality
        self.encoding == other.encoding && self.quality == other.quality
    }
}

/// Parse q-factor from quality strings.
///
/// If parse fail, then fallback to default value which is 1.
/// More details available here: <https://developer.mozilla.org/en-US/docs/Glossary/Quality_values>
fn parse_quality(parts: &[&str]) -> f64 {
    for part in parts {
        if part.trim().starts_with("q=") {
            return part[2..].parse().unwrap_or(1.0);
        }
    }

    1.0
}
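A few illustrative evaluations of the helper above, assuming it is called the way `AcceptEncoding::new` calls it (whitespace is already stripped from the raw header before splitting); these are not tests from the commit, and they would only compile inside this module since `parse_quality` is private.

```rust
// `parts` holds everything after the first ';' of one Accept-Encoding item.
assert_eq!(parse_quality(&["q=0.5"]), 0.5);            // explicit q-factor
assert_eq!(parse_quality(&["level=1", "q=0.3"]), 0.3); // q may follow other params
assert_eq!(parse_quality(&["q=abc"]), 1.0);            // unparsable value falls back to 1.0
assert_eq!(parse_quality(&[]), 1.0);                   // absent q-factor defaults to 1.0
```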
#[derive(Debug, PartialEq, Eq)]
enum AcceptEncodingError {
    /// This error occurs when client only support compressed response and server do not have any
    /// algorithm that match client accepted algorithms.
    CompressionAlgorithmMismatch,
}

impl AcceptEncoding {
    fn new(tag: &str) -> Option<AcceptEncoding> {
        let parts: Vec<&str> = tag.split(';').collect();
        let encoding = match parts.len() {
            0 => return None,
            _ => ContentEncoding::from(parts[0]),
        };
        let quality = match parts.len() {
            1 => encoding.quality(),
            _ => f64::from_str(parts[1]).unwrap_or(0.0),
            _ => match ContentEncoding::try_from(parts[0]) {
                Err(_) => return None,
                Ok(x) => x,
            },
        };

        let quality = parse_quality(&parts[1..]);
        if quality <= 0.0 || quality > 1.0 {
            return None;
        }

        Some(AcceptEncoding { encoding, quality })
    }
/// Parse a raw Accept-Encoding header value into an ordered list.
|
||||
pub fn parse(raw: &str, encoding: ContentEncoding) -> ContentEncoding {
|
||||
/// Parse a raw Accept-Encoding header value into an ordered list then return the best match
|
||||
/// based on middleware configuration.
|
||||
pub fn try_parse(
|
||||
raw: &str,
|
||||
encoding: ContentEncoding,
|
||||
) -> Result<ContentEncoding, AcceptEncodingError> {
|
||||
let mut encodings = raw
|
||||
.replace(' ', "")
|
||||
.split(',')
|
||||
|
@ -206,13 +282,90 @@ impl AcceptEncoding {
|
|||
encodings.sort();
|
||||
|
||||
for enc in encodings {
|
||||
if encoding == ContentEncoding::Auto {
|
||||
return enc.encoding;
|
||||
} else if encoding == enc.encoding {
|
||||
return encoding;
|
||||
if encoding == ContentEncoding::Auto || encoding == enc.encoding {
|
||||
return Ok(enc.encoding);
|
||||
}
|
||||
}
|
||||
|
||||
ContentEncoding::Identity
|
||||
// Special case if user cannot accept uncompressed data.
|
||||
// See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding
|
||||
// TODO: account for whitespace
|
||||
if raw.contains("*;q=0") || raw.contains("identity;q=0") {
|
||||
return Err(AcceptEncodingError::CompressionAlgorithmMismatch);
|
||||
}
|
||||
|
||||
Ok(ContentEncoding::Identity)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
macro_rules! assert_parse_eq {
|
||||
($raw:expr, $result:expr) => {
|
||||
assert_eq!(
|
||||
AcceptEncoding::try_parse($raw, ContentEncoding::Auto),
|
||||
Ok($result)
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! assert_parse_fail {
|
||||
($raw:expr) => {
|
||||
assert!(AcceptEncoding::try_parse($raw, ContentEncoding::Auto).is_err());
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_encoding() {
|
||||
// Test simple case
|
||||
assert_parse_eq!("br", ContentEncoding::Br);
|
||||
assert_parse_eq!("gzip", ContentEncoding::Gzip);
|
||||
assert_parse_eq!("deflate", ContentEncoding::Deflate);
|
||||
assert_parse_eq!("zstd", ContentEncoding::Zstd);
|
||||
|
||||
// Test space, trim, missing values
|
||||
assert_parse_eq!("br,,,,", ContentEncoding::Br);
|
||||
assert_parse_eq!("gzip , br, zstd", ContentEncoding::Gzip);
|
||||
|
||||
// Test float number parsing
|
||||
assert_parse_eq!("br;q=1 ,", ContentEncoding::Br);
|
||||
assert_parse_eq!("br;q=1.0 , br", ContentEncoding::Br);
|
||||
|
||||
// Test wildcard
|
||||
assert_parse_eq!("*", ContentEncoding::Identity);
|
||||
assert_parse_eq!("*;q=1.0", ContentEncoding::Identity);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_encoding_qfactor_ordering() {
|
||||
assert_parse_eq!("gzip, br, zstd", ContentEncoding::Gzip);
|
||||
assert_parse_eq!("zstd, br, gzip", ContentEncoding::Zstd);
|
||||
|
||||
assert_parse_eq!("gzip;q=0.4, br;q=0.6", ContentEncoding::Br);
|
||||
assert_parse_eq!("gzip;q=0.8, br;q=0.4", ContentEncoding::Gzip);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_encoding_qfactor_invalid() {
|
||||
// Out of range
|
||||
assert_parse_eq!("gzip;q=-5.0", ContentEncoding::Identity);
|
||||
assert_parse_eq!("gzip;q=5.0", ContentEncoding::Identity);
|
||||
|
||||
// Disabled
|
||||
assert_parse_eq!("gzip;q=0", ContentEncoding::Identity);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_compression_required() {
|
||||
// Check we fallback to identity if there is an unsupported compression algorithm
|
||||
assert_parse_eq!("compress", ContentEncoding::Identity);
|
||||
|
||||
// User do not want any compression
|
||||
assert_parse_fail!("compress, identity;q=0");
|
||||
assert_parse_fail!("compress, identity;q=0.0");
|
||||
assert_parse_fail!("compress, *;q=0");
|
||||
assert_parse_fail!("compress, *;q=0.0");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,7 +18,7 @@ use bytes::Bytes;
|
|||
use futures_core::ready;
|
||||
use log::{debug, warn};
|
||||
use regex::{Regex, RegexSet};
|
||||
use time::OffsetDateTime;
|
||||
use time::{format_description::well_known::Rfc3339, OffsetDateTime};
|
||||
|
||||
use crate::{
|
||||
dev::{BodySize, MessageBody},
|
||||
|
@ -538,7 +538,7 @@ impl FormatText {
|
|||
};
|
||||
}
|
||||
FormatText::UrlPath => *self = FormatText::Str(req.path().to_string()),
|
||||
FormatText::RequestTime => *self = FormatText::Str(now.format("%Y-%m-%dT%H:%M:%S")),
|
||||
FormatText::RequestTime => *self = FormatText::Str(now.format(&Rfc3339).unwrap()),
|
||||
FormatText::RequestHeader(ref name) => {
|
||||
let s = if let Some(val) = req.headers().get(name) {
|
||||
if let Ok(s) = val.to_str() {
|
||||
|
@ -767,7 +767,7 @@ mod tests {
|
|||
Ok(())
|
||||
};
|
||||
let s = format!("{}", FormatDisplay(&render));
|
||||
assert!(s.contains(&now.format("%Y-%m-%dT%H:%M:%S")));
|
||||
assert!(s.contains(&now.format(&Rfc3339).unwrap()));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
|
|
|
@ -19,3 +19,43 @@ mod compress;
|
|||
|
||||
#[cfg(feature = "__compress")]
|
||||
pub use self::compress::Compress;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::{http::StatusCode, App};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn common_combinations() {
|
||||
// ensure there's no reason that the built-in middleware cannot compose
|
||||
|
||||
let _ = App::new()
|
||||
.wrap(Compat::new(Logger::default()))
|
||||
.wrap(Condition::new(true, DefaultHeaders::new()))
|
||||
.wrap(DefaultHeaders::new().header("X-Test2", "X-Value2"))
|
||||
.wrap(ErrorHandlers::new().handler(StatusCode::FORBIDDEN, |res| {
|
||||
Ok(ErrorHandlerResponse::Response(res))
|
||||
}))
|
||||
.wrap(Logger::default())
|
||||
.wrap(NormalizePath::new(TrailingSlash::Trim));
|
||||
|
||||
let _ = App::new()
|
||||
.wrap(NormalizePath::new(TrailingSlash::Trim))
|
||||
.wrap(Logger::default())
|
||||
.wrap(ErrorHandlers::new().handler(StatusCode::FORBIDDEN, |res| {
|
||||
Ok(ErrorHandlerResponse::Response(res))
|
||||
}))
|
||||
.wrap(DefaultHeaders::new().header("X-Test2", "X-Value2"))
|
||||
.wrap(Condition::new(true, DefaultHeaders::new()))
|
||||
.wrap(Compat::new(Logger::default()));
|
||||
|
||||
#[cfg(feature = "__compress")]
|
||||
{
|
||||
let _ = App::new().wrap(Compress::default()).wrap(Logger::default());
|
||||
let _ = App::new().wrap(Logger::default()).wrap(Compress::default());
|
||||
let _ = App::new().wrap(Compat::new(Compress::default()));
|
||||
let _ = App::new().wrap(Condition::new(true, Compat::new(Compress::default())));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -59,7 +59,7 @@ impl Default for TrailingSlash {
///
/// # actix_web::rt::System::new().block_on(async {
/// let app = App::new()
///     .wrap(middleware::NormalizePath::default())
///     .wrap(middleware::NormalizePath::trim())
///     .route("/test", web::get().to(|| async { "test" }))
///     .route("/unmatchable/", web::get().to(|| async { "unmatchable" }));
///

@@ -85,13 +85,31 @@ impl Default for TrailingSlash {
/// assert_eq!(res.status(), StatusCode::NOT_FOUND);
/// # })
/// ```
#[derive(Debug, Clone, Copy, Default)]
#[derive(Debug, Clone, Copy)]
pub struct NormalizePath(TrailingSlash);

impl Default for NormalizePath {
    fn default() -> Self {
        log::warn!(
            "`NormalizePath::default()` is deprecated. The default trailing slash behavior changed \
            in v4 from `Always` to `Trim`. Update your call to `NormalizePath::new(...)`."
        );

        Self(TrailingSlash::Trim)
    }
}

impl NormalizePath {
    /// Create new `NormalizePath` middleware with the specified trailing slash style.
    pub fn new(trailing_slash_style: TrailingSlash) -> Self {
        NormalizePath(trailing_slash_style)
        Self(trailing_slash_style)
    }

    /// Constructs a new `NormalizePath` middleware with [trim](TrailingSlash::Trim) semantics.
    ///
    /// Use this instead of `NormalizePath::default()` to avoid deprecation warning.
    pub fn trim() -> Self {
        Self::new(TrailingSlash::Trim)
    }
}
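A short usage sketch of the constructors introduced above (routes are illustrative): `trim()` spells out the new default without triggering the deprecation warning, while `new(...)` keeps the old always-append-a-slash behavior available.

```rust
use actix_web::middleware::{NormalizePath, TrailingSlash};
use actix_web::{web, App};

fn build_apps() {
    // Same behavior as the now-deprecated `NormalizePath::default()`:
    let _trim = App::new()
        .wrap(NormalizePath::trim())
        .route("/test", web::get().to(|| async { "test" }));

    // Explicitly opt back into the v3-era behavior of appending a trailing slash:
    let _always = App::new()
        .wrap(NormalizePath::new(TrailingSlash::Always))
        .route("/test/", web::get().to(|| async { "test" }));
}
```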
@ -510,7 +510,7 @@ mod tests {
|
|||
let mut res = ResourceDef::new("/user/{name}.{ext}");
|
||||
res.set_name("index");
|
||||
|
||||
let mut rmap = ResourceMap::new(ResourceDef::new(""));
|
||||
let mut rmap = ResourceMap::new(ResourceDef::prefix(""));
|
||||
rmap.add(&mut res, None);
|
||||
assert!(rmap.has_resource("/user/test.html"));
|
||||
assert!(!rmap.has_resource("/test/unknown"));
|
||||
|
@ -540,7 +540,7 @@ mod tests {
|
|||
let mut rdef = ResourceDef::new("/index.html");
|
||||
rdef.set_name("index");
|
||||
|
||||
let mut rmap = ResourceMap::new(ResourceDef::new(""));
|
||||
let mut rmap = ResourceMap::new(ResourceDef::prefix(""));
|
||||
rmap.add(&mut rdef, None);
|
||||
|
||||
assert!(rmap.has_resource("/index.html"));
|
||||
|
@ -561,7 +561,7 @@ mod tests {
|
|||
let mut rdef = ResourceDef::new("/index.html");
|
||||
rdef.set_name("index");
|
||||
|
||||
let mut rmap = ResourceMap::new(ResourceDef::new(""));
|
||||
let mut rmap = ResourceMap::new(ResourceDef::prefix(""));
|
||||
rmap.add(&mut rdef, None);
|
||||
|
||||
assert!(rmap.has_resource("/index.html"));
|
||||
|
@ -580,9 +580,8 @@ mod tests {
|
|||
|
||||
rdef.set_name("youtube");
|
||||
|
||||
let mut rmap = ResourceMap::new(ResourceDef::new(""));
|
||||
let mut rmap = ResourceMap::new(ResourceDef::prefix(""));
|
||||
rmap.add(&mut rdef, None);
|
||||
assert!(rmap.has_resource("https://youtube.com/watch/unknown"));
|
||||
|
||||
let req = TestRequest::default().rmap(rmap).to_http_request();
|
||||
let url = req.url_for("youtube", &["oHg5SJYRHA0"]);
|
||||
|
|
396  src/rmap.rs
@@ -10,43 +10,75 @@ use crate::request::HttpRequest;

#[derive(Clone, Debug)]
pub struct ResourceMap {
    root: ResourceDef,
    pattern: ResourceDef,

    /// Named resources within the tree or, for external resources,
    /// it points to isolated nodes outside the tree.
    named: AHashMap<String, Rc<ResourceMap>>,

    parent: RefCell<Weak<ResourceMap>>,
    named: AHashMap<String, ResourceDef>,
    patterns: Vec<(ResourceDef, Option<Rc<ResourceMap>>)>,

    /// Must be `None` for "edge" nodes.
    nodes: Option<Vec<Rc<ResourceMap>>>,
}

impl ResourceMap {
    /// Creates a _container_ node in the `ResourceMap` tree.
    pub fn new(root: ResourceDef) -> Self {
        ResourceMap {
            root,
            parent: RefCell::new(Weak::new()),
            pattern: root,
            named: AHashMap::default(),
            patterns: Vec::new(),
            parent: RefCell::new(Weak::new()),
            nodes: Some(Vec::new()),
        }
    }
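A condensed sketch of how the restructured tree is built and queried; it follows the same steps as the `url_for` and `short_circuit` tests later in this file, with illustrative prefixes and names, and only compiles where `ResourceMap` is visible inside the crate.

```rust
use std::rc::Rc;
use actix_router::ResourceDef;

fn demo() {
    let mut root = ResourceMap::new(ResourceDef::prefix(""));

    // A nested (container) node for a scope-like prefix.
    let mut user_scope = ResourceDef::prefix("/user");
    let mut user_map = ResourceMap::new(user_scope.clone());

    let mut post = ResourceDef::new("/{user_id}/post/{post_id}");
    post.set_name("user_post");
    user_map.add(&mut post, None); // edge node: its `nodes` field stays `None`

    root.add(&mut user_scope, Some(Rc::new(user_map)));

    // Fix up parent back-references once the whole tree exists.
    let root = Rc::new(root);
    ResourceMap::finish(&root);

    assert!(root.has_resource("/user/u1/post/p1"));
    assert_eq!(root.match_name("/user/u1/post/p1"), Some("user_post"));
}
```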
/// Adds a (possibly nested) resource.
|
||||
///
|
||||
/// To add a non-prefix pattern, `nested` must be `None`.
|
||||
/// To add external resource, supply a pattern without a leading `/`.
|
||||
/// The root pattern of `nested`, if present, should match `pattern`.
|
||||
pub fn add(&mut self, pattern: &mut ResourceDef, nested: Option<Rc<ResourceMap>>) {
|
||||
pattern.set_id(self.patterns.len() as u16);
|
||||
self.patterns.push((pattern.clone(), nested));
|
||||
pattern.set_id(self.nodes.as_ref().unwrap().len() as u16);
|
||||
|
||||
if let Some(new_node) = nested {
|
||||
assert_eq!(&new_node.pattern, pattern, "`patern` and `nested` mismatch");
|
||||
self.named.extend(new_node.named.clone().into_iter());
|
||||
self.nodes.as_mut().unwrap().push(new_node);
|
||||
} else {
|
||||
let new_node = Rc::new(ResourceMap {
|
||||
pattern: pattern.clone(),
|
||||
named: AHashMap::default(),
|
||||
parent: RefCell::new(Weak::new()),
|
||||
nodes: None,
|
||||
});
|
||||
|
||||
if let Some(name) = pattern.name() {
|
||||
self.named.insert(name.to_owned(), pattern.clone());
|
||||
self.named.insert(name.to_owned(), Rc::clone(&new_node));
|
||||
}
|
||||
|
||||
let is_external = match pattern.pattern() {
|
||||
Some(p) => !p.is_empty() && !p.starts_with('/'),
|
||||
None => false,
|
||||
};
|
||||
|
||||
// Don't add external resources to the tree
|
||||
if !is_external {
|
||||
self.nodes.as_mut().unwrap().push(new_node);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn finish(&self, current: Rc<ResourceMap>) {
|
||||
for (_, nested) in &self.patterns {
|
||||
if let Some(ref nested) = nested {
|
||||
*nested.parent.borrow_mut() = Rc::downgrade(¤t);
|
||||
nested.finish(nested.clone());
|
||||
}
|
||||
pub(crate) fn finish(self: &Rc<Self>) {
|
||||
for node in self.nodes.iter().flatten() {
|
||||
node.parent.replace(Rc::downgrade(self));
|
||||
ResourceMap::finish(node);
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate url for named resource
|
||||
///
|
||||
/// Check [`HttpRequest::url_for()`](../struct.HttpRequest.html#method.
|
||||
/// url_for) for detailed information.
|
||||
/// Check [`HttpRequest::url_for`] for detailed information.
|
||||
pub fn url_for<U, I>(
|
||||
&self,
|
||||
req: &HttpRequest,
|
||||
|
@ -57,10 +89,19 @@ impl ResourceMap {
|
|||
U: IntoIterator<Item = I>,
|
||||
I: AsRef<str>,
|
||||
{
|
||||
let mut path = String::new();
|
||||
let mut elements = elements.into_iter();
|
||||
|
||||
if self.patterns_for(name, &mut path, &mut elements)?.is_some() {
|
||||
let path = self
|
||||
.named
|
||||
.get(name)
|
||||
.ok_or(UrlGenerationError::ResourceNotFound)?
|
||||
.root_rmap_fn(String::with_capacity(24), |mut acc, node| {
|
||||
node.pattern
|
||||
.resource_path_from_iter(&mut acc, &mut elements)
|
||||
.then(|| acc)
|
||||
})
|
||||
.ok_or(UrlGenerationError::NotEnoughElements)?;
|
||||
|
||||
if path.starts_with('/') {
|
||||
let conn = req.connection_info();
|
||||
Ok(Url::parse(&format!(
|
||||
|
@ -72,182 +113,73 @@ impl ResourceMap {
|
|||
} else {
|
||||
Ok(Url::parse(&path)?)
|
||||
}
|
||||
} else {
|
||||
Err(UrlGenerationError::ResourceNotFound)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has_resource(&self, path: &str) -> bool {
|
||||
let path = if path.is_empty() { "/" } else { path };
|
||||
|
||||
for (pattern, rmap) in &self.patterns {
|
||||
if let Some(ref rmap) = rmap {
|
||||
if let Some(pat_len) = pattern.find_match(path) {
|
||||
return rmap.has_resource(&path[pat_len..]);
|
||||
}
|
||||
} else if pattern.is_match(path) || pattern.pattern() == Some("") && path == "/" {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
self.find_matching_node(path).is_some()
|
||||
}
|
||||
|
||||
/// Returns the name of the route that matches the given path or None if no full match
|
||||
/// is possible.
|
||||
/// is possible or the matching resource is not named.
|
||||
pub fn match_name(&self, path: &str) -> Option<&str> {
|
||||
let path = if path.is_empty() { "/" } else { path };
|
||||
|
||||
for (pattern, rmap) in &self.patterns {
|
||||
if let Some(ref rmap) = rmap {
|
||||
if let Some(plen) = pattern.find_match(path) {
|
||||
return rmap.match_name(&path[plen..]);
|
||||
}
|
||||
} else if pattern.is_match(path) {
|
||||
return pattern.name();
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
self.find_matching_node(path)?.pattern.name()
|
||||
}
|
||||
|
||||
/// Returns the full resource pattern matched against a path or None if no full match
|
||||
/// is possible.
|
||||
pub fn match_pattern(&self, path: &str) -> Option<String> {
|
||||
let path = if path.is_empty() { "/" } else { path };
|
||||
|
||||
// ensure a full match exists
|
||||
if !self.has_resource(path) {
|
||||
return None;
|
||||
self.find_matching_node(path)?.root_rmap_fn(
|
||||
String::with_capacity(24),
|
||||
|mut acc, node| {
|
||||
acc.push_str(node.pattern.pattern()?);
|
||||
Some(acc)
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
Some(self.traverse_resource_pattern(path))
|
||||
fn find_matching_node(&self, path: &str) -> Option<&ResourceMap> {
|
||||
self._find_matching_node(path).flatten()
|
||||
}
|
||||
|
||||
/// Takes remaining path and tries to match it up against a resource definition within the
|
||||
/// current resource map recursively, returning a concatenation of all resource prefixes and
|
||||
/// patterns matched in the tree.
|
||||
///
|
||||
/// Should only be used after checking the resource exists in the map so that partial match
|
||||
/// patterns are not returned.
|
||||
fn traverse_resource_pattern(&self, remaining: &str) -> String {
|
||||
for (pattern, rmap) in &self.patterns {
|
||||
if let Some(ref rmap) = rmap {
|
||||
if let Some(prefix_len) = pattern.find_match(remaining) {
|
||||
// TODO: think about unwrap_or
|
||||
let prefix = pattern.pattern().unwrap_or("").to_owned();
|
||||
/// Returns `None` if root pattern doesn't match;
|
||||
/// `Some(None)` if root pattern matches but there is no matching child pattern.
|
||||
/// Don't search sideways when `Some(none)` is returned.
|
||||
fn _find_matching_node(&self, path: &str) -> Option<Option<&ResourceMap>> {
|
||||
let matched_len = self.pattern.find_match(path)?;
|
||||
let path = &path[matched_len..];
|
||||
|
||||
return [
|
||||
prefix,
|
||||
rmap.traverse_resource_pattern(&remaining[prefix_len..]),
|
||||
]
|
||||
.concat();
|
||||
}
|
||||
} else if pattern.is_match(remaining) {
|
||||
// TODO: think about unwrap_or
|
||||
return pattern.pattern().unwrap_or("").to_owned();
|
||||
}
|
||||
Some(match &self.nodes {
|
||||
// find first sub-node to match remaining path
|
||||
Some(nodes) => nodes
|
||||
.iter()
|
||||
.filter_map(|node| node._find_matching_node(path))
|
||||
.next()
|
||||
.flatten(),
|
||||
|
||||
// only terminate at edge nodes
|
||||
None => Some(self),
|
||||
})
|
||||
}
|
||||
|
||||
String::new()
|
||||
}
|
||||
|
||||
fn patterns_for<U, I>(
|
||||
&self,
|
||||
name: &str,
|
||||
path: &mut String,
|
||||
elements: &mut U,
|
||||
) -> Result<Option<()>, UrlGenerationError>
|
||||
/// Find `self`'s highest ancestor and then run `F`, providing `B`, in that rmap context.
|
||||
fn root_rmap_fn<F, B>(&self, init: B, mut f: F) -> Option<B>
|
||||
where
|
||||
U: Iterator<Item = I>,
|
||||
I: AsRef<str>,
|
||||
F: FnMut(B, &ResourceMap) -> Option<B>,
|
||||
{
|
||||
if self.pattern_for(name, path, elements)?.is_some() {
|
||||
Ok(Some(()))
|
||||
} else {
|
||||
self.parent_pattern_for(name, path, elements)
|
||||
}
|
||||
self._root_rmap_fn(init, &mut f)
|
||||
}
|
||||
|
||||
fn pattern_for<U, I>(
|
||||
&self,
|
||||
name: &str,
|
||||
path: &mut String,
|
||||
elements: &mut U,
|
||||
) -> Result<Option<()>, UrlGenerationError>
|
||||
/// Run `F`, providing `B`, if `self` is top-level resource map, else recurse to parent map.
|
||||
fn _root_rmap_fn<F, B>(&self, init: B, f: &mut F) -> Option<B>
|
||||
where
|
||||
U: Iterator<Item = I>,
|
||||
I: AsRef<str>,
|
||||
F: FnMut(B, &ResourceMap) -> Option<B>,
|
||||
{
|
||||
if let Some(pattern) = self.named.get(name) {
|
||||
if pattern
|
||||
.pattern()
|
||||
.map(|pat| pat.starts_with('/'))
|
||||
.unwrap_or(false)
|
||||
{
|
||||
self.fill_root(path, elements)?;
|
||||
}
|
||||
let data = match self.parent.borrow().upgrade() {
|
||||
Some(ref parent) => parent._root_rmap_fn(init, f)?,
|
||||
None => init,
|
||||
};
|
||||
|
||||
if pattern.resource_path_from_iter(path, elements) {
|
||||
Ok(Some(()))
|
||||
} else {
|
||||
Err(UrlGenerationError::NotEnoughElements)
|
||||
}
|
||||
} else {
|
||||
for (_, rmap) in &self.patterns {
|
||||
if let Some(ref rmap) = rmap {
|
||||
if rmap.pattern_for(name, path, elements)?.is_some() {
|
||||
return Ok(Some(()));
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
fn fill_root<U, I>(
|
||||
&self,
|
||||
path: &mut String,
|
||||
elements: &mut U,
|
||||
) -> Result<(), UrlGenerationError>
|
||||
where
|
||||
U: Iterator<Item = I>,
|
||||
I: AsRef<str>,
|
||||
{
|
||||
if let Some(ref parent) = self.parent.borrow().upgrade() {
|
||||
parent.fill_root(path, elements)?;
|
||||
}
|
||||
|
||||
if self.root.resource_path_from_iter(path, elements) {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(UrlGenerationError::NotEnoughElements)
|
||||
}
|
||||
}
|
||||
|
||||
fn parent_pattern_for<U, I>(
|
||||
&self,
|
||||
name: &str,
|
||||
path: &mut String,
|
||||
elements: &mut U,
|
||||
) -> Result<Option<()>, UrlGenerationError>
|
||||
where
|
||||
U: Iterator<Item = I>,
|
||||
I: AsRef<str>,
|
||||
{
|
||||
if let Some(ref parent) = self.parent.borrow().upgrade() {
|
||||
if let Some(pattern) = parent.named.get(name) {
|
||||
self.fill_root(path, elements)?;
|
||||
if pattern.resource_path_from_iter(path, elements) {
|
||||
Ok(Some(()))
|
||||
} else {
|
||||
Err(UrlGenerationError::NotEnoughElements)
|
||||
}
|
||||
} else {
|
||||
parent.parent_pattern_for(name, path, elements)
|
||||
}
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
f(data, self)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -259,7 +191,7 @@ mod tests {
|
|||
fn extract_matched_pattern() {
|
||||
let mut root = ResourceMap::new(ResourceDef::root_prefix(""));
|
||||
|
||||
let mut user_map = ResourceMap::new(ResourceDef::root_prefix(""));
|
||||
let mut user_map = ResourceMap::new(ResourceDef::root_prefix("/user/{id}"));
|
||||
user_map.add(&mut ResourceDef::new("/"), None);
|
||||
user_map.add(&mut ResourceDef::new("/profile"), None);
|
||||
user_map.add(&mut ResourceDef::new("/article/{id}"), None);
|
||||
|
@ -275,9 +207,10 @@ mod tests {
|
|||
&mut ResourceDef::root_prefix("/user/{id}"),
|
||||
Some(Rc::new(user_map)),
|
||||
);
|
||||
root.add(&mut ResourceDef::new("/info"), None);
|
||||
|
||||
let root = Rc::new(root);
|
||||
root.finish(Rc::clone(&root));
|
||||
ResourceMap::finish(&root);
|
||||
|
||||
// sanity check resource map setup
|
||||
|
||||
|
@ -288,7 +221,7 @@ mod tests {
|
|||
assert!(root.has_resource("/v2"));
|
||||
assert!(!root.has_resource("/v33"));
|
||||
|
||||
assert!(root.has_resource("/user/22"));
|
||||
assert!(!root.has_resource("/user/22"));
|
||||
assert!(root.has_resource("/user/22/"));
|
||||
assert!(root.has_resource("/user/22/profile"));
|
||||
|
||||
|
@ -336,7 +269,7 @@ mod tests {
|
|||
rdef.set_name("root_info");
|
||||
root.add(&mut rdef, None);
|
||||
|
||||
let mut user_map = ResourceMap::new(ResourceDef::root_prefix(""));
|
||||
let mut user_map = ResourceMap::new(ResourceDef::root_prefix("/user/{id}"));
|
||||
let mut rdef = ResourceDef::new("/");
|
||||
user_map.add(&mut rdef, None);
|
||||
|
||||
|
@ -350,14 +283,14 @@ mod tests {
|
|||
);
|
||||
|
||||
let root = Rc::new(root);
|
||||
root.finish(Rc::clone(&root));
|
||||
ResourceMap::finish(&root);
|
||||
|
||||
// sanity check resource map setup
|
||||
|
||||
assert!(root.has_resource("/info"));
|
||||
assert!(!root.has_resource("/bar"));
|
||||
|
||||
assert!(root.has_resource("/user/22"));
|
||||
assert!(!root.has_resource("/user/22"));
|
||||
assert!(root.has_resource("/user/22/"));
|
||||
assert!(root.has_resource("/user/22/post/55"));
|
||||
|
||||
|
@ -377,7 +310,7 @@ mod tests {
|
|||
// ref: https://github.com/actix/actix-web/issues/1582
|
||||
let mut root = ResourceMap::new(ResourceDef::root_prefix(""));
|
||||
|
||||
let mut user_map = ResourceMap::new(ResourceDef::root_prefix(""));
|
||||
let mut user_map = ResourceMap::new(ResourceDef::root_prefix("/user/{id}"));
|
||||
user_map.add(&mut ResourceDef::new("/"), None);
|
||||
user_map.add(&mut ResourceDef::new("/profile"), None);
|
||||
user_map.add(&mut ResourceDef::new("/article/{id}"), None);
|
||||
|
@ -393,20 +326,119 @@ mod tests {
|
|||
);
|
||||
|
||||
let root = Rc::new(root);
|
||||
root.finish(Rc::clone(&root));
|
||||
ResourceMap::finish(&root);
|
||||
|
||||
// check root has no parent
|
||||
assert!(root.parent.borrow().upgrade().is_none());
|
||||
// check child has parent reference
|
||||
assert!(root.patterns[0].1.is_some());
|
||||
assert!(root.nodes.as_ref().unwrap()[0]
|
||||
.parent
|
||||
.borrow()
|
||||
.upgrade()
|
||||
.is_some());
|
||||
// check child's parent root id matches root's root id
|
||||
assert_eq!(
|
||||
root.patterns[0].1.as_ref().unwrap().root.id(),
|
||||
root.root.id()
|
||||
);
|
||||
assert!(Rc::ptr_eq(
|
||||
&root.nodes.as_ref().unwrap()[0]
|
||||
.parent
|
||||
.borrow()
|
||||
.upgrade()
|
||||
.unwrap(),
|
||||
&root
|
||||
));
|
||||
|
||||
let output = format!("{:?}", root);
|
||||
assert!(output.starts_with("ResourceMap {"));
|
||||
assert!(output.ends_with(" }"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn short_circuit() {
|
||||
let mut root = ResourceMap::new(ResourceDef::prefix(""));
|
||||
|
||||
let mut user_root = ResourceDef::prefix("/user");
|
||||
let mut user_map = ResourceMap::new(user_root.clone());
|
||||
user_map.add(&mut ResourceDef::new("/u1"), None);
|
||||
user_map.add(&mut ResourceDef::new("/u2"), None);
|
||||
|
||||
root.add(&mut ResourceDef::new("/user/u3"), None);
|
||||
root.add(&mut user_root, Some(Rc::new(user_map)));
|
||||
root.add(&mut ResourceDef::new("/user/u4"), None);
|
||||
|
||||
let rmap = Rc::new(root);
|
||||
ResourceMap::finish(&rmap);
|
||||
|
||||
assert!(rmap.has_resource("/user/u1"));
|
||||
assert!(rmap.has_resource("/user/u2"));
|
||||
assert!(rmap.has_resource("/user/u3"));
|
||||
assert!(!rmap.has_resource("/user/u4"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn url_for() {
|
||||
let mut root = ResourceMap::new(ResourceDef::prefix(""));
|
||||
|
||||
let mut user_scope_rdef = ResourceDef::prefix("/user");
|
||||
let mut user_scope_map = ResourceMap::new(user_scope_rdef.clone());
|
||||
|
||||
let mut user_rdef = ResourceDef::new("/{user_id}");
|
||||
let mut user_map = ResourceMap::new(user_rdef.clone());
|
||||
|
||||
let mut post_rdef = ResourceDef::new("/post/{sub_id}");
|
||||
post_rdef.set_name("post");
|
||||
|
||||
user_map.add(&mut post_rdef, None);
|
||||
user_scope_map.add(&mut user_rdef, Some(Rc::new(user_map)));
|
||||
root.add(&mut user_scope_rdef, Some(Rc::new(user_scope_map)));
|
||||
|
||||
let rmap = Rc::new(root);
|
||||
ResourceMap::finish(&rmap);
|
||||
|
||||
let mut req = crate::test::TestRequest::default();
|
||||
req.set_server_hostname("localhost:8888");
|
||||
let req = req.to_http_request();
|
||||
|
||||
let url = rmap
|
||||
.url_for(&req, "post", &["u123", "foobar"])
|
||||
.unwrap()
|
||||
.to_string();
|
||||
assert_eq!(url, "http://localhost:8888/user/u123/post/foobar");
|
||||
|
||||
assert!(rmap.url_for(&req, "missing", &["u123"]).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn external_resource_with_no_name() {
|
||||
let mut root = ResourceMap::new(ResourceDef::prefix(""));
|
||||
|
||||
let mut rdef = ResourceDef::new("https://duck.com/{query}");
|
||||
root.add(&mut rdef, None);
|
||||
|
||||
let rmap = Rc::new(root);
|
||||
ResourceMap::finish(&rmap);
|
||||
|
||||
assert!(!rmap.has_resource("https://duck.com/abc"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn external_resource_with_name() {
|
||||
let mut root = ResourceMap::new(ResourceDef::prefix(""));
|
||||
|
||||
let mut rdef = ResourceDef::new("https://duck.com/{query}");
|
||||
rdef.set_name("duck");
|
||||
root.add(&mut rdef, None);
|
||||
|
||||
let rmap = Rc::new(root);
|
||||
ResourceMap::finish(&rmap);
|
||||
|
||||
assert!(!rmap.has_resource("https://duck.com/abc"));
|
||||
|
||||
let mut req = crate::test::TestRequest::default();
|
||||
req.set_server_hostname("localhost:8888");
|
||||
let req = req.to_http_request();
|
||||
|
||||
assert_eq!(
|
||||
rmap.url_for(&req, "duck", &["abcd"]).unwrap().to_string(),
|
||||
"https://duck.com/abcd"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -41,9 +41,9 @@ type HttpNewService = BoxServiceFactory<(), ServiceRequest, ServiceResponse, Err
/// fn main() {
///     let app = App::new().service(
///         web::scope("/{project_id}/")
///             .service(web::resource("/path1").to(|| async { HttpResponse::Ok() }))
///             .service(web::resource("/path1").to(|| async { "OK" }))
///             .service(web::resource("/path2").route(web::get().to(|| HttpResponse::Ok())))
///             .service(web::resource("/path3").route(web::head().to(|| HttpResponse::MethodNotAllowed())))
///             .service(web::resource("/path3").route(web::head().to(HttpResponse::MethodNotAllowed)))
///     );
/// }
/// ```
@@ -476,7 +476,7 @@ impl WebService {

    /// Set service name.
    ///
    /// Name is used for url generation.
    /// Name is used for URL generation.
    pub fn name(mut self, name: &str) -> Self {
        self.name = Some(name.to_string());
        self
src/test.rs
@@ -1,6 +1,6 @@
//! Various helpers for Actix applications to use during testing.

use std::{net::SocketAddr, rc::Rc};
use std::{borrow::Cow, net::SocketAddr, rc::Rc};

pub use actix_http::test::TestBuffer;
use actix_http::{
@@ -56,7 +56,7 @@ pub fn default_service(
/// async fn test_init_service() {
///     let app = test::init_service(
///         App::new()
///             .service(web::resource("/test").to(|| async { HttpResponse::Ok() }))
///             .service(web::resource("/test").to(|| async { "OK" }))
///     ).await;
///
///     // Create request object
@@ -470,19 +470,31 @@ impl TestRequest {
        self
    }

    /// Set request path pattern parameter
    pub fn param(mut self, name: &'static str, value: &'static str) -> Self {
    /// Set request path pattern parameter.
    ///
    /// # Examples
    /// ```
    /// use actix_web::test::TestRequest;
    ///
    /// let req = TestRequest::default().param("foo", "bar");
    /// let req = TestRequest::default().param("foo".to_owned(), "bar".to_owned());
    /// ```
    pub fn param(
        mut self,
        name: impl Into<Cow<'static, str>>,
        value: impl Into<Cow<'static, str>>,
    ) -> Self {
        self.path.add_static(name, value);
        self
    }

    /// Set peer addr
    /// Set peer addr.
    pub fn peer_addr(mut self, addr: SocketAddr) -> Self {
        self.peer_addr = Some(addr);
        self
    }

    /// Set request payload
    /// Set request payload.
    pub fn set_payload<B: Into<Bytes>>(mut self, data: B) -> Self {
        self.req.set_payload(data);
        self
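A brief usage sketch, not part of this diff, of why the relaxed `Into<Cow<'static, str>>` bounds on `TestRequest::param` are useful: parameter names and values can now be built at runtime instead of being limited to `&'static str`. The parameter names and helper below are hypothetical.

// Sketch only: owned, runtime-built strings now work alongside string literals.
use actix_web::{test::TestRequest, HttpRequest};

fn request_for_user(id: u32) -> HttpRequest {
    let user_id = format!("user-{}", id); // owned String, not &'static str
    TestRequest::default()
        .param("user_id", user_id)         // String value
        .param("role".to_owned(), "admin") // String name, &str value
        .to_http_request()
}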
@@ -30,7 +30,7 @@ use crate::{
///
/// # Extractor
/// To extract typed data from a request body, the inner type `T` must implement the
/// [`serde::Deserialize`] trait.
/// [`DeserializeOwned`] trait.
///
/// Use [`FormConfig`] to configure extraction process.
///
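For context, a minimal sketch of the form extractor described by the doc comment above; it is not part of this diff, and the `Login` struct, route, and handler name are hypothetical (serde's `derive` feature is assumed).

use actix_web::{post, web, Responder};
use serde::Deserialize;

// Hypothetical URL-encoded form payload; any type implementing `DeserializeOwned` works.
#[derive(Deserialize)]
struct Login {
    username: String,
}

// `web::Form` deserializes the request body into `Login` before the handler runs.
#[post("/login")]
async fn login(form: web::Form<Login>) -> impl Responder {
    format!("welcome, {}", form.username)
}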
@@ -97,19 +97,13 @@ impl<T> ops::DerefMut for Json<T> {
    }
}

impl<T> fmt::Display for Json<T>
where
    T: fmt::Display,
{
impl<T: fmt::Display> fmt::Display for Json<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&self.0, f)
    }
}

impl<T> Serialize for Json<T>
where
    T: Serialize,
{
impl<T: Serialize> Serialize for Json<T> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
@@ -133,10 +127,7 @@ impl<T: Serialize> Responder for Json<T> {
}

/// See [here](#extractor) for example of usage as an extractor.
impl<T> FromRequest for Json<T>
where
    T: DeserializeOwned + 'static,
{
impl<T: DeserializeOwned + 'static> FromRequest for Json<T> {
    type Error = Error;
    type Future = JsonExtractFut<T>;
@@ -165,10 +156,7 @@ pub struct JsonExtractFut<T> {
    err_handler: JsonErrorHandler,
}

impl<T> Future for JsonExtractFut<T>
where
    T: DeserializeOwned + 'static,
{
impl<T: DeserializeOwned + 'static> Future for JsonExtractFut<T> {
    type Output = Result<Json<T>, Error>;

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
@@ -310,10 +298,7 @@ pub enum JsonBody<T> {

impl<T> Unpin for JsonBody<T> {}

impl<T> JsonBody<T>
where
    T: DeserializeOwned + 'static,
{
impl<T: DeserializeOwned> JsonBody<T> {
    /// Create a new future to decode a JSON request payload.
    #[allow(clippy::borrow_interior_mutable_const)]
    pub fn new(
@@ -394,10 +379,7 @@ where
    }
}

impl<T> Future for JsonBody<T>
where
    T: DeserializeOwned + 'static,
{
impl<T: DeserializeOwned + 'static> Future for JsonBody<T> {
    type Output = Result<T, JsonPayloadError>;

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
@@ -3,14 +3,14 @@
use std::{fmt, ops, sync::Arc};

use actix_utils::future::{err, ok, Ready};
use serde::de;
use serde::de::DeserializeOwned;

use crate::{dev::Payload, error::QueryPayloadError, Error, FromRequest, HttpRequest};

/// Extract typed information from the request's query.
///
/// To extract typed data from the URL query string, the inner type `T` must implement the
/// [`serde::Deserialize`] trait.
/// [`DeserializeOwned`] trait.
///
/// Use [`QueryConfig`] to configure extraction process.
///
@@ -46,18 +46,18 @@ use crate::{dev::Payload, error::QueryPayloadError, Error, FromRequest, HttpRequ
/// // To access the entire underlying query struct, use `.into_inner()`.
/// #[get("/debug1")]
/// async fn debug1(info: web::Query<AuthRequest>) -> String {
///     dbg!("Authorization object={:?}", info.into_inner());
///     dbg!("Authorization object = {:?}", info.into_inner());
///     "OK".to_string()
/// }
///
/// // Or use `.0`, which is equivalent to `.into_inner()`.
/// // Or use destructuring, which is equivalent to `.into_inner()`.
/// #[get("/debug2")]
/// async fn debug2(info: web::Query<AuthRequest>) -> String {
///     dbg!("Authorization object={:?}", info.0);
/// async fn debug2(web::Query(info): web::Query<AuthRequest>) -> String {
///     dbg!("Authorization object = {:?}", info);
///     "OK".to_string()
/// }
/// ```
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct Query<T>(pub T);

impl<T> Query<T> {
@@ -65,8 +65,10 @@ impl<T> Query<T> {
    pub fn into_inner(self) -> T {
        self.0
    }
}

    /// Deserialize `T` from a URL encoded query parameter string.
impl<T: DeserializeOwned> Query<T> {
    /// Deserialize a `T` from the URL encoded query parameter string.
    ///
    /// ```
    /// # use std::collections::HashMap;
@@ -76,10 +78,7 @@ impl<T> Query<T> {
    /// assert_eq!(numbers.get("two"), Some(&2));
    /// assert!(numbers.get("three").is_none());
    /// ```
    pub fn from_query(query_str: &str) -> Result<Self, QueryPayloadError>
    where
        T: de::DeserializeOwned,
    {
    pub fn from_query(query_str: &str) -> Result<Self, QueryPayloadError> {
        serde_urlencoded::from_str::<T>(query_str)
            .map(Self)
            .map_err(QueryPayloadError::Deserialize)
@@ -107,10 +106,7 @@ impl<T: fmt::Display> fmt::Display for Query<T> {
}

/// See [here](#usage) for example of usage as an extractor.
impl<T> FromRequest for Query<T>
where
    T: de::DeserializeOwned,
{
impl<T: DeserializeOwned> FromRequest for Query<T> {
    type Error = Error;
    type Future = Ready<Result<Self, Error>>;
@@ -164,7 +160,7 @@ where
/// let query_cfg = web::QueryConfig::default()
///     // use custom error handler
///     .error_handler(|err, req| {
///         error::InternalError::from_response(err, HttpResponse::Conflict().into()).into()
///         error::InternalError::from_response(err, HttpResponse::Conflict().finish()).into()
///     });
///
/// App::new()
src/web.rs
@@ -3,44 +3,36 @@
use std::future::Future;

use actix_http::http::Method;
pub use actix_http::Response as HttpResponse;
use actix_router::IntoPatterns;
pub use bytes::{Buf, BufMut, Bytes, BytesMut};

use crate::error::BlockingError;
use crate::extract::FromRequest;
use crate::handler::Handler;
use crate::resource::Resource;
use crate::responder::Responder;
use crate::route::Route;
use crate::scope::Scope;
use crate::service::WebService;
use crate::{
    error::BlockingError, extract::FromRequest, handler::Handler, resource::Resource,
    responder::Responder, route::Route, scope::Scope, service::WebService,
};

pub use crate::config::ServiceConfig;
pub use crate::data::Data;
pub use crate::request::HttpRequest;
pub use crate::request_data::ReqData;
pub use crate::response::HttpResponse;
pub use crate::types::*;

/// Create resource for a specific path.
/// Creates a new resource for a specific path.
///
/// Resources may have variable path segments. For example, a
/// resource with the path `/a/{name}/c` would match all incoming
/// requests with paths such as `/a/b/c`, `/a/1/c`, or `/a/etc/c`.
/// Resources may have dynamic path segments. For example, a resource with the path `/a/{name}/c`
/// would match all incoming requests with paths such as `/a/b/c`, `/a/1/c`, or `/a/etc/c`.
///
/// A variable segment is specified in the form `{identifier}`,
/// where the identifier can be used later in a request handler to
/// access the matched value for that segment. This is done by
/// looking up the identifier in the `Params` object returned by
/// `HttpRequest.match_info()` method.
/// A dynamic segment is specified in the form `{identifier}`, where the identifier can be used
/// later in a request handler to access the matched value for that segment. This is done by looking
/// up the identifier in the `Path` object returned by [`HttpRequest.match_info()`] method.
///
/// By default, each segment matches the regular expression `[^{}/]+`.
///
/// You can also specify a custom regex in the form `{identifier:regex}`:
///
/// For instance, to route `GET`-requests on any route matching
/// `/users/{userid}/{friend}` and store `userid` and `friend` in
/// the exposed `Params` object:
/// For instance, to route `GET`-requests on any route matching `/users/{userid}/{friend}` and store
/// `userid` and `friend` in the exposed `Path` object:
///
/// ```
/// use actix_web::{web, App, HttpResponse};
@@ -55,10 +47,16 @@ pub fn resource<T: IntoPatterns>(path: T) -> Resource {
    Resource::new(path)
}

/// Configure scope for common root path.
/// Creates scope for common path prefix.
///
/// Scopes collect multiple paths under a common path prefix.
/// Scope path can contain variable path segments as resources.
/// Scopes collect multiple paths under a common path prefix. The scope's path can contain dynamic
/// path segments.
///
/// # Examples
/// In this example, three routes are set up (and will handle any method):
/// * `/{project_id}/path1`
/// * `/{project_id}/path2`
/// * `/{project_id}/path3`
///
/// ```
/// use actix_web::{web, App, HttpResponse};
@@ -70,148 +68,50 @@ pub fn resource<T: IntoPatterns>(path: T) -> Resource {
///             .service(web::resource("/path3").to(|| HttpResponse::MethodNotAllowed()))
///     );
/// ```
///
/// In the above example, three routes get added:
/// * /{project_id}/path1
/// * /{project_id}/path2
/// * /{project_id}/path3
///
pub fn scope(path: &str) -> Scope {
    Scope::new(path)
}

/// Create *route* without configuration.
/// Creates a new un-configured route.
pub fn route() -> Route {
    Route::new()
}

/// Create *route* with `GET` method guard.
///
/// ```
/// use actix_web::{web, App, HttpResponse};
///
/// let app = App::new().service(
///     web::resource("/{project_id}")
///         .route(web::get().to(|| HttpResponse::Ok()))
/// );
/// ```
///
/// In the above example, one `GET` route gets added:
/// * /{project_id}
///
pub fn get() -> Route {
    method(Method::GET)
macro_rules! method_route {
    ($method_fn:ident, $method_const:ident) => {
        paste::paste! {
            #[doc = " Creates a new route with `" $method_const "` method guard."]
            ///
            /// # Examples
            #[doc = " In this example, one `" $method_const " /{project_id}` route is set up:"]
            /// ```
            /// use actix_web::{web, App, HttpResponse};
            ///
            /// let app = App::new().service(
            ///     web::resource("/{project_id}")
            #[doc = " .route(web::" $method_fn "().to(|| HttpResponse::Ok()))"]
            ///
            /// );
            /// ```
            pub fn $method_fn() -> Route {
                method(Method::$method_const)
            }
        }
    };
}

/// Create *route* with `POST` method guard.
///
/// ```
/// use actix_web::{web, App, HttpResponse};
///
/// let app = App::new().service(
///     web::resource("/{project_id}")
///         .route(web::post().to(|| HttpResponse::Ok()))
/// );
/// ```
///
/// In the above example, one `POST` route gets added:
/// * /{project_id}
///
pub fn post() -> Route {
    method(Method::POST)
}
method_route!(get, GET);
method_route!(post, POST);
method_route!(put, PUT);
method_route!(patch, PATCH);
method_route!(delete, DELETE);
method_route!(head, HEAD);
method_route!(trace, TRACE);

/// Create *route* with `PUT` method guard.
/// Creates a new route with specified method guard.
///
/// ```
/// use actix_web::{web, App, HttpResponse};
///
/// let app = App::new().service(
///     web::resource("/{project_id}")
///         .route(web::put().to(|| HttpResponse::Ok()))
/// );
/// ```
///
/// In the above example, one `PUT` route gets added:
/// * /{project_id}
///
pub fn put() -> Route {
    method(Method::PUT)
}

/// Create *route* with `PATCH` method guard.
///
/// ```
/// use actix_web::{web, App, HttpResponse};
///
/// let app = App::new().service(
///     web::resource("/{project_id}")
///         .route(web::patch().to(|| HttpResponse::Ok()))
/// );
/// ```
///
/// In the above example, one `PATCH` route gets added:
/// * /{project_id}
///
pub fn patch() -> Route {
    method(Method::PATCH)
}

/// Create *route* with `DELETE` method guard.
///
/// ```
/// use actix_web::{web, App, HttpResponse};
///
/// let app = App::new().service(
///     web::resource("/{project_id}")
///         .route(web::delete().to(|| HttpResponse::Ok()))
/// );
/// ```
///
/// In the above example, one `DELETE` route gets added:
/// * /{project_id}
///
pub fn delete() -> Route {
    method(Method::DELETE)
}

/// Create *route* with `HEAD` method guard.
///
/// ```
/// use actix_web::{web, App, HttpResponse};
///
/// let app = App::new().service(
///     web::resource("/{project_id}")
///         .route(web::head().to(|| HttpResponse::Ok()))
/// );
/// ```
///
/// In the above example, one `HEAD` route gets added:
/// * /{project_id}
///
pub fn head() -> Route {
    method(Method::HEAD)
}

/// Create *route* with `TRACE` method guard.
///
/// ```
/// use actix_web::{web, App, HttpResponse};
///
/// let app = App::new().service(
///     web::resource("/{project_id}")
///         .route(web::trace().to(|| HttpResponse::Ok()))
/// );
/// ```
///
/// In the above example, one `HEAD` route gets added:
/// * /{project_id}
///
pub fn trace() -> Route {
    method(Method::TRACE)
}

/// Create *route* and add method guard.
/// # Examples
/// In this example, one `GET /{project_id}` route is set up:
///
/// ```
/// use actix_web::{web, http, App, HttpResponse};
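For reference, a sketch of what one of the `method_route!` invocations above expands to, inferred from the macro body shown in this hunk; the generated doc attributes are omitted, and this code does not itself appear in the diff.

// Inferred expansion of `method_route!(put, PUT);`, doc comments elided.
pub fn put() -> Route {
    method(Method::PUT)
}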
@@ -221,15 +121,11 @@ pub fn trace() -> Route {
///         .route(web::method(http::Method::GET).to(|| HttpResponse::Ok()))
/// );
/// ```
///
/// In the above example, one `GET` route gets added:
/// * /{project_id}
///
pub fn method(method: Method) -> Route {
    Route::new().method(method)
}

/// Create a new route and add handler.
/// Creates a new any-method route with handler.
///
/// ```
/// use actix_web::{web, App, HttpResponse, Responder};
@@ -253,7 +149,7 @@ where
    Route::new().to(handler)
}

/// Create raw service for a specific path.
/// Creates a raw service for a specific path.
///
/// ```
/// use actix_web::{dev, web, guard, App, Error, HttpResponse};
@@ -272,8 +168,8 @@ pub fn service<T: IntoPatterns>(path: T) -> WebService {
    WebService::new(path)
}

/// Execute blocking function on a thread pool, returns future that resolves
/// to result of the function execution.
/// Executes blocking function on a thread pool, returns future that resolves to result of the
/// function execution.
pub fn block<F, R>(f: F) -> impl Future<Output = Result<R, BlockingError>>
where
    F: FnOnce() -> R + Send + 'static,
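A short usage sketch, not part of this diff, of the `block` helper documented above; the handler, file path, and error mapping are hypothetical.

use actix_web::{error, web, HttpResponse, Responder};

// Hypothetical handler that offloads blocking I/O so the async worker is not stalled.
async fn report() -> actix_web::Result<impl Responder> {
    let contents = web::block(|| std::fs::read_to_string("report.txt")) // blocking call
        .await
        .map_err(error::ErrorInternalServerError)? // BlockingError -> 500
        .map_err(error::ErrorInternalServerError)?; // std::io::Error -> 500

    Ok(HttpResponse::Ok().body(contents))
}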
@@ -1077,3 +1077,22 @@ async fn test_data_drop() {

    assert_eq!(num.load(Ordering::SeqCst), 0);
}

#[actix_rt::test]
async fn test_accept_encoding_no_match() {
    let srv = actix_test::start_with(actix_test::config().h1(), || {
        App::new()
            .wrap(Compress::default())
            .service(web::resource("/").route(web::to(move || HttpResponse::Ok().finish())))
    });

    let response = srv
        .get("/")
        .append_header((ACCEPT_ENCODING, "compress, identity;q=0"))
        .no_decompress()
        .send()
        .await
        .unwrap();

    assert_eq!(response.status().as_u16(), 406);
}
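A hypothetical companion test, not part of this diff, sketching the contrasting case: when the client lists an encoding the `Compress` middleware can produce (gzip here), the same endpoint is assumed to respond normally instead of returning 406.

#[actix_rt::test]
async fn test_accept_encoding_match_assumed() {
    // Same setup as `test_accept_encoding_no_match` above.
    let srv = actix_test::start_with(actix_test::config().h1(), || {
        App::new()
            .wrap(Compress::default())
            .service(web::resource("/").route(web::to(move || HttpResponse::Ok().finish())))
    });

    let response = srv
        .get("/")
        .append_header((ACCEPT_ENCODING, "gzip, identity;q=0"))
        .no_decompress()
        .send()
        .await
        .unwrap();

    // Expectation (our assumption): an acceptable encoding yields a normal 200 response.
    assert_eq!(response.status().as_u16(), 200);
}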