Merge branch 'master' into ServiceConfig-default-service

Rob Ede 2021-10-11 13:50:37 +01:00 committed by GitHub
commit 7449f416e4
110 changed files with 6235 additions and 1398 deletions

View File

@ -6,4 +6,4 @@ ci-min-test = "hack check --workspace --no-default-features --tests --examples"
ci-default = "check --workspace --bins --tests --examples" ci-default = "check --workspace --bins --tests --examples"
ci-full = "check --workspace --all-features --bins --tests --examples" ci-full = "check --workspace --all-features --bins --tests --examples"
ci-test = "test --workspace --all-features --lib --tests --no-fail-fast -- --nocapture" ci-test = "test --workspace --all-features --lib --tests --no-fail-fast -- --nocapture"
ci-doctest = "hack test --workspace --all-features --doc --no-fail-fast -- --nocapture" ci-doctest = "test --workspace --all-features --doc --no-fail-fast -- --nocapture"

View File

@ -16,7 +16,7 @@ jobs:
- { name: macOS, os: macos-latest, triple: x86_64-apple-darwin } - { name: macOS, os: macos-latest, triple: x86_64-apple-darwin }
- { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc } - { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc }
version: version:
- 1.46.0 # MSRV - 1.51.0 # MSRV
- stable - stable
- nightly - nightly
@ -24,6 +24,8 @@ jobs:
runs-on: ${{ matrix.target.os }} runs-on: ${{ matrix.target.os }}
env: env:
CI: 1
CARGO_INCREMENTAL: 0
VCPKGRS_DYNAMIC: 1 VCPKGRS_DYNAMIC: 1
steps: steps:
@ -80,13 +82,6 @@ jobs:
command: ci-test command: ci-test
args: --skip=test_reading_deflate_encoding_large_random_rustls args: --skip=test_reading_deflate_encoding_large_random_rustls
- name: doc tests
# due to unknown issue with running doc tests on macOS
if: matrix.target.os == 'ubuntu-latest'
uses: actions-rs/cargo@v1
timeout-minutes: 40
with: { command: ci-doctest }
- name: Generate coverage file - name: Generate coverage file
if: > if: >
matrix.target.os == 'ubuntu-latest' matrix.target.os == 'ubuntu-latest'
@ -106,5 +101,36 @@ jobs:
- name: Clear the cargo caches - name: Clear the cargo caches
run: | run: |
cargo install cargo-cache --version 0.6.2 --no-default-features --features ci-autoclean cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean
cargo-cache cargo-cache
rustdoc:
name: rustdoc
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install Rust (nightly)
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-x86_64-unknown-linux-gnu
profile: minimal
override: true
- name: Generate Cargo.lock
uses: actions-rs/cargo@v1
with: { command: generate-lockfile }
- name: Cache Dependencies
uses: Swatinem/rust-cache@v1.3.0
- name: Install cargo-hack
uses: actions-rs/cargo@v1
with:
command: install
args: cargo-hack
- name: doc tests
uses: actions-rs/cargo@v1
timeout-minutes: 40
with: { command: ci-doctest }

View File

@ -2,13 +2,39 @@
## Unreleased - 2021-xx-xx ## Unreleased - 2021-xx-xx
### Added ### Added
* Re-export actix-service `ServiceFactory` in `dev` module. [#2325] * Option to allow `Json` extractor to work without a `Content-Type` header present. [#2362]
* Add `ServiceConfig::default_service`. [#2338] * Add `ServiceConfig::default_service`. [#2338]
[#2325]: https://github.com/actix/actix-web/pull/2325 ### Changed
* Associated type `FromRequest::Config` was removed. [#2233]
* Inner field made private on `web::Payload`. [#2384]
[#2233]: https://github.com/actix/actix-web/pull/2233
[#2362]: https://github.com/actix/actix-web/pull/2362
[#2384]: https://github.com/actix/actix-web/pull/2384
[#2338]: https://github.com/actix/actix-web/pull/2338 [#2338]: https://github.com/actix/actix-web/pull/2338
## 4.0.0-beta.9 - 2021-09-09
### Added
* Re-export actix-service `ServiceFactory` in `dev` module. [#2325]
### Changed
* Compress middleware will return 406 Not Acceptable when no content encoding is acceptable to the client. [#2344]
* Move `BaseHttpResponse` to `dev::Response`. [#2379]
* Enable `TestRequest::param` to accept more than just static strings. [#2172]
* Minimum supported Rust version (MSRV) is now 1.51.
### Fixed
* Fix quality parse error in Accept-Encoding header. [#2344]
* Re-export correct type at `web::HttpResponse`. [#2379]
[#2172]: https://github.com/actix/actix-web/pull/2172
[#2325]: https://github.com/actix/actix-web/pull/2325
[#2344]: https://github.com/actix/actix-web/pull/2344
[#2379]: https://github.com/actix/actix-web/pull/2379
## 4.0.0-beta.8 - 2021-06-26 ## 4.0.0-beta.8 - 2021-06-26
### Added ### Added
* Add `ServiceRequest::parts_mut`. [#2177] * Add `ServiceRequest::parts_mut`. [#2177]

View File

@ -1,6 +1,6 @@
[package] [package]
name = "actix-web" name = "actix-web"
version = "4.0.0-beta.8" version = "4.0.0-beta.9"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"] authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust" description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust"
keywords = ["actix", "http", "web", "framework", "async"] keywords = ["actix", "http", "web", "framework", "async"]
@ -18,12 +18,14 @@ edition = "2018"
[package.metadata.docs.rs] [package.metadata.docs.rs]
# features that docs.rs will build with # features that docs.rs will build with
features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd", "cookies", "secure-cookies"] features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd", "cookies", "secure-cookies"]
rustdoc-args = ["--cfg", "docsrs"]
[lib] [lib]
name = "actix_web" name = "actix_web"
path = "src/lib.rs" path = "src/lib.rs"
[workspace] [workspace]
resolver = "2"
members = [ members = [
".", ".",
"awc", "awc",
@ -34,6 +36,7 @@ members = [
"actix-web-codegen", "actix-web-codegen",
"actix-http-test", "actix-http-test",
"actix-test", "actix-test",
"actix-router",
] ]
# enable when MSRV is 1.51+ # enable when MSRV is 1.51+
# resolver = "2" # resolver = "2"
@ -67,15 +70,15 @@ __compress = []
[dependencies] [dependencies]
actix-codec = "0.4.0" actix-codec = "0.4.0"
actix-macros = "0.2.1" actix-macros = "0.2.1"
actix-router = "0.2.7" actix-router = "0.5.0-beta.2"
actix-rt = "2.2" actix-rt = "2.2"
actix-server = "2.0.0-beta.3" actix-server = "2.0.0-beta.3"
actix-service = "2.0.0" actix-service = "2.0.0"
actix-utils = "3.0.0" actix-utils = "3.0.0"
actix-tls = { version = "3.0.0-beta.5", default-features = false, optional = true } actix-tls = { version = "3.0.0-beta.5", default-features = false, optional = true }
actix-web-codegen = "0.5.0-beta.2" actix-web-codegen = "0.5.0-beta.4"
actix-http = "3.0.0-beta.8" actix-http = "3.0.0-beta.10"
ahash = "0.7" ahash = "0.7"
bytes = "1" bytes = "1"
@ -97,14 +100,14 @@ regex = "1.4"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
serde_urlencoded = "0.7" serde_urlencoded = "0.7"
smallvec = "1.6" smallvec = "1.6.1"
socket2 = "0.4.0" socket2 = "0.4.0"
time = { version = "0.2.23", default-features = false, features = ["std"] } time = { version = "0.3", default-features = false, features = ["formatting"] }
url = "2.1" url = "2.1"
[dev-dependencies] [dev-dependencies]
actix-test = { version = "0.1.0-beta.3", features = ["openssl", "rustls"] } actix-test = { version = "0.1.0-beta.3", features = ["openssl", "rustls"] }
awc = { version = "3.0.0-beta.7", features = ["openssl"] } awc = { version = "3.0.0-beta.8", features = ["openssl"] }
brotli2 = "0.3.2" brotli2 = "0.3.2"
criterion = { version = "0.3", features = ["html_reports"] } criterion = { version = "0.3", features = ["html_reports"] }
@ -116,6 +119,10 @@ rcgen = "0.8"
tls-openssl = { package = "openssl", version = "0.10.9" } tls-openssl = { package = "openssl", version = "0.10.9" }
tls-rustls = { package = "rustls", version = "0.19.0" } tls-rustls = { package = "rustls", version = "0.19.0" }
[profile.dev]
# Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much.
debug = 0
[profile.release] [profile.release]
lto = true lto = true
opt-level = 3 opt-level = 3
@ -126,6 +133,7 @@ actix-files = { path = "actix-files" }
actix-http = { path = "actix-http" } actix-http = { path = "actix-http" }
actix-http-test = { path = "actix-http-test" } actix-http-test = { path = "actix-http-test" }
actix-multipart = { path = "actix-multipart" } actix-multipart = { path = "actix-multipart" }
actix-router = { path = "actix-router" }
actix-test = { path = "actix-test" } actix-test = { path = "actix-test" }
actix-web = { path = "." } actix-web = { path = "." }
actix-web-actors = { path = "actix-web-actors" } actix-web-actors = { path = "actix-web-actors" }

View File

@ -3,13 +3,16 @@
* The default `NormalizePath` behavior now strips trailing slashes by default. This was * The default `NormalizePath` behavior now strips trailing slashes by default. This was
previously documented to be the case in v3 but the behavior now matches. The effect is that previously documented to be the case in v3 but the behavior now matches. The effect is that
routes defined with trailing slashes will become inaccessible when routes defined with trailing slashes will become inaccessible when
using `NormalizePath::default()`. using `NormalizePath::default()`. As such, calling `NormalizePath::default()` will log a warning.
It is advised that the `new` method be used instead.
Before: `#[get("/test/")]` Before: `#[get("/test/")]`
After: `#[get("/test")]` After: `#[get("/test")]`
Alternatively, explicitly require trailing slashes: `NormalizePath::new(TrailingSlash::Always)`. Alternatively, explicitly require trailing slashes: `NormalizePath::new(TrailingSlash::Always)`.
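For example, a minimal sketch of setting the policy explicitly rather than relying on the default (the `Trim` variant and the route shown here are illustrative only):
```rust
use actix_web::{
    middleware::{NormalizePath, TrailingSlash},
    web, App, HttpResponse,
};

// Sketch only: pick the trailing-slash policy explicitly instead of relying on
// `NormalizePath::default()`, which now trims and logs a warning when used.
let app = App::new()
    .wrap(NormalizePath::new(TrailingSlash::Trim))
    .route("/test", web::get().to(|| async { HttpResponse::Ok().finish() }));
```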
* The `type Config` of `FromRequest` was removed.
* Feature flag `compress` has been split into its supported algorithm (brotli, gzip, zstd). * Feature flag `compress` has been split into its supported algorithm (brotli, gzip, zstd).
By default all compression algorithms are enabled. By default all compression algorithms are enabled.
To select algorithm you want to include with `middleware::Compress` use following flags: To select algorithm you want to include with `middleware::Compress` use following flags:

View File

@ -6,10 +6,10 @@
<p> <p>
[![crates.io](https://img.shields.io/crates/v/actix-web?label=latest)](https://crates.io/crates/actix-web) [![crates.io](https://img.shields.io/crates/v/actix-web?label=latest)](https://crates.io/crates/actix-web)
[![Documentation](https://docs.rs/actix-web/badge.svg?version=4.0.0-beta.8)](https://docs.rs/actix-web/4.0.0-beta.8) [![Documentation](https://docs.rs/actix-web/badge.svg?version=4.0.0-beta.9)](https://docs.rs/actix-web/4.0.0-beta.9)
[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-web.svg) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-web.svg)
[![Dependency Status](https://deps.rs/crate/actix-web/4.0.0-beta.8/status.svg)](https://deps.rs/crate/actix-web/4.0.0-beta.8) [![Dependency Status](https://deps.rs/crate/actix-web/4.0.0-beta.9/status.svg)](https://deps.rs/crate/actix-web/4.0.0-beta.9)
<br /> <br />
[![build status](https://github.com/actix/actix-web/workflows/CI%20%28Linux%29/badge.svg?branch=master&event=push)](https://github.com/actix/actix-web/actions) [![build status](https://github.com/actix/actix-web/workflows/CI%20%28Linux%29/badge.svg?branch=master&event=push)](https://github.com/actix/actix-web/actions)
[![codecov](https://codecov.io/gh/actix/actix-web/branch/master/graph/badge.svg)](https://codecov.io/gh/actix/actix-web) [![codecov](https://codecov.io/gh/actix/actix-web/branch/master/graph/badge.svg)](https://codecov.io/gh/actix/actix-web)
@ -32,7 +32,7 @@
* SSL support using OpenSSL or Rustls * SSL support using OpenSSL or Rustls
* Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/)) * Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/))
* Includes an async [HTTP client](https://docs.rs/awc/) * Includes an async [HTTP client](https://docs.rs/awc/)
* Runs on stable Rust 1.46+ * Runs on stable Rust 1.51+
## Documentation ## Documentation

View File

@ -3,6 +3,10 @@
## Unreleased - 2021-xx-xx ## Unreleased - 2021-xx-xx
## 0.6.0-beta.7 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.
## 0.6.0-beta.6 - 2021-06-26 ## 0.6.0-beta.6 - 2021-06-26
* Added `Files::path_filter()`. [#2274] * Added `Files::path_filter()`. [#2274]
* `Files::show_files_listing()` can now be used with `Files::index_file()` to show files listing as a fallback when the index file is not found. [#2228] * `Files::show_files_listing()` can now be used with `Files::index_file()` to show files listing as a fallback when the index file is not found. [#2228]

View File

@ -1,6 +1,6 @@
[package] [package]
name = "actix-files" name = "actix-files"
version = "0.6.0-beta.6" version = "0.6.0-beta.7"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"] authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Static file serving for Actix Web" description = "Static file serving for Actix Web"
keywords = ["actix", "http", "async", "futures"] keywords = ["actix", "http", "async", "futures"]
@ -15,8 +15,8 @@ name = "actix_files"
path = "src/lib.rs" path = "src/lib.rs"
[dependencies] [dependencies]
actix-web = { version = "4.0.0-beta.8", default-features = false } actix-web = { version = "4.0.0-beta.9", default-features = false }
actix-http = "3.0.0-beta.8" actix-http = "3.0.0-beta.10"
actix-service = "2.0.0" actix-service = "2.0.0"
actix-utils = "3.0.0" actix-utils = "3.0.0"
@ -33,5 +33,5 @@ percent-encoding = "2.1"
[dev-dependencies] [dev-dependencies]
actix-rt = "2.2" actix-rt = "2.2"
actix-web = "4.0.0-beta.8" actix-web = "4.0.0-beta.9"
actix-test = "0.1.0-beta.3" actix-test = "0.1.0-beta.3"

View File

@ -3,11 +3,11 @@
> Static file serving for Actix Web > Static file serving for Actix Web
[![crates.io](https://img.shields.io/crates/v/actix-files?label=latest)](https://crates.io/crates/actix-files) [![crates.io](https://img.shields.io/crates/v/actix-files?label=latest)](https://crates.io/crates/actix-files)
[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.0-beta.6)](https://docs.rs/actix-files/0.6.0-beta.6) [![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.0-beta.7)](https://docs.rs/actix-files/0.6.0-beta.7)
[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
![License](https://img.shields.io/crates/l/actix-files.svg) ![License](https://img.shields.io/crates/l/actix-files.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-files/0.6.0-beta.6/status.svg)](https://deps.rs/crate/actix-files/0.6.0-beta.6) [![dependency status](https://deps.rs/crate/actix-files/0.6.0-beta.7/status.svg)](https://deps.rs/crate/actix-files/0.6.0-beta.7)
[![Download](https://img.shields.io/crates/d/actix-files.svg)](https://crates.io/crates/actix-files) [![Download](https://img.shields.io/crates/d/actix-files.svg)](https://crates.io/crates/actix-files)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
@ -15,4 +15,4 @@
- [API Documentation](https://docs.rs/actix-files/) - [API Documentation](https://docs.rs/actix-files/)
- [Example Project](https://github.com/actix/examples/tree/master/basics/static_index) - [Example Project](https://github.com/actix/examples/tree/master/basics/static_index)
- Minimum supported Rust version: 1.46 or later - Minimum supported Rust version: 1.51 or later

View File

@ -21,6 +21,7 @@ impl ResponseError for FilesError {
} }
} }
#[allow(clippy::enum_variant_names)]
#[derive(Display, Debug, PartialEq)] #[derive(Display, Debug, PartialEq)]
pub enum UriSegmentError { pub enum UriSegmentError {
/// The segment started with the wrapped invalid character. /// The segment started with the wrapped invalid character.

View File

@ -106,7 +106,7 @@ impl Files {
}; };
Files { Files {
path: mount_path.to_owned(), path: mount_path.trim_end_matches('/').to_owned(),
directory: dir, directory: dir,
index: None, index: None,
show_index: false, show_index: false,

View File

@ -59,7 +59,6 @@ impl AsRef<Path> for PathBufWrap {
impl FromRequest for PathBufWrap { impl FromRequest for PathBufWrap {
type Error = UriSegmentError; type Error = UriSegmentError;
type Future = Ready<Result<Self, Self::Error>>; type Future = Ready<Result<Self, Self::Error>>;
type Config = ();
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
ready(req.match_info().path().parse()) ready(req.match_info().path().parse())

View File

@ -3,6 +3,10 @@
## Unreleased - 2021-xx-xx ## Unreleased - 2021-xx-xx
## 3.0.0-beta.5 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.
## 3.0.0-beta.4 - 2021-04-02 ## 3.0.0-beta.4 - 2021-04-02
* Added `TestServer::client_headers` method. [#2097] * Added `TestServer::client_headers` method. [#2097]

View File

@ -1,18 +1,18 @@
[package] [package]
name = "actix-http-test" name = "actix-http-test"
version = "3.0.0-beta.4" version = "3.0.0-beta.5"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"] authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Various helpers for Actix applications to use during testing" description = "Various helpers for Actix applications to use during testing"
readme = "README.md"
keywords = ["http", "web", "framework", "async", "futures"] keywords = ["http", "web", "framework", "async", "futures"]
homepage = "https://actix.rs" homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web.git" repository = "https://github.com/actix/actix-web.git"
documentation = "https://docs.rs/actix-http-test/" categories = [
categories = ["network-programming", "asynchronous", "network-programming",
"web-programming::http-server", "asynchronous",
"web-programming::websocket"] "web-programming::http-server",
"web-programming::websocket",
]
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
exclude = [".gitignore", ".cargo/config"]
edition = "2018" edition = "2018"
[package.metadata.docs.rs] [package.metadata.docs.rs]
@ -35,7 +35,7 @@ actix-tls = "3.0.0-beta.5"
actix-utils = "3.0.0" actix-utils = "3.0.0"
actix-rt = "2.2" actix-rt = "2.2"
actix-server = "2.0.0-beta.3" actix-server = "2.0.0-beta.3"
awc = { version = "3.0.0-beta.7", default-features = false } awc = { version = "3.0.0-beta.8", default-features = false }
base64 = "0.13" base64 = "0.13"
bytes = "1" bytes = "1"
@ -47,9 +47,8 @@ serde = "1.0"
serde_json = "1.0" serde_json = "1.0"
slab = "0.4" slab = "0.4"
serde_urlencoded = "0.7" serde_urlencoded = "0.7"
time = { version = "0.2.23", default-features = false, features = ["std"] }
tls-openssl = { version = "0.10.9", package = "openssl", optional = true } tls-openssl = { version = "0.10.9", package = "openssl", optional = true }
[dev-dependencies] [dev-dependencies]
actix-web = { version = "4.0.0-beta.8", default-features = false, features = ["cookies"] } actix-web = { version = "4.0.0-beta.9", default-features = false, features = ["cookies"] }
actix-http = "3.0.0-beta.8" actix-http = "3.0.0-beta.10"

View File

@ -3,15 +3,15 @@
> Various helpers for Actix applications to use during testing. > Various helpers for Actix applications to use during testing.
[![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test) [![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test)
[![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.0.0-beta.4)](https://docs.rs/actix-http-test/3.0.0-beta.4) [![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.0.0-beta.5)](https://docs.rs/actix-http-test/3.0.0-beta.5)
[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http-test) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http-test)
<br> <br>
[![Dependency Status](https://deps.rs/crate/actix-http-test/3.0.0-beta.4/status.svg)](https://deps.rs/crate/actix-http-test/3.0.0-beta.4) [![Dependency Status](https://deps.rs/crate/actix-http-test/3.0.0-beta.5/status.svg)](https://deps.rs/crate/actix-http-test/3.0.0-beta.5)
[![Download](https://img.shields.io/crates/d/actix-http-test.svg)](https://crates.io/crates/actix-http-test) [![Download](https://img.shields.io/crates/d/actix-http-test.svg)](https://crates.io/crates/actix-http-test)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources ## Documentation & Resources
- [API Documentation](https://docs.rs/actix-http-test) - [API Documentation](https://docs.rs/actix-http-test)
- Minimum Supported Rust Version (MSRV): 1.46.0 - Minimum Supported Rust Version (MSRV): 1.51.0

View File

@ -7,8 +7,7 @@
#[cfg(feature = "openssl")] #[cfg(feature = "openssl")]
extern crate tls_openssl as openssl; extern crate tls_openssl as openssl;
use std::sync::mpsc; use std::{net, sync::mpsc, thread, time::Duration};
use std::{net, thread, time};
use actix_codec::{AsyncRead, AsyncWrite, Framed}; use actix_codec::{AsyncRead, AsyncWrite, Framed};
use actix_rt::{net::TcpStream, System}; use actix_rt::{net::TcpStream, System};
@ -95,15 +94,15 @@ pub async fn test_server_with_addr<F: ServiceFactory<TcpStream>>(
.set_alpn_protos(b"\x02h2\x08http/1.1") .set_alpn_protos(b"\x02h2\x08http/1.1")
.map_err(|e| log::error!("Can not set alpn protocol: {:?}", e)); .map_err(|e| log::error!("Can not set alpn protocol: {:?}", e));
Connector::new() Connector::new()
.conn_lifetime(time::Duration::from_secs(0)) .conn_lifetime(Duration::from_secs(0))
.timeout(time::Duration::from_millis(30000)) .timeout(Duration::from_millis(30000))
.ssl(builder.build()) .ssl(builder.build())
} }
#[cfg(not(feature = "openssl"))] #[cfg(not(feature = "openssl"))]
{ {
Connector::new() Connector::new()
.conn_lifetime(time::Duration::from_secs(0)) .conn_lifetime(Duration::from_secs(0))
.timeout(time::Duration::from_millis(30000)) .timeout(Duration::from_millis(30000))
} }
}; };

View File

@ -3,6 +3,27 @@
## Unreleased - 2021-xx-xx ## Unreleased - 2021-xx-xx
## 3.0.0-beta.10 - 2021-09-09
### Changed
* `ContentEncoding` is now marked `#[non_exhaustive]`. [#2377]
* Minimum supported Rust version (MSRV) is now 1.51.
### Fixed
* Remove slice creation pointing to potential uninitialized data on h1 encoder. [#2364]
* Remove `Into<Error>` bound on `Encoder` body types. [#2375]
* Fix quality parse error in Accept-Encoding header. [#2344]
[#2364]: https://github.com/actix/actix-web/pull/2364
[#2375]: https://github.com/actix/actix-web/pull/2375
[#2344]: https://github.com/actix/actix-web/pull/2344
[#2377]: https://github.com/actix/actix-web/pull/2377
## 3.0.0-beta.9 - 2021-08-09
### Fixed
* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977)
## 3.0.0-beta.8 - 2021-06-26 ## 3.0.0-beta.8 - 2021-06-26
### Changed ### Changed
* Change compression algorithm features flags. [#2250] * Change compression algorithm features flags. [#2250]
@ -210,6 +231,11 @@
[#1878]: https://github.com/actix/actix-web/pull/1878 [#1878]: https://github.com/actix/actix-web/pull/1878
## 2.2.1 - 2021-08-09
### Fixed
* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977)
## 2.2.0 - 2020-11-25 ## 2.2.0 - 2020-11-25
### Added ### Added
* HttpResponse builders for 1xx status codes. [#1768] * HttpResponse builders for 1xx status codes. [#1768]

View File

@ -1,6 +1,6 @@
[package] [package]
name = "actix-http" name = "actix-http"
version = "3.0.0-beta.8" version = "3.0.0-beta.10"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"] authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "HTTP primitives for the Actix ecosystem" description = "HTTP primitives for the Actix ecosystem"
keywords = ["actix", "http", "framework", "async", "futures"] keywords = ["actix", "http", "framework", "async", "futures"]
@ -59,7 +59,8 @@ futures-core = { version = "0.3.7", default-features = false, features = ["alloc
futures-util = { version = "0.3.7", default-features = false, features = ["alloc", "sink"] } futures-util = { version = "0.3.7", default-features = false, features = ["alloc", "sink"] }
h2 = "0.3.1" h2 = "0.3.1"
http = "0.2.2" http = "0.2.2"
httparse = "1.3" httparse = "1.5.1"
httpdate = "1.0.1"
itoa = "0.4" itoa = "0.4"
language-tags = "0.3" language-tags = "0.3"
local-channel = "0.1" local-channel = "0.1"
@ -70,11 +71,8 @@ percent-encoding = "2.1"
pin-project = "1.0.0" pin-project = "1.0.0"
pin-project-lite = "0.2" pin-project-lite = "0.2"
rand = "0.8" rand = "0.8"
regex = "1.3"
serde = "1.0"
sha-1 = "0.9" sha-1 = "0.9"
smallvec = "1.6" smallvec = "1.6.1"
time = { version = "0.2.23", default-features = false, features = ["std"] }
tokio = { version = "1.2", features = ["sync"] } tokio = { version = "1.2", features = ["sync"] }
# compression # compression
@ -86,17 +84,18 @@ trust-dns-resolver = { version = "0.20.0", optional = true }
[dev-dependencies] [dev-dependencies]
actix-server = "2.0.0-beta.3" actix-server = "2.0.0-beta.3"
actix-http-test = { version = "3.0.0-beta.4", features = ["openssl"] } actix-http-test = { version = "3.0.0-beta.5", features = ["openssl"] }
actix-tls = { version = "3.0.0-beta.5", features = ["openssl"] } actix-tls = { version = "3.0.0-beta.5", features = ["openssl"] }
async-stream = "0.3" async-stream = "0.3"
criterion = { version = "0.3", features = ["html_reports"] } criterion = { version = "0.3", features = ["html_reports"] }
env_logger = "0.8" env_logger = "0.8"
rcgen = "0.8" rcgen = "0.8"
regex = "1.3"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
tls-openssl = { version = "0.10", package = "openssl" } tls-openssl = { version = "0.10", package = "openssl" }
tls-rustls = { version = "0.19", package = "rustls" } tls-rustls = { version = "0.19", package = "rustls" }
webpki = { version = "0.21.0" } webpki = { version = "0.21" }
[[example]] [[example]]
name = "ws" name = "ws"

View File

@ -3,18 +3,18 @@
> HTTP primitives for the Actix ecosystem. > HTTP primitives for the Actix ecosystem.
[![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http) [![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http)
[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.0.0-beta.8)](https://docs.rs/actix-http/3.0.0-beta.8) [![Documentation](https://docs.rs/actix-http/badge.svg?version=3.0.0-beta.10)](https://docs.rs/actix-http/3.0.0-beta.10)
[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-http/3.0.0-beta.8/status.svg)](https://deps.rs/crate/actix-http/3.0.0-beta.8) [![dependency status](https://deps.rs/crate/actix-http/3.0.0-beta.10/status.svg)](https://deps.rs/crate/actix-http/3.0.0-beta.10)
[![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http) [![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources ## Documentation & Resources
- [API Documentation](https://docs.rs/actix-http) - [API Documentation](https://docs.rs/actix-http)
- Minimum Supported Rust Version (MSRV): 1.46.0 - Minimum Supported Rust Version (MSRV): 1.51.0
## Example ## Example

View File

@ -18,7 +18,8 @@ fn bench_write_camel_case(c: &mut Criterion) {
group.bench_with_input(BenchmarkId::new("New", i), bts, |b, bts| { group.bench_with_input(BenchmarkId::new("New", i), bts, |b, bts| {
b.iter(|| { b.iter(|| {
let mut buf = black_box([0; 24]); let mut buf = black_box([0; 24]);
_new::write_camel_case(black_box(bts), &mut buf) let len = black_box(bts.len());
_new::write_camel_case(black_box(bts), buf.as_mut_ptr(), len)
}); });
}); });
} }
@ -30,9 +31,12 @@ criterion_group!(benches, bench_write_camel_case);
criterion_main!(benches); criterion_main!(benches);
mod _new { mod _new {
pub fn write_camel_case(value: &[u8], buffer: &mut [u8]) { pub fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
// first copy entire (potentially wrong) slice to output // first copy entire (potentially wrong) slice to output
buffer[..value.len()].copy_from_slice(value); let buffer = unsafe {
std::ptr::copy_nonoverlapping(value.as_ptr(), buf, len);
std::slice::from_raw_parts_mut(buf, len)
};
let mut iter = value.iter(); let mut iter = value.iter();

View File

@ -7,7 +7,7 @@ use std::{
}; };
use bytes::{Bytes, BytesMut}; use bytes::{Bytes, BytesMut};
use futures_core::{ready, Stream}; use futures_core::Stream;
use crate::error::Error; use crate::error::Error;
@ -74,14 +74,10 @@ impl MessageBody for AnyBody {
} }
} }
// TODO: MSRV 1.51: poll_map_err AnyBody::Message(body) => body
AnyBody::Message(body) => match ready!(body.as_pin_mut().poll_next(cx)) { .as_pin_mut()
Some(Err(err)) => { .poll_next(cx)
Poll::Ready(Some(Err(Error::new_body().with_cause(err)))) .map_err(|err| Error::new_body().with_cause(err)),
}
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
None => Poll::Ready(None),
},
} }
} }
} }
@ -223,11 +219,9 @@ impl MessageBody for BoxAnyBody {
mut self: Pin<&mut Self>, mut self: Pin<&mut Self>,
cx: &mut Context<'_>, cx: &mut Context<'_>,
) -> Poll<Option<Result<Bytes, Self::Error>>> { ) -> Poll<Option<Result<Bytes, Self::Error>>> {
// TODO: MSRV 1.51: poll_map_err self.0
match ready!(self.0.as_mut().poll_next(cx)) { .as_mut()
Some(Err(err)) => Poll::Ready(Some(Err(Error::new_body().with_cause(err)))), .poll_next(cx)
Some(Ok(val)) => Poll::Ready(Some(Ok(val))), .map_err(|err| Error::new_body().with_cause(err))
None => Poll::Ready(None),
}
} }
} }

View File

@ -11,8 +11,6 @@ use bytes::{Bytes, BytesMut};
use futures_core::ready; use futures_core::ready;
use pin_project_lite::pin_project; use pin_project_lite::pin_project;
use crate::error::Error;
use super::BodySize; use super::BodySize;
/// An interface for response bodies. /// An interface for response bodies.
@ -47,7 +45,6 @@ impl MessageBody for () {
impl<B> MessageBody for Box<B> impl<B> MessageBody for Box<B>
where where
B: MessageBody + Unpin, B: MessageBody + Unpin,
B::Error: Into<Error>,
{ {
type Error = B::Error; type Error = B::Error;
@ -66,7 +63,6 @@ where
impl<B> MessageBody for Pin<Box<B>> impl<B> MessageBody for Pin<Box<B>>
where where
B: MessageBody, B: MessageBody,
B::Error: Into<Error>,
{ {
type Error = B::Error; type Error = B::Error;

View File

@ -80,7 +80,7 @@ mod tests {
impl Body { impl Body {
pub(crate) fn get_ref(&self) -> &[u8] { pub(crate) fn get_ref(&self) -> &[u8] {
match *self { match *self {
Body::Bytes(ref bin) => &bin, Body::Bytes(ref bin) => bin,
_ => panic!(), _ => panic!(),
} }
} }

View File

@ -5,7 +5,7 @@ use std::{
}; };
use bytes::Bytes; use bytes::Bytes;
use futures_core::{ready, Stream}; use futures_core::Stream;
use pin_project::pin_project; use pin_project::pin_project;
use crate::error::Error; use crate::error::Error;
@ -77,12 +77,7 @@ where
cx: &mut Context<'_>, cx: &mut Context<'_>,
) -> Poll<Option<Self::Item>> { ) -> Poll<Option<Self::Item>> {
match self.project() { match self.project() {
// TODO: MSRV 1.51: poll_map_err ResponseBodyProj::Body(body) => body.poll_next(cx).map_err(Into::into),
ResponseBodyProj::Body(body) => match ready!(body.poll_next(cx)) {
Some(Err(err)) => Poll::Ready(Some(Err(err.into()))),
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
None => Poll::Ready(None),
},
ResponseBodyProj::Other(body) => Pin::new(body).poll_next(cx), ResponseBodyProj::Other(body) => Pin::new(body).poll_next(cx),
} }
} }

View File

@ -1,18 +1,19 @@
use std::cell::Cell; use std::{
use std::fmt::Write; cell::Cell,
use std::rc::Rc; fmt::{self, Write},
use std::time::Duration; net,
use std::{fmt, net}; rc::Rc,
time::{Duration, SystemTime},
};
use actix_rt::{ use actix_rt::{
task::JoinHandle, task::JoinHandle,
time::{interval, sleep_until, Instant, Sleep}, time::{interval, sleep_until, Instant, Sleep},
}; };
use bytes::BytesMut; use bytes::BytesMut;
use time::OffsetDateTime;
/// "Sun, 06 Nov 1994 08:49:37 GMT".len() /// "Sun, 06 Nov 1994 08:49:37 GMT".len()
const DATE_VALUE_LENGTH: usize = 29; pub(crate) const DATE_VALUE_LENGTH: usize = 29;
#[derive(Debug, PartialEq, Clone, Copy)] #[derive(Debug, PartialEq, Clone, Copy)]
/// Server keep-alive setting /// Server keep-alive setting
@ -206,12 +207,7 @@ impl Date {
fn update(&mut self) { fn update(&mut self) {
self.pos = 0; self.pos = 0;
write!( write!(self, "{}", httpdate::fmt_http_date(SystemTime::now())).unwrap();
self,
"{}",
OffsetDateTime::now_utc().format("%a, %d %b %Y %H:%M:%S GMT")
)
.unwrap();
} }
} }
@ -269,11 +265,11 @@ impl DateService {
} }
// TODO: move to a util module for testing all spawn handle drop style tasks. // TODO: move to a util module for testing all spawn handle drop style tasks.
#[cfg(test)]
/// Test Module for checking the drop state of certain async tasks that are spawned /// Test Module for checking the drop state of certain async tasks that are spawned
/// with `actix_rt::spawn` /// with `actix_rt::spawn`
/// ///
/// The target task must explicitly generate `NotifyOnDrop` when spawn the task /// The target task must explicitly generate `NotifyOnDrop` when spawn the task
#[cfg(test)]
mod notify_on_drop { mod notify_on_drop {
use std::cell::RefCell; use std::cell::RefCell;
@ -283,9 +279,8 @@ mod notify_on_drop {
/// Check if the spawned task is dropped. /// Check if the spawned task is dropped.
/// ///
/// # Panic: /// # Panics
/// /// Panics when there was no `NotifyOnDrop` instance on current thread.
/// When there was no `NotifyOnDrop` instance on current thread
pub(crate) fn is_dropped() -> bool { pub(crate) fn is_dropped() -> bool {
NOTIFY_DROPPED.with(|bool| { NOTIFY_DROPPED.with(|bool| {
bool.borrow() bool.borrow()

View File

@ -80,7 +80,7 @@ where
let encoding = headers let encoding = headers
.get(&CONTENT_ENCODING) .get(&CONTENT_ENCODING)
.and_then(|val| val.to_str().ok()) .and_then(|val| val.to_str().ok())
.map(ContentEncoding::from) .and_then(|x| x.parse().ok())
.unwrap_or(ContentEncoding::Identity); .unwrap_or(ContentEncoding::Identity);
Self::new(stream, encoding) Self::new(stream, encoding)

View File

@ -29,7 +29,7 @@ use crate::{
header::{ContentEncoding, CONTENT_ENCODING}, header::{ContentEncoding, CONTENT_ENCODING},
HeaderValue, StatusCode, HeaderValue, StatusCode,
}, },
Error, ResponseHead, ResponseHead,
}; };
use super::Writer; use super::Writer;
@ -107,7 +107,6 @@ enum EncoderBody<B> {
impl<B> MessageBody for EncoderBody<B> impl<B> MessageBody for EncoderBody<B>
where where
B: MessageBody, B: MessageBody,
B::Error: Into<Error>,
{ {
type Error = EncoderError<B::Error>; type Error = EncoderError<B::Error>;
@ -131,18 +130,9 @@ where
Poll::Ready(Some(Ok(std::mem::take(b)))) Poll::Ready(Some(Ok(std::mem::take(b))))
} }
} }
// TODO: MSRV 1.51: poll_map_err EncoderBodyProj::Stream(b) => b.poll_next(cx).map_err(EncoderError::Body),
EncoderBodyProj::Stream(b) => match ready!(b.poll_next(cx)) {
Some(Err(err)) => Poll::Ready(Some(Err(EncoderError::Body(err)))),
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
None => Poll::Ready(None),
},
EncoderBodyProj::BoxedStream(ref mut b) => { EncoderBodyProj::BoxedStream(ref mut b) => {
match ready!(b.as_pin_mut().poll_next(cx)) { b.as_pin_mut().poll_next(cx).map_err(EncoderError::Boxed)
Some(Err(err)) => Poll::Ready(Some(Err(EncoderError::Boxed(err)))),
Some(Ok(val)) => Poll::Ready(Some(Ok(val))),
None => Poll::Ready(None),
}
} }
} }
} }
@ -151,7 +141,6 @@ where
impl<B> MessageBody for Encoder<B> impl<B> MessageBody for Encoder<B>
where where
B: MessageBody, B: MessageBody,
B::Error: Into<Error>,
{ {
type Error = EncoderError<B::Error>; type Error = EncoderError<B::Error>;

View File

@ -196,7 +196,7 @@ pub enum ParseError {
#[display(fmt = "IO error: {}", _0)] #[display(fmt = "IO error: {}", _0)]
Io(io::Error), Io(io::Error),
/// Parsing a field as string failed /// Parsing a field as string failed.
#[display(fmt = "UTF8 error: {}", _0)] #[display(fmt = "UTF8 error: {}", _0)]
Utf8(Utf8Error), Utf8(Utf8Error),
} }

View File

@ -0,0 +1,432 @@
use std::{io, task::Poll};
use bytes::{Buf as _, Bytes, BytesMut};
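// Yields the next byte from the buffer, or returns Poll::Pending from the enclosing function when the buffer is empty.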
macro_rules! byte (
($rdr:ident) => ({
if $rdr.len() > 0 {
let b = $rdr[0];
$rdr.advance(1);
b
} else {
return Poll::Pending
}
})
);
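/// Decoder states for a chunked transfer-coded body; `step` advances the machine by at most one byte (or one body slice) per call.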
#[derive(Debug, PartialEq, Clone)]
pub(super) enum ChunkedState {
Size,
SizeLws,
Extension,
SizeLf,
Body,
BodyCr,
BodyLf,
EndCr,
EndLf,
End,
}
impl ChunkedState {
pub(super) fn step(
&self,
body: &mut BytesMut,
size: &mut u64,
buf: &mut Option<Bytes>,
) -> Poll<Result<ChunkedState, io::Error>> {
use self::ChunkedState::*;
match *self {
Size => ChunkedState::read_size(body, size),
SizeLws => ChunkedState::read_size_lws(body),
Extension => ChunkedState::read_extension(body),
SizeLf => ChunkedState::read_size_lf(body, *size),
Body => ChunkedState::read_body(body, size, buf),
BodyCr => ChunkedState::read_body_cr(body),
BodyLf => ChunkedState::read_body_lf(body),
EndCr => ChunkedState::read_end_cr(body),
EndLf => ChunkedState::read_end_lf(body),
End => Poll::Ready(Ok(ChunkedState::End)),
}
}
fn read_size(
rdr: &mut BytesMut,
size: &mut u64,
) -> Poll<Result<ChunkedState, io::Error>> {
let radix = 16;
let rem = match byte!(rdr) {
b @ b'0'..=b'9' => b - b'0',
b @ b'a'..=b'f' => b + 10 - b'a',
b @ b'A'..=b'F' => b + 10 - b'A',
b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)),
b';' => return Poll::Ready(Ok(ChunkedState::Extension)),
b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)),
_ => {
return Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size line: Invalid Size",
)));
}
};
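// Accumulate the parsed hex digit: size = size * 16 + rem, erroring if the multiplication would overflow u64.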
match size.checked_mul(radix) {
Some(n) => {
*size = n as u64;
*size += rem as u64;
Poll::Ready(Ok(ChunkedState::Size))
}
None => {
log::debug!("chunk size would overflow u64");
Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size line: Size is too big",
)))
}
}
}
fn read_size_lws(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
// LWS can follow the chunk size, but no more digits can come
b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)),
b';' => Poll::Ready(Ok(ChunkedState::Extension)),
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size linear white space",
))),
}
}
fn read_extension(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
// strictly 0x20 (space) should be disallowed but we don't parse quoted strings here
0x00..=0x08 | 0x0a..=0x1f | 0x7f => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid character in chunk extension",
))),
_ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions
}
}
fn read_size_lf(
rdr: &mut BytesMut,
size: u64,
) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\n' if size > 0 => Poll::Ready(Ok(ChunkedState::Body)),
b'\n' if size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size LF",
))),
}
}
fn read_body(
rdr: &mut BytesMut,
rem: &mut u64,
buf: &mut Option<Bytes>,
) -> Poll<Result<ChunkedState, io::Error>> {
log::trace!("Chunked read, remaining={:?}", rem);
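// Hand back as much of the buffered data as belongs to the current chunk, as a single Bytes slice.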
let len = rdr.len() as u64;
if len == 0 {
Poll::Ready(Ok(ChunkedState::Body))
} else {
let slice;
if *rem > len {
slice = rdr.split().freeze();
*rem -= len;
} else {
slice = rdr.split_to(*rem as usize).freeze();
*rem = 0;
}
*buf = Some(slice);
if *rem > 0 {
Poll::Ready(Ok(ChunkedState::Body))
} else {
Poll::Ready(Ok(ChunkedState::BodyCr))
}
}
}
fn read_body_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk body CR",
))),
}
}
fn read_body_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\n' => Poll::Ready(Ok(ChunkedState::Size)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk body LF",
))),
}
}
fn read_end_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk end CR",
))),
}
}
fn read_end_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\n' => Poll::Ready(Ok(ChunkedState::End)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk end LF",
))),
}
}
}
#[cfg(test)]
mod tests {
use actix_codec::Decoder as _;
use bytes::{Bytes, BytesMut};
use http::Method;
use crate::{
error::ParseError,
h1::decoder::{MessageDecoder, PayloadItem},
HttpMessage as _, Request,
};
macro_rules! parse_ready {
($e:expr) => {{
match MessageDecoder::<Request>::default().decode($e) {
Ok(Some((msg, _))) => msg,
Ok(_) => unreachable!("Eof during parsing http request"),
Err(err) => unreachable!("Error during parsing http request: {:?}", err),
}
}};
}
macro_rules! expect_parse_err {
($e:expr) => {{
match MessageDecoder::<Request>::default().decode($e) {
Err(err) => match err {
ParseError::Io(_) => unreachable!("Parse error expected"),
_ => {}
},
_ => unreachable!("Error expected"),
}
}};
}
#[test]
fn test_parse_chunked_payload_chunk_extension() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\
\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(msg.chunked().unwrap());
buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n")
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
assert_eq!(chunk, Bytes::from_static(b"data"));
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
assert_eq!(chunk, Bytes::from_static(b"line"));
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert!(msg.eof());
}
#[test]
fn test_request_chunked() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let req = parse_ready!(&mut buf);
if let Ok(val) = req.chunked() {
assert!(val);
} else {
unreachable!("Error");
}
// intentional typo in "chunked"
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chnked\r\n\r\n",
);
expect_parse_err!(&mut buf);
}
#[test]
fn test_http_request_chunked_payload() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(req.chunked().unwrap());
buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n");
assert_eq!(
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
b"data"
);
assert_eq!(
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
b"line"
);
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
}
#[test]
fn test_http_request_chunked_payload_and_next_message() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(req.chunked().unwrap());
buf.extend(
b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\
POST /test2 HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n"
.iter(),
);
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"data");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"line");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert!(msg.eof());
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
assert!(req.chunked().unwrap());
assert_eq!(*req.method(), Method::POST);
assert!(req.chunked().unwrap());
}
#[test]
fn test_http_request_chunked_payload_chunks() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(req.chunked().unwrap());
buf.extend(b"4\r\n1111\r\n");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"1111");
buf.extend(b"4\r\ndata\r");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"data");
buf.extend(b"\n4");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"\r");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"\n");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"li");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"li");
//trailers
//buf.feed_data("test: test\r\n");
//not_ready!(reader.parse(&mut buf, &mut readbuf));
buf.extend(b"ne\r\n0\r\n");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"ne");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"\r\n");
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
}
#[test]
fn chunk_extension_quoted() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
Host: localhost:8080\r\n\
Transfer-Encoding: chunked\r\n\
\r\n\
2;hello=b;one=\"1 2 3\"\r\n\
xx",
);
let mut reader = MessageDecoder::<Request>::default();
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
let chunk = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"xx")));
}
#[test]
fn hrs_chunk_extension_invalid() {
let mut buf = BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: localhost:8080\r\n\
Transfer-Encoding: chunked\r\n\
\r\n\
2;x\nx\r\n\
4c\r\n\
0\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
let err = pl.decode(&mut buf).unwrap_err();
assert!(err
.to_string()
.contains("Invalid character in chunk extension"));
}
#[test]
fn hrs_chunk_size_overflow() {
let mut buf = BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: example.com\r\n\
Transfer-Encoding: chunked\r\n\
\r\n\
f0000000000000003\r\n\
abc\r\n\
0\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
let err = pl.decode(&mut buf).unwrap_err();
assert!(err
.to_string()
.contains("Invalid chunk size line: Size is too big"));
}
}

View File

@ -1,18 +1,18 @@
use std::convert::TryFrom; use std::{convert::TryFrom, io, marker::PhantomData, mem::MaybeUninit, task::Poll};
use std::io;
use std::marker::PhantomData;
use std::task::Poll;
use actix_codec::Decoder; use actix_codec::Decoder;
use bytes::{Buf, Bytes, BytesMut}; use bytes::{Bytes, BytesMut};
use http::header::{HeaderName, HeaderValue}; use http::header::{HeaderName, HeaderValue};
use http::{header, Method, StatusCode, Uri, Version}; use http::{header, Method, StatusCode, Uri, Version};
use log::{debug, error, trace}; use log::{debug, error, trace};
use crate::error::ParseError; use super::chunked::ChunkedState;
use crate::header::HeaderMap; use crate::{
use crate::message::{ConnectionType, ResponseHead}; error::ParseError,
use crate::request::Request; header::HeaderMap,
message::{ConnectionType, ResponseHead},
request::Request,
};
pub(crate) const MAX_BUFFER_SIZE: usize = 131_072; pub(crate) const MAX_BUFFER_SIZE: usize = 131_072;
const MAX_HEADERS: usize = 96; const MAX_HEADERS: usize = 96;
@ -67,6 +67,7 @@ pub(crate) trait MessageType: Sized {
let mut has_upgrade_websocket = false; let mut has_upgrade_websocket = false;
let mut expect = false; let mut expect = false;
let mut chunked = false; let mut chunked = false;
let mut seen_te = false;
let mut content_length = None; let mut content_length = None;
{ {
@ -85,8 +86,17 @@ pub(crate) trait MessageType: Sized {
}; };
match name { match name {
header::CONTENT_LENGTH => { header::CONTENT_LENGTH if content_length.is_some() => {
if let Ok(s) = value.to_str() { debug!("multiple Content-Length");
return Err(ParseError::Header);
}
header::CONTENT_LENGTH => match value.to_str() {
Ok(s) if s.trim().starts_with('+') => {
debug!("illegal Content-Length: {:?}", s);
return Err(ParseError::Header);
}
Ok(s) => {
if let Ok(len) = s.parse::<u64>() { if let Ok(len) = s.parse::<u64>() {
if len != 0 { if len != 0 {
content_length = Some(len); content_length = Some(len);
@ -95,15 +105,31 @@ pub(crate) trait MessageType: Sized {
debug!("illegal Content-Length: {:?}", s); debug!("illegal Content-Length: {:?}", s);
return Err(ParseError::Header); return Err(ParseError::Header);
} }
} else { }
Err(_) => {
debug!("illegal Content-Length: {:?}", value); debug!("illegal Content-Length: {:?}", value);
return Err(ParseError::Header); return Err(ParseError::Header);
} }
} },
// transfer-encoding // transfer-encoding
header::TRANSFER_ENCODING if seen_te => {
debug!("multiple Transfer-Encoding not allowed");
return Err(ParseError::Header);
}
header::TRANSFER_ENCODING => { header::TRANSFER_ENCODING => {
seen_te = true;
if let Ok(s) = value.to_str().map(str::trim) { if let Ok(s) = value.to_str().map(str::trim) {
chunked = s.eq_ignore_ascii_case("chunked"); if s.eq_ignore_ascii_case("chunked") {
chunked = true;
} else if s.eq_ignore_ascii_case("identity") {
// allow silently since multiple TE headers are already checked
} else {
debug!("illegal Transfer-Encoding: {:?}", s);
return Err(ParseError::Header);
}
} else { } else {
return Err(ParseError::Header); return Err(ParseError::Header);
} }
@ -186,10 +212,17 @@ impl MessageType for Request {
let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY; let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;
let (len, method, uri, ver, h_len) = { let (len, method, uri, ver, h_len) = {
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY; // SAFETY:
// Create an uninitialized array of `MaybeUninit`. The `assume_init` is
// safe because the type we are claiming to have initialized here is a
// bunch of `MaybeUninit`s, which do not require initialization.
let mut parsed = unsafe {
MaybeUninit::<[MaybeUninit<httparse::Header<'_>>; MAX_HEADERS]>::uninit()
.assume_init()
};
let mut req = httparse::Request::new(&mut parsed); let mut req = httparse::Request::new(&mut []);
match req.parse(src)? { match req.parse_with_uninit_headers(src, &mut parsed)? {
httparse::Status::Complete(len) => { httparse::Status::Complete(len) => {
let method = Method::from_bytes(req.method.unwrap().as_bytes()) let method = Method::from_bytes(req.method.unwrap().as_bytes())
.map_err(|_| ParseError::Method)?; .map_err(|_| ParseError::Method)?;
@ -408,20 +441,6 @@ enum Kind {
Eof, Eof,
} }
#[derive(Debug, PartialEq, Clone)]
enum ChunkedState {
Size,
SizeLws,
Extension,
SizeLf,
Body,
BodyCr,
BodyLf,
EndCr,
EndLf,
End,
}
impl Decoder for PayloadDecoder { impl Decoder for PayloadDecoder {
type Item = PayloadItem; type Item = PayloadItem;
type Error = io::Error; type Error = io::Error;
@ -451,19 +470,23 @@ impl Decoder for PayloadDecoder {
Kind::Chunked(ref mut state, ref mut size) => { Kind::Chunked(ref mut state, ref mut size) => {
loop { loop {
let mut buf = None; let mut buf = None;
// advances the chunked state // advances the chunked state
*state = match state.step(src, size, &mut buf) { *state = match state.step(src, size, &mut buf) {
Poll::Pending => return Ok(None), Poll::Pending => return Ok(None),
Poll::Ready(Ok(state)) => state, Poll::Ready(Ok(state)) => state,
Poll::Ready(Err(e)) => return Err(e), Poll::Ready(Err(e)) => return Err(e),
}; };
if *state == ChunkedState::End { if *state == ChunkedState::End {
trace!("End of chunked stream"); trace!("End of chunked stream");
return Ok(Some(PayloadItem::Eof)); return Ok(Some(PayloadItem::Eof));
} }
if let Some(buf) = buf { if let Some(buf) = buf {
return Ok(Some(PayloadItem::Chunk(buf))); return Ok(Some(PayloadItem::Chunk(buf)));
} }
if src.is_empty() { if src.is_empty() {
return Ok(None); return Ok(None);
} }
@ -480,201 +503,40 @@ impl Decoder for PayloadDecoder {
} }
} }
macro_rules! byte (
($rdr:ident) => ({
if $rdr.len() > 0 {
let b = $rdr[0];
$rdr.advance(1);
b
} else {
return Poll::Pending
}
})
);
impl ChunkedState {
fn step(
&self,
body: &mut BytesMut,
size: &mut u64,
buf: &mut Option<Bytes>,
) -> Poll<Result<ChunkedState, io::Error>> {
use self::ChunkedState::*;
match *self {
Size => ChunkedState::read_size(body, size),
SizeLws => ChunkedState::read_size_lws(body),
Extension => ChunkedState::read_extension(body),
SizeLf => ChunkedState::read_size_lf(body, size),
Body => ChunkedState::read_body(body, size, buf),
BodyCr => ChunkedState::read_body_cr(body),
BodyLf => ChunkedState::read_body_lf(body),
EndCr => ChunkedState::read_end_cr(body),
EndLf => ChunkedState::read_end_lf(body),
End => Poll::Ready(Ok(ChunkedState::End)),
}
}
fn read_size(
rdr: &mut BytesMut,
size: &mut u64,
) -> Poll<Result<ChunkedState, io::Error>> {
let radix = 16;
match byte!(rdr) {
b @ b'0'..=b'9' => {
*size *= radix;
*size += u64::from(b - b'0');
}
b @ b'a'..=b'f' => {
*size *= radix;
*size += u64::from(b + 10 - b'a');
}
b @ b'A'..=b'F' => {
*size *= radix;
*size += u64::from(b + 10 - b'A');
}
b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)),
b';' => return Poll::Ready(Ok(ChunkedState::Extension)),
b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)),
_ => {
return Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size line: Invalid Size",
)));
}
}
Poll::Ready(Ok(ChunkedState::Size))
}
fn read_size_lws(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
trace!("read_size_lws");
match byte!(rdr) {
// LWS can follow the chunk size, but no more digits can come
b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)),
b';' => Poll::Ready(Ok(ChunkedState::Extension)),
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size linear white space",
))),
}
}
fn read_extension(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
_ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions
}
}
fn read_size_lf(
rdr: &mut BytesMut,
size: &mut u64,
) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\n' if *size > 0 => Poll::Ready(Ok(ChunkedState::Body)),
b'\n' if *size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk size LF",
))),
}
}
fn read_body(
rdr: &mut BytesMut,
rem: &mut u64,
buf: &mut Option<Bytes>,
) -> Poll<Result<ChunkedState, io::Error>> {
trace!("Chunked read, remaining={:?}", rem);
let len = rdr.len() as u64;
if len == 0 {
Poll::Ready(Ok(ChunkedState::Body))
} else {
let slice;
if *rem > len {
slice = rdr.split().freeze();
*rem -= len;
} else {
slice = rdr.split_to(*rem as usize).freeze();
*rem = 0;
}
*buf = Some(slice);
if *rem > 0 {
Poll::Ready(Ok(ChunkedState::Body))
} else {
Poll::Ready(Ok(ChunkedState::BodyCr))
}
}
}
fn read_body_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk body CR",
))),
}
}
fn read_body_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\n' => Poll::Ready(Ok(ChunkedState::Size)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk body LF",
))),
}
}
fn read_end_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk end CR",
))),
}
}
fn read_end_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
match byte!(rdr) {
b'\n' => Poll::Ready(Ok(ChunkedState::End)),
_ => Poll::Ready(Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid chunk end LF",
))),
}
}
}
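As a rough illustration, the state machine above can be driven by hand like this (a sketch only, assuming it sits in the same module as the decoder; `byte!` returning `Poll::Pending` simply means more input is needed, and `size` doubles as the remaining-byte counter for the current chunk exactly as in `PayloadDecoder::decode`):

fn decode_chunks(input: &[u8]) -> Vec<Bytes> {
    use std::task::Poll;

    let mut body = BytesMut::from(input);
    let mut state = ChunkedState::Size;
    let mut size = 0u64;
    let mut chunks = Vec::new();

    loop {
        let mut buf = None;

        match state.step(&mut body, &mut size, &mut buf) {
            Poll::Ready(Ok(next)) => {
                if let Some(chunk) = buf {
                    chunks.push(chunk);
                }

                if next == ChunkedState::End {
                    return chunks;
                }

                state = next;
            }
            // `Pending` means "need more bytes"; an error means the chunked framing is invalid.
            Poll::Pending | Poll::Ready(Err(_)) => return chunks,
        }
    }
}

For example, `decode_chunks(b"4\r\ndata\r\n0\r\n\r\n")` walks Size → SizeLf → Body → BodyCr → BodyLf → Size → SizeLf → EndCr → EndLf → End and yields a single `"data"` chunk.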
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use bytes::{Bytes, BytesMut}; use bytes::{Bytes, BytesMut};
use http::{Method, Version}; use http::{Method, Version};
use super::*; use super::*;
use crate::error::ParseError; use crate::{
use crate::http::header::{HeaderName, SET_COOKIE}; error::ParseError,
use crate::HttpMessage; http::header::{HeaderName, SET_COOKIE},
HttpMessage as _,
};
impl PayloadType { impl PayloadType {
fn unwrap(self) -> PayloadDecoder { pub(crate) fn unwrap(self) -> PayloadDecoder {
match self { match self {
PayloadType::Payload(pl) => pl, PayloadType::Payload(pl) => pl,
_ => panic!(), _ => panic!(),
} }
} }
fn is_unhandled(&self) -> bool { pub(crate) fn is_unhandled(&self) -> bool {
matches!(self, PayloadType::Stream(_)) matches!(self, PayloadType::Stream(_))
} }
} }
impl PayloadItem { impl PayloadItem {
fn chunk(self) -> Bytes { pub(crate) fn chunk(self) -> Bytes {
match self { match self {
PayloadItem::Chunk(chunk) => chunk, PayloadItem::Chunk(chunk) => chunk,
_ => panic!("error"), _ => panic!("error"),
} }
} }
fn eof(&self) -> bool {
pub(crate) fn eof(&self) -> bool {
matches!(*self, PayloadItem::Eof) matches!(*self, PayloadItem::Eof)
} }
} }
@ -967,34 +829,6 @@ mod tests {
assert!(req.upgrade()); assert!(req.upgrade());
} }
#[test]
fn test_request_chunked() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let req = parse_ready!(&mut buf);
if let Ok(val) = req.chunked() {
assert!(val);
} else {
unreachable!("Error");
}
// intentional typo in "chunked"
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chnked\r\n\r\n",
);
let req = parse_ready!(&mut buf);
if let Ok(val) = req.chunked() {
assert!(!val);
} else {
unreachable!("Error");
}
}
#[test] #[test]
fn test_headers_content_length_err_1() { fn test_headers_content_length_err_1() {
let mut buf = BytesMut::from( let mut buf = BytesMut::from(
@ -1112,126 +946,6 @@ mod tests {
expect_parse_err!(&mut buf); expect_parse_err!(&mut buf);
} }
#[test]
fn test_http_request_chunked_payload() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(req.chunked().unwrap());
buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n");
assert_eq!(
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
b"data"
);
assert_eq!(
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
b"line"
);
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
}
#[test]
fn test_http_request_chunked_payload_and_next_message() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(req.chunked().unwrap());
buf.extend(
b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\
POST /test2 HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n"
.iter(),
);
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"data");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"line");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert!(msg.eof());
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
assert!(req.chunked().unwrap());
assert_eq!(*req.method(), Method::POST);
assert!(req.chunked().unwrap());
}
#[test]
fn test_http_request_chunked_payload_chunks() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(req.chunked().unwrap());
buf.extend(b"4\r\n1111\r\n");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"1111");
buf.extend(b"4\r\ndata\r");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"data");
buf.extend(b"\n4");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"\r");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"\n");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"li");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"li");
//trailers
//buf.feed_data("test: test\r\n");
//not_ready!(reader.parse(&mut buf, &mut readbuf));
buf.extend(b"ne\r\n0\r\n");
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(msg.chunk().as_ref(), b"ne");
assert!(pl.decode(&mut buf).unwrap().is_none());
buf.extend(b"\r\n");
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
}
#[test]
fn test_parse_chunked_payload_chunk_extension() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
transfer-encoding: chunked\r\n\
\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
assert!(msg.chunked().unwrap());
buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n")
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
assert_eq!(chunk, Bytes::from_static(b"data"));
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
assert_eq!(chunk, Bytes::from_static(b"line"));
let msg = pl.decode(&mut buf).unwrap().unwrap();
assert!(msg.eof());
}
#[test] #[test]
fn test_response_http10_read_until_eof() { fn test_response_http10_read_until_eof() {
let mut buf = BytesMut::from("HTTP/1.0 200 Ok\r\n\r\ntest data"); let mut buf = BytesMut::from("HTTP/1.0 200 Ok\r\n\r\ntest data");
@ -1243,4 +957,84 @@ mod tests {
let chunk = pl.decode(&mut buf).unwrap().unwrap(); let chunk = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"test data"))); assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"test data")));
} }
#[test]
fn hrs_multiple_content_length() {
let mut buf = BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: example.com\r\n\
Content-Length: 4\r\n\
Content-Length: 2\r\n\
\r\n\
abcd",
);
expect_parse_err!(&mut buf);
}
#[test]
fn hrs_content_length_plus() {
let mut buf = BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: example.com\r\n\
Content-Length: +3\r\n\
\r\n\
000",
);
expect_parse_err!(&mut buf);
}
#[test]
fn hrs_unknown_transfer_encoding() {
let mut buf = BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: example.com\r\n\
Transfer-Encoding: JUNK\r\n\
Transfer-Encoding: chunked\r\n\
\r\n\
5\r\n\
hello\r\n\
0",
);
expect_parse_err!(&mut buf);
}
#[test]
fn hrs_multiple_transfer_encoding() {
let mut buf = BytesMut::from(
"GET / HTTP/1.1\r\n\
Host: example.com\r\n\
Content-Length: 51\r\n\
Transfer-Encoding: identity\r\n\
Transfer-Encoding: chunked\r\n\
\r\n\
0\r\n\
\r\n\
GET /forbidden HTTP/1.1\r\n\
Host: example.com\r\n\r\n",
);
expect_parse_err!(&mut buf);
}
#[test]
fn transfer_encoding_agrees() {
let mut buf = BytesMut::from(
"GET /test HTTP/1.1\r\n\
Host: example.com\r\n\
Content-Length: 3\r\n\
Transfer-Encoding: identity\r\n\
\r\n\
0\r\n",
);
let mut reader = MessageDecoder::<Request>::default();
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
let mut pl = pl.unwrap();
let chunk = pl.decode(&mut buf).unwrap().unwrap();
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"0\r\n")));
}
} }

View File

@ -1060,7 +1060,7 @@ mod tests {
fn stabilize_date_header(payload: &mut [u8]) { fn stabilize_date_header(payload: &mut [u8]) {
let mut from = 0; let mut from = 0;
while let Some(pos) = find_slice(&payload, b"date", from) { while let Some(pos) = find_slice(payload, b"date", from) {
payload[(from + pos)..(from + pos + 35)] payload[(from + pos)..(from + pos + 35)]
.copy_from_slice(b"date: Thu, 01 Jan 1970 12:34:56 UTC"); .copy_from_slice(b"date: Thu, 01 Jan 1970 12:34:56 UTC");
from += 35; from += 35;

View File

@ -81,6 +81,7 @@ pub(crate) trait MessageType: Sized {
match length { match length {
BodySize::Stream => { BodySize::Stream => {
if chunked { if chunked {
skip_len = true;
if camel_case { if camel_case {
dst.put_slice(b"\r\nTransfer-Encoding: chunked\r\n") dst.put_slice(b"\r\nTransfer-Encoding: chunked\r\n")
} else { } else {
@ -174,7 +175,7 @@ pub(crate) trait MessageType: Sized {
unsafe { unsafe {
if camel_case { if camel_case {
// use Camel-Case headers // use Camel-Case headers
write_camel_case(k, from_raw_parts_mut(buf, k_len)); write_camel_case(k, buf, k_len);
} else { } else {
write_data(k, buf, k_len); write_data(k, buf, k_len);
} }
@ -472,15 +473,22 @@ impl TransferEncoding {
} }
/// # Safety /// # Safety
/// Callers must ensure that the given length matches given value length. /// Callers must ensure that the given `len` matches the given `value` length and that `buf` is
/// valid for writes of at least `len` bytes.
unsafe fn write_data(value: &[u8], buf: *mut u8, len: usize) { unsafe fn write_data(value: &[u8], buf: *mut u8, len: usize) {
debug_assert_eq!(value.len(), len); debug_assert_eq!(value.len(), len);
copy_nonoverlapping(value.as_ptr(), buf, len); copy_nonoverlapping(value.as_ptr(), buf, len);
} }
fn write_camel_case(value: &[u8], buffer: &mut [u8]) { /// # Safety
/// Callers must ensure that the given `len` matches the given `value` length and that `buf` is
/// valid for writes of at least `len` bytes.
unsafe fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
// first copy entire (potentially wrong) slice to output // first copy entire (potentially wrong) slice to output
buffer[..value.len()].copy_from_slice(value); write_data(value, buf, len);
// SAFETY: We just initialized the buffer with `value`
let buffer = from_raw_parts_mut(buf, len);
let mut iter = value.iter(); let mut iter = value.iter();
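A small sketch of the contract described by the safety comments above, assuming it runs in the same module as these helpers: the caller allocates exactly `len` writable bytes before handing over the raw pointer, and `write_camel_case` title-cases each hyphen-separated token in place.

fn demo_camel_case() {
    let name = b"content-length";
    let mut out = vec![0u8; name.len()];

    // SAFETY: `out.len() == name.len()` and the Vec's buffer is valid for writes of that length.
    unsafe { write_camel_case(name, out.as_mut_ptr(), name.len()) };

    assert_eq!(out, b"Content-Length".to_vec());
}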

View File

@ -1,6 +1,8 @@
//! HTTP/1 protocol implementation. //! HTTP/1 protocol implementation.
use bytes::{Bytes, BytesMut}; use bytes::{Bytes, BytesMut};
mod chunked;
mod client; mod client;
mod codec; mod codec;
mod decoder; mod decoder;

View File

@ -63,7 +63,6 @@ where
.is_write_buf_full() .is_write_buf_full()
{ {
let next = let next =
// TODO: MSRV 1.51: poll_map_err
match this.body.as_mut().as_pin_mut().unwrap().poll_next(cx) { match this.body.as_mut().as_pin_mut().unwrap().poll_next(cx) {
Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)), Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)),
Poll::Ready(Some(Err(err))) => { Poll::Ready(Some(Err(err))) => {

View File

@ -684,7 +684,7 @@ impl<'a> Iterator for Iter<'a> {
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
// handle in-progress multi value lists first // handle in-progress multi value lists first
if let Some((ref name, ref mut vals)) = self.multi_inner { if let Some((name, ref mut vals)) = self.multi_inner {
match vals.get(self.multi_idx) { match vals.get(self.multi_idx) {
Some(val) => { Some(val) => {
self.multi_idx += 1; self.multi_idx += 1;

View File

@ -1,5 +1,6 @@
use std::{convert::Infallible, str::FromStr}; use std::{convert::TryFrom, str::FromStr};
use derive_more::{Display, Error};
use http::header::InvalidHeaderValue; use http::header::InvalidHeaderValue;
use crate::{ use crate::{
@ -8,8 +9,16 @@ use crate::{
HttpMessage, HttpMessage,
}; };
/// Error returned when a content encoding is unknown.
///
/// Example: 'compress'
#[derive(Debug, Display, Error)]
#[display(fmt = "unsupported content encoding")]
pub struct ContentEncodingParseError;
/// Represents a supported content encoding. /// Represents a supported content encoding.
#[derive(Copy, Clone, PartialEq, Debug)] #[derive(Debug, Clone, Copy, PartialEq)]
#[non_exhaustive]
pub enum ContentEncoding { pub enum ContentEncoding {
/// Automatically select encoding based on encoding negotiation. /// Automatically select encoding based on encoding negotiation.
Auto, Auto,
@ -37,7 +46,7 @@ impl ContentEncoding {
matches!(self, ContentEncoding::Identity | ContentEncoding::Auto) matches!(self, ContentEncoding::Identity | ContentEncoding::Auto)
} }
/// Convert content encoding to string /// Convert content encoding to string.
#[inline] #[inline]
pub fn as_str(self) -> &'static str { pub fn as_str(self) -> &'static str {
match self { match self {
@ -48,18 +57,6 @@ impl ContentEncoding {
ContentEncoding::Identity | ContentEncoding::Auto => "identity", ContentEncoding::Identity | ContentEncoding::Auto => "identity",
} }
} }
/// Default Q-factor (quality) value.
#[inline]
pub fn quality(self) -> f64 {
match self {
ContentEncoding::Br => 1.1,
ContentEncoding::Gzip => 1.0,
ContentEncoding::Deflate => 0.9,
ContentEncoding::Identity | ContentEncoding::Auto => 0.1,
ContentEncoding::Zstd => 0.0,
}
}
} }
impl Default for ContentEncoding { impl Default for ContentEncoding {
@ -69,31 +66,33 @@ impl Default for ContentEncoding {
} }
impl FromStr for ContentEncoding { impl FromStr for ContentEncoding {
type Err = Infallible; type Err = ContentEncodingParseError;
fn from_str(val: &str) -> Result<Self, Self::Err> { fn from_str(val: &str) -> Result<Self, Self::Err> {
Ok(Self::from(val))
}
}
impl From<&str> for ContentEncoding {
fn from(val: &str) -> ContentEncoding {
let val = val.trim(); let val = val.trim();
if val.eq_ignore_ascii_case("br") { if val.eq_ignore_ascii_case("br") {
ContentEncoding::Br Ok(ContentEncoding::Br)
} else if val.eq_ignore_ascii_case("gzip") { } else if val.eq_ignore_ascii_case("gzip") {
ContentEncoding::Gzip Ok(ContentEncoding::Gzip)
} else if val.eq_ignore_ascii_case("deflate") { } else if val.eq_ignore_ascii_case("deflate") {
ContentEncoding::Deflate Ok(ContentEncoding::Deflate)
} else if val.eq_ignore_ascii_case("zstd") { } else if val.eq_ignore_ascii_case("zstd") {
ContentEncoding::Zstd Ok(ContentEncoding::Zstd)
} else { } else {
ContentEncoding::default() Err(ContentEncodingParseError)
} }
} }
} }
impl TryFrom<&str> for ContentEncoding {
type Error = ContentEncodingParseError;
fn try_from(val: &str) -> Result<Self, Self::Error> {
val.parse()
}
}
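A short sketch of the new fallible parsing, assuming `ContentEncoding` and `ContentEncodingParseError` are in scope as above:

fn demo_parse_encoding() {
    use std::convert::TryFrom;

    // Known encodings parse case-insensitively.
    assert_eq!("GZIP".parse::<ContentEncoding>().ok(), Some(ContentEncoding::Gzip));
    assert_eq!(ContentEncoding::try_from("br").ok(), Some(ContentEncoding::Br));

    // Unknown tokens now surface `ContentEncodingParseError` instead of
    // silently falling back to the default encoding.
    assert!("compress".parse::<ContentEncoding>().is_err());
}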
impl IntoHeaderValue for ContentEncoding { impl IntoHeaderValue for ContentEncoding {
type Error = InvalidHeaderValue; type Error = InvalidHeaderValue;

View File

@ -0,0 +1,82 @@
use std::{fmt, io::Write, str::FromStr, time::SystemTime};
use bytes::BytesMut;
use http::header::{HeaderValue, InvalidHeaderValue};
use crate::{
config::DATE_VALUE_LENGTH, error::ParseError, header::IntoHeaderValue,
helpers::MutWriter,
};
/// A timestamp with HTTP formatting and parsing.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct HttpDate(SystemTime);
impl FromStr for HttpDate {
type Err = ParseError;
fn from_str(s: &str) -> Result<HttpDate, ParseError> {
match httpdate::parse_http_date(s) {
Ok(sys_time) => Ok(HttpDate(sys_time)),
Err(_) => Err(ParseError::Header),
}
}
}
impl fmt::Display for HttpDate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let date_str = httpdate::fmt_http_date(self.0);
f.write_str(&date_str)
}
}
impl IntoHeaderValue for HttpDate {
type Error = InvalidHeaderValue;
fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
let mut buf = BytesMut::with_capacity(DATE_VALUE_LENGTH);
let mut wrt = MutWriter(&mut buf);
// unwrap: date output is known to be well formed and of known length
write!(wrt, "{}", httpdate::fmt_http_date(self.0)).unwrap();
HeaderValue::from_maybe_shared(buf.split().freeze())
}
}
impl From<SystemTime> for HttpDate {
fn from(sys_time: SystemTime) -> HttpDate {
HttpDate(sys_time)
}
}
impl From<HttpDate> for SystemTime {
fn from(HttpDate(sys_time): HttpDate) -> SystemTime {
sys_time
}
}
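A quick sketch of round-tripping a value through the new `httpdate`-backed type, assuming it runs inside this module where `IntoHeaderValue` is already imported:

fn demo_http_date() {
    let date: HttpDate = "Mon, 07 Nov 1994 08:48:37 GMT".parse().unwrap();

    // Display and the header-value conversion both emit the canonical
    // IMF-fixdate form used by the `Date` header.
    assert_eq!(date.to_string(), "Mon, 07 Nov 1994 08:48:37 GMT");

    let value = date.try_into_value().unwrap();
    assert_eq!(value.to_str().unwrap(), "Mon, 07 Nov 1994 08:48:37 GMT");
}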
#[cfg(test)]
mod tests {
use std::time::Duration;
use super::*;
#[test]
fn date_header() {
macro_rules! assert_parsed_date {
($case:expr, $exp:expr) => {
assert_eq!($case.parse::<HttpDate>().unwrap(), $exp);
};
}
// 784198117 = SystemTime::from(datetime!(1994-11-07 08:48:37).assume_utc()).duration_since(SystemTime::UNIX_EPOCH)
let nov_07 = HttpDate(SystemTime::UNIX_EPOCH + Duration::from_secs(784198117));
assert_parsed_date!("Mon, 07 Nov 1994 08:48:37 GMT", nov_07);
assert_parsed_date!("Monday, 07-Nov-94 08:48:37 GMT", nov_07);
assert_parsed_date!("Mon Nov 7 08:48:37 1994", nov_07);
assert!("this-is-no-date".parse::<HttpDate>().is_err());
}
}

View File

@ -1,97 +0,0 @@
use std::{
fmt,
io::Write,
str::FromStr,
time::{SystemTime, UNIX_EPOCH},
};
use bytes::buf::BufMut;
use bytes::BytesMut;
use http::header::{HeaderValue, InvalidHeaderValue};
use time::{OffsetDateTime, PrimitiveDateTime, UtcOffset};
use crate::error::ParseError;
use crate::header::IntoHeaderValue;
use crate::time_parser;
/// A timestamp with HTTP formatting and parsing.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct HttpDate(OffsetDateTime);
impl FromStr for HttpDate {
type Err = ParseError;
fn from_str(s: &str) -> Result<HttpDate, ParseError> {
match time_parser::parse_http_date(s) {
Some(t) => Ok(HttpDate(t.assume_utc())),
None => Err(ParseError::Header),
}
}
}
impl fmt::Display for HttpDate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.0.format("%a, %d %b %Y %H:%M:%S GMT"), f)
}
}
impl From<SystemTime> for HttpDate {
fn from(sys: SystemTime) -> HttpDate {
HttpDate(PrimitiveDateTime::from(sys).assume_utc())
}
}
impl IntoHeaderValue for HttpDate {
type Error = InvalidHeaderValue;
fn try_into_value(self) -> Result<HeaderValue, Self::Error> {
let mut wrt = BytesMut::with_capacity(29).writer();
write!(
wrt,
"{}",
self.0
.to_offset(UtcOffset::UTC)
.format("%a, %d %b %Y %H:%M:%S GMT")
)
.unwrap();
HeaderValue::from_maybe_shared(wrt.get_mut().split().freeze())
}
}
impl From<HttpDate> for SystemTime {
fn from(date: HttpDate) -> SystemTime {
let dt = date.0;
let epoch = OffsetDateTime::unix_epoch();
UNIX_EPOCH + (dt - epoch)
}
}
#[cfg(test)]
mod tests {
use super::HttpDate;
use time::{date, time, PrimitiveDateTime};
#[test]
fn test_date() {
let nov_07 = HttpDate(
PrimitiveDateTime::new(date!(1994 - 11 - 07), time!(8:48:37)).assume_utc(),
);
assert_eq!(
"Sun, 07 Nov 1994 08:48:37 GMT".parse::<HttpDate>().unwrap(),
nov_07
);
assert_eq!(
"Sunday, 07-Nov-94 08:48:37 GMT"
.parse::<HttpDate>()
.unwrap(),
nov_07
);
assert_eq!(
"Sun Nov 7 08:48:37 1994".parse::<HttpDate>().unwrap(),
nov_07
);
assert!("this-is-no-date".parse::<HttpDate>().is_err());
}
}

View File

@ -3,12 +3,12 @@
mod charset; mod charset;
mod content_encoding; mod content_encoding;
mod extended; mod extended;
mod httpdate; mod http_date;
mod quality_item; mod quality_item;
pub use self::charset::Charset; pub use self::charset::Charset;
pub use self::content_encoding::ContentEncoding; pub use self::content_encoding::ContentEncoding;
pub use self::extended::{parse_extended_value, ExtendedValue}; pub use self::extended::{parse_extended_value, ExtendedValue};
pub use self::httpdate::HttpDate; pub use self::http_date::HttpDate;
pub use self::quality_item::{q, qitem, Quality, QualityItem}; pub use self::quality_item::{q, qitem, Quality, QualityItem};
pub use language_tags::LanguageTag; pub use language_tags::LanguageTag;

View File

@ -1,11 +1,14 @@
use std::{ use std::{
cmp, cmp,
convert::{TryFrom, TryInto}, convert::{TryFrom, TryInto},
fmt, str, fmt,
str::{self, FromStr},
}; };
use derive_more::{Display, Error}; use derive_more::{Display, Error};
use crate::error::ParseError;
const MAX_QUALITY: u16 = 1000; const MAX_QUALITY: u16 = 1000;
const MAX_FLOAT_QUALITY: f32 = 1.0; const MAX_FLOAT_QUALITY: f32 = 1.0;
@ -113,12 +116,12 @@ impl<T: fmt::Display> fmt::Display for QualityItem<T> {
} }
} }
impl<T: str::FromStr> str::FromStr for QualityItem<T> { impl<T: FromStr> FromStr for QualityItem<T> {
type Err = crate::error::ParseError; type Err = ParseError;
fn from_str(qitem_str: &str) -> Result<QualityItem<T>, crate::error::ParseError> { fn from_str(qitem_str: &str) -> Result<Self, Self::Err> {
if !qitem_str.is_ascii() { if !qitem_str.is_ascii() {
return Err(crate::error::ParseError::Header); return Err(ParseError::Header);
} }
// Set defaults used if parsing fails. // Set defaults used if parsing fails.
@ -139,7 +142,7 @@ impl<T: str::FromStr> str::FromStr for QualityItem<T> {
if parts[0].len() < 2 { if parts[0].len() < 2 {
// Can't possibly be an attribute since an attribute needs at least a name followed // Can't possibly be an attribute since an attribute needs at least a name followed
// by an equals sign. And bare identifiers are forbidden. // by an equals sign. And bare identifiers are forbidden.
return Err(crate::error::ParseError::Header); return Err(ParseError::Header);
} }
let start = &parts[0][0..2]; let start = &parts[0][0..2];
@ -148,25 +151,21 @@ impl<T: str::FromStr> str::FromStr for QualityItem<T> {
let q_val = &parts[0][2..]; let q_val = &parts[0][2..];
if q_val.len() > 5 { if q_val.len() > 5 {
// longer than 5 indicates an over-precise q-factor // longer than 5 indicates an over-precise q-factor
return Err(crate::error::ParseError::Header); return Err(ParseError::Header);
} }
let q_value = q_val let q_value = q_val.parse::<f32>().map_err(|_| ParseError::Header)?;
.parse::<f32>()
.map_err(|_| crate::error::ParseError::Header)?;
if (0f32..=1f32).contains(&q_value) { if (0f32..=1f32).contains(&q_value) {
quality = q_value; quality = q_value;
raw_item = parts[1]; raw_item = parts[1];
} else { } else {
return Err(crate::error::ParseError::Header); return Err(ParseError::Header);
} }
} }
} }
let item = raw_item let item = raw_item.parse::<T>().map_err(|_| ParseError::Header)?;
.parse::<T>()
.map_err(|_| crate::error::ParseError::Header)?;
// we already checked above that the quality is within range // we already checked above that the quality is within range
Ok(QualityItem::new(item, Quality::from_f32(quality))) Ok(QualityItem::new(item, Quality::from_f32(quality)))
@ -224,7 +223,7 @@ mod tests {
} }
} }
impl str::FromStr for Encoding { impl FromStr for Encoding {
type Err = crate::error::ParseError; type Err = crate::error::ParseError;
fn from_str(s: &str) -> Result<Encoding, crate::error::ParseError> { fn from_str(s: &str) -> Result<Encoding, crate::error::ParseError> {
use Encoding::*; use Encoding::*;

View File

@ -14,7 +14,7 @@
//! [rustls]: https://crates.io/crates/rustls //! [rustls]: https://crates.io/crates/rustls
//! [trust-dns]: https://crates.io/crates/trust-dns //! [trust-dns]: https://crates.io/crates/trust-dns
#![deny(rust_2018_idioms, nonstandard_style)] #![deny(rust_2018_idioms, nonstandard_style, clippy::uninit_assumed_init)]
#![allow( #![allow(
clippy::type_complexity, clippy::type_complexity,
clippy::too_many_arguments, clippy::too_many_arguments,
@ -44,7 +44,6 @@ mod request;
mod response; mod response;
mod response_builder; mod response_builder;
mod service; mod service;
mod time_parser;
pub mod error; pub mod error;
pub mod h1; pub mod h1;

View File

@ -209,7 +209,7 @@ impl RequestHeadType {
impl AsRef<RequestHead> for RequestHeadType { impl AsRef<RequestHead> for RequestHeadType {
fn as_ref(&self) -> &RequestHead { fn as_ref(&self) -> &RequestHead {
match self { match self {
RequestHeadType::Owned(head) => &head, RequestHeadType::Owned(head) => head,
RequestHeadType::Rc(head, _) => head.as_ref(), RequestHeadType::Rc(head, _) => head.as_ref(),
} }
} }
@ -363,7 +363,7 @@ impl<T: Head> std::ops::Deref for Message<T> {
type Target = T; type Target = T;
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
&self.head.as_ref() self.head.as_ref()
} }
} }

View File

@ -1,72 +0,0 @@
use time::{Date, OffsetDateTime, PrimitiveDateTime};
/// Attempt to parse a `time` string in one of the RFC 1123, RFC 850, or asctime formats.
pub(crate) fn parse_http_date(time: &str) -> Option<PrimitiveDateTime> {
try_parse_rfc_1123(time)
.or_else(|| try_parse_rfc_850(time))
.or_else(|| try_parse_asctime(time))
}
/// Attempt to parse a `time` string as a RFC 1123 formatted date time string.
///
/// Eg: `Fri, 12 Feb 2021 00:14:29 GMT`
fn try_parse_rfc_1123(time: &str) -> Option<PrimitiveDateTime> {
time::parse(time, "%a, %d %b %Y %H:%M:%S").ok()
}
/// Attempt to parse a `time` string as a RFC 850 formatted date time string.
///
/// Eg: `Wednesday, 11-Jan-21 13:37:41 UTC`
fn try_parse_rfc_850(time: &str) -> Option<PrimitiveDateTime> {
let dt = PrimitiveDateTime::parse(time, "%A, %d-%b-%y %H:%M:%S").ok()?;
// If the `time` string contains a two-digit year, then as per RFC 2616 § 19.3,
// we consider the year as part of this century if it's within the next 50 years,
// otherwise we consider it part of the previous century.
let now = OffsetDateTime::now_utc();
let century_start_year = (now.year() / 100) * 100;
let mut expanded_year = century_start_year + dt.year();
if expanded_year > now.year() + 50 {
expanded_year -= 100;
}
let date = Date::try_from_ymd(expanded_year, dt.month(), dt.day()).ok()?;
Some(PrimitiveDateTime::new(date, dt.time()))
}
/// Attempt to parse a `time` string using ANSI C's `asctime` format.
///
/// Eg: `Wed Feb 13 15:46:11 2013`
fn try_parse_asctime(time: &str) -> Option<PrimitiveDateTime> {
time::parse(time, "%a %b %_d %H:%M:%S %Y").ok()
}
#[cfg(test)]
mod tests {
use time::{date, time};
use super::*;
#[test]
fn test_rfc_850_year_shift() {
let date = try_parse_rfc_850("Friday, 19-Nov-82 16:14:55 EST").unwrap();
assert_eq!(date, date!(1982 - 11 - 19).with_time(time!(16:14:55)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-62 13:37:41 EST").unwrap();
assert_eq!(date, date!(2062 - 01 - 11).with_time(time!(13:37:41)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-21 13:37:41 EST").unwrap();
assert_eq!(date, date!(2021 - 01 - 11).with_time(time!(13:37:41)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-23 13:37:41 EST").unwrap();
assert_eq!(date, date!(2023 - 01 - 11).with_time(time!(13:37:41)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-99 13:37:41 EST").unwrap();
assert_eq!(date, date!(1999 - 01 - 11).with_time(time!(13:37:41)));
let date = try_parse_rfc_850("Wednesday, 11-Jan-00 13:37:41 EST").unwrap();
assert_eq!(date, date!(2000 - 01 - 11).with_time(time!(13:37:41)));
}
}

View File

@ -183,6 +183,7 @@ async fn test_chunked_payload() {
Some(caps) => caps.get(1).unwrap().as_str().parse().unwrap(), Some(caps) => caps.get(1).unwrap().as_str().parse().unwrap(),
None => panic!("Failed to find size in HTTP Response: {}", data), None => panic!("Failed to find size in HTTP Response: {}", data),
}; };
size size
}; };

View File

@ -3,6 +3,10 @@
## Unreleased - 2021-xx-xx ## Unreleased - 2021-xx-xx
## 0.4.0-beta.6 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.
## 0.4.0-beta.5 - 2021-06-17 ## 0.4.0-beta.5 - 2021-06-17
* No notable changes. * No notable changes.

View File

@ -1,13 +1,11 @@
[package] [package]
name = "actix-multipart" name = "actix-multipart"
version = "0.4.0-beta.5" version = "0.4.0-beta.6"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"] authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Multipart form support for Actix Web" description = "Multipart form support for Actix Web"
readme = "README.md"
keywords = ["http", "web", "framework", "async", "futures"] keywords = ["http", "web", "framework", "async", "futures"]
homepage = "https://actix.rs" homepage = "https://actix.rs"
repository = "https://github.com/actix/actix-web.git" repository = "https://github.com/actix/actix-web.git"
documentation = "https://docs.rs/actix-multipart"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
edition = "2018" edition = "2018"
@ -16,7 +14,7 @@ name = "actix_multipart"
path = "src/lib.rs" path = "src/lib.rs"
[dependencies] [dependencies]
actix-web = { version = "4.0.0-beta.8", default-features = false } actix-web = { version = "4.0.0-beta.9", default-features = false }
actix-utils = "3.0.0" actix-utils = "3.0.0"
bytes = "1" bytes = "1"
@ -31,6 +29,6 @@ twoway = "0.2"
[dev-dependencies] [dev-dependencies]
actix-rt = "2.2" actix-rt = "2.2"
actix-http = "3.0.0-beta.8" actix-http = "3.0.0-beta.10"
tokio = { version = "1", features = ["sync"] } tokio = { version = "1", features = ["sync"] }
tokio-stream = "0.1" tokio-stream = "0.1"

View File

@ -3,15 +3,15 @@
> Multipart form support for Actix Web. > Multipart form support for Actix Web.
[![crates.io](https://img.shields.io/crates/v/actix-multipart?label=latest)](https://crates.io/crates/actix-multipart) [![crates.io](https://img.shields.io/crates/v/actix-multipart?label=latest)](https://crates.io/crates/actix-multipart)
[![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.4.0-beta.5)](https://docs.rs/actix-multipart/0.4.0-beta.5) [![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.4.0-beta.6)](https://docs.rs/actix-multipart/0.4.0-beta.6)
[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart.svg) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-multipart/0.4.0-beta.5/status.svg)](https://deps.rs/crate/actix-multipart/0.4.0-beta.5) [![dependency status](https://deps.rs/crate/actix-multipart/0.4.0-beta.6/status.svg)](https://deps.rs/crate/actix-multipart/0.4.0-beta.6)
[![Download](https://img.shields.io/crates/d/actix-multipart.svg)](https://crates.io/crates/actix-multipart) [![Download](https://img.shields.io/crates/d/actix-multipart.svg)](https://crates.io/crates/actix-multipart)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources ## Documentation & Resources
- [API Documentation](https://docs.rs/actix-multipart) - [API Documentation](https://docs.rs/actix-multipart)
- Minimum Supported Rust Version (MSRV): 1.46.0 - Minimum Supported Rust Version (MSRV): 1.51.0

View File

@ -33,7 +33,6 @@ use crate::server::Multipart;
impl FromRequest for Multipart { impl FromRequest for Multipart {
type Error = Error; type Error = Error;
type Future = Ready<Result<Multipart, Error>>; type Future = Ready<Result<Multipart, Error>>;
type Config = ();
#[inline] #[inline]
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {

131
actix-router/CHANGES.md Normal file
View File

@ -0,0 +1,131 @@
# Changes
## Unreleased - 2021-xx-xx
## 0.5.0-beta.2 - 2021-09-09
* Introduce `ResourceDef::join`. [#380]
* Disallow prefix routes with tail segments. [#379]
* Enforce path separators on dynamic prefixes. [#378]
* Improve malformed path error message. [#384]
* Prefix segments now always end with a segment delimiter or end-of-input. [#2355]
* Prefix segments with trailing slashes define a trailing empty segment. [#2355]
* Support multi-pattern prefixes and joins. [#2356]
* `ResourceDef::pattern` now returns the first pattern in multi-pattern resources. [#2356]
* Support `build_resource_path` on multi-pattern resources. [#2356]
* Minimum supported Rust version (MSRV) is now 1.51.
[#378]: https://github.com/actix/actix-net/pull/378
[#379]: https://github.com/actix/actix-net/pull/379
[#380]: https://github.com/actix/actix-net/pull/380
[#384]: https://github.com/actix/actix-net/pull/384
[#2355]: https://github.com/actix/actix-web/pull/2355
[#2356]: https://github.com/actix/actix-web/pull/2356
## 0.5.0-beta.1 - 2021-07-20
* Fix a bug in multi-patterns where static patterns are interpreted as regex. [#366]
* Introduce `ResourceDef::pattern_iter` to get an iterator over all patterns in a multi-pattern resource. [#373]
* Fix segment interpolation leaving `Path` in unintended state after matching. [#368]
* Fix `ResourceDef` `PartialEq` implementation. [#373]
* Re-work `IntoPatterns` trait, adding a `Patterns` enum. [#372]
* Implement `IntoPatterns` for `bytestring::ByteString`. [#372]
* Rename `Path::{len => segment_count}` to be more descriptive of its purpose. [#370]
* Rename `ResourceDef::{resource_path => resource_path_from_iter}`. [#371]
* `ResourceDef::resource_path_from_iter` now takes an `IntoIterator`. [#373]
* Rename `ResourceDef::{resource_path_named => resource_path_from_map}`. [#371]
* Rename `ResourceDef::{is_prefix_match => find_match}`. [#373]
* Rename `ResourceDef::{match_path => capture_match_info}`. [#373]
* Rename `ResourceDef::{match_path_checked => capture_match_info_fn}`. [#373]
* Remove `ResourceDef::name_mut` and introduce `ResourceDef::set_name`. [#373]
* Rename `Router::{*_checked => *_fn}`. [#373]
* Return type of `ResourceDef::name` is now `Option<&str>`. [#373]
* Return type of `ResourceDef::pattern` is now `Option<&str>`. [#373]
[#366]: https://github.com/actix/actix-net/pull/366
[#368]: https://github.com/actix/actix-net/pull/368
[#370]: https://github.com/actix/actix-net/pull/370
[#371]: https://github.com/actix/actix-net/pull/371
[#372]: https://github.com/actix/actix-net/pull/372
[#373]: https://github.com/actix/actix-net/pull/373
## 0.4.0 - 2021-06-06
* When matching path parameters, `%25` is now kept in the percent-encoded form; no longer decoded to `%`. [#357]
* Path tail patterns now match new lines (`\n`) in request URL. [#360]
* Fixed a safety bug where `Path` could return a malformed string after percent decoding. [#359]
* Methods `Path::{add, add_static}` now take `impl Into<Cow<'static, str>>`. [#345]
[#345]: https://github.com/actix/actix-net/pull/345
[#357]: https://github.com/actix/actix-net/pull/357
[#359]: https://github.com/actix/actix-net/pull/359
[#360]: https://github.com/actix/actix-net/pull/360
## 0.3.0 - 2019-12-31
* Version was yanked previously. See https://crates.io/crates/actix-router/0.3.0
## 0.2.7 - 2021-02-06
* Add `Router::recognize_checked` [#247]
[#247]: https://github.com/actix/actix-net/pull/247
## 0.2.6 - 2021-01-09
* Use `bytestring` version range compatible with Bytes v1.0. [#246]
[#246]: https://github.com/actix/actix-net/pull/246
## 0.2.5 - 2020-09-20
* Fix `from_hex()` method
## 0.2.4 - 2019-12-31
* Add `ResourceDef::resource_path_named()` path generation method
## 0.2.3 - 2019-12-25
* Add impl `IntoPattern` for `&String`
## 0.2.2 - 2019-12-25
* Use `IntoPattern` for `RouterBuilder::path()`
## 0.2.1 - 2019-12-25
* Add `IntoPattern` trait
* Add multi-pattern resources
## 0.2.0 - 2019-12-07
* Update http to 0.2
* Update regex to 1.3
* Use bytestring instead of string
## 0.1.5 - 2019-05-15
* Remove debug prints
## 0.1.4 - 2019-05-15
* Fix checked resource match
## 0.1.3 - 2019-04-22
* Added support for `remainder match` (i.e. "/path/{tail}*")
## 0.1.2 - 2019-04-07
* Export `Quoter` type
* Allow to reset `Path` instance
## 0.1.1 - 2019-04-03
* Get dynamic segment by name instead of iterator.
## 0.1.0 - 2019-03-09
* Initial release

38
actix-router/Cargo.toml Normal file
View File

@ -0,0 +1,38 @@
[package]
name = "actix-router"
version = "0.5.0-beta.2"
authors = [
"Nikolay Kim <fafhrd91@gmail.com>",
"Ali MJ Al-Nasrawy <alimjalnasrawy@gmail.com>",
"Rob Ede <robjtede@icloud.com>",
]
description = "Resource path matching and router"
keywords = ["actix", "router", "routing"]
repository = "https://github.com/actix/actix-web.git"
license = "MIT OR Apache-2.0"
edition = "2018"
[lib]
name = "actix_router"
path = "src/lib.rs"
[features]
default = ["http"]
[dependencies]
bytestring = ">=0.1.5, <2"
firestorm = "0.4"
http = { version = "0.2.3", optional = true }
log = "0.4"
regex = "1.5"
serde = "1"
[dev-dependencies]
criterion = { version = "0.3", features = ["html_reports"] }
firestorm = { version = "0.4", features = ["enable_system_time"] }
http = "0.2.3"
serde = { version = "1", features = ["derive"] }
[[bench]]
name = "router"
harness = false

1
actix-router/LICENSE-APACHE Symbolic link
View File

@ -0,0 +1 @@
../LICENSE-APACHE

1
actix-router/LICENSE-MIT Symbolic link
View File

@ -0,0 +1 @@
../LICENSE-MIT

View File

@ -0,0 +1,194 @@
//! Based on <https://github.com/ibraheemdev/matchit/blob/master/benches/bench.rs>
use criterion::{black_box, criterion_group, criterion_main, Criterion};
macro_rules! register {
(colon) => {{
register!(finish => ":p1", ":p2", ":p3", ":p4")
}};
(brackets) => {{
register!(finish => "{p1}", "{p2}", "{p3}", "{p4}")
}};
(regex) => {{
register!(finish => "(.*)", "(.*)", "(.*)", "(.*)")
}};
(finish => $p1:literal, $p2:literal, $p3:literal, $p4:literal) => {{
let arr = [
concat!("/authorizations"),
concat!("/authorizations/", $p1),
concat!("/applications/", $p1, "/tokens/", $p2),
concat!("/events"),
concat!("/repos/", $p1, "/", $p2, "/events"),
concat!("/networks/", $p1, "/", $p2, "/events"),
concat!("/orgs/", $p1, "/events"),
concat!("/users/", $p1, "/received_events"),
concat!("/users/", $p1, "/received_events/public"),
concat!("/users/", $p1, "/events"),
concat!("/users/", $p1, "/events/public"),
concat!("/users/", $p1, "/events/orgs/", $p2),
concat!("/feeds"),
concat!("/notifications"),
concat!("/repos/", $p1, "/", $p2, "/notifications"),
concat!("/notifications/threads/", $p1),
concat!("/notifications/threads/", $p1, "/subscription"),
concat!("/repos/", $p1, "/", $p2, "/stargazers"),
concat!("/users/", $p1, "/starred"),
concat!("/user/starred"),
concat!("/user/starred/", $p1, "/", $p2),
concat!("/repos/", $p1, "/", $p2, "/subscribers"),
concat!("/users/", $p1, "/subscriptions"),
concat!("/user/subscriptions"),
concat!("/repos/", $p1, "/", $p2, "/subscription"),
concat!("/user/subscriptions/", $p1, "/", $p2),
concat!("/users/", $p1, "/gists"),
concat!("/gists"),
concat!("/gists/", $p1),
concat!("/gists/", $p1, "/star"),
concat!("/repos/", $p1, "/", $p2, "/git/blobs/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/commits/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/refs"),
concat!("/repos/", $p1, "/", $p2, "/git/tags/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/trees/", $p3),
concat!("/issues"),
concat!("/user/issues"),
concat!("/orgs/", $p1, "/issues"),
concat!("/repos/", $p1, "/", $p2, "/issues"),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3),
concat!("/repos/", $p1, "/", $p2, "/assignees"),
concat!("/repos/", $p1, "/", $p2, "/assignees/", $p3),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/events"),
concat!("/repos/", $p1, "/", $p2, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/labels/", $p3),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/milestones/"),
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3),
concat!("/emojis"),
concat!("/gitignore/templates"),
concat!("/gitignore/templates/", $p1),
concat!("/meta"),
concat!("/rate_limit"),
concat!("/users/", $p1, "/orgs"),
concat!("/user/orgs"),
concat!("/orgs/", $p1),
concat!("/orgs/", $p1, "/members"),
concat!("/orgs/", $p1, "/members", $p2),
concat!("/orgs/", $p1, "/public_members"),
concat!("/orgs/", $p1, "/public_members/", $p2),
concat!("/orgs/", $p1, "/teams"),
concat!("/teams/", $p1),
concat!("/teams/", $p1, "/members"),
concat!("/teams/", $p1, "/members", $p2),
concat!("/teams/", $p1, "/repos"),
concat!("/teams/", $p1, "/repos/", $p2, "/", $p3),
concat!("/user/teams"),
concat!("/repos/", $p1, "/", $p2, "/pulls"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/commits"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/files"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/merge"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/comments"),
concat!("/user/repos"),
concat!("/users/", $p1, "/repos"),
concat!("/orgs/", $p1, "/repos"),
concat!("/repositories"),
concat!("/repos/", $p1, "/", $p2),
concat!("/repos/", $p1, "/", $p2, "/contributors"),
concat!("/repos/", $p1, "/", $p2, "/languages"),
concat!("/repos/", $p1, "/", $p2, "/teams"),
concat!("/repos/", $p1, "/", $p2, "/tags"),
concat!("/repos/", $p1, "/", $p2, "/branches"),
concat!("/repos/", $p1, "/", $p2, "/branches/", $p3),
concat!("/repos/", $p1, "/", $p2, "/collaborators"),
concat!("/repos/", $p1, "/", $p2, "/collaborators/", $p3),
concat!("/repos/", $p1, "/", $p2, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/commits"),
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3),
concat!("/repos/", $p1, "/", $p2, "/readme"),
concat!("/repos/", $p1, "/", $p2, "/keys"),
concat!("/repos/", $p1, "/", $p2, "/keys", $p3),
concat!("/repos/", $p1, "/", $p2, "/downloads"),
concat!("/repos/", $p1, "/", $p2, "/downloads", $p3),
concat!("/repos/", $p1, "/", $p2, "/forks"),
concat!("/repos/", $p1, "/", $p2, "/hooks"),
concat!("/repos/", $p1, "/", $p2, "/hooks", $p3),
concat!("/repos/", $p1, "/", $p2, "/releases"),
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3),
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3, "/assets"),
concat!("/repos/", $p1, "/", $p2, "/stats/contributors"),
concat!("/repos/", $p1, "/", $p2, "/stats/commit_activity"),
concat!("/repos/", $p1, "/", $p2, "/stats/code_frequency"),
concat!("/repos/", $p1, "/", $p2, "/stats/participation"),
concat!("/repos/", $p1, "/", $p2, "/stats/punch_card"),
concat!("/repos/", $p1, "/", $p2, "/statuses/", $p3),
concat!("/search/repositories"),
concat!("/search/code"),
concat!("/search/issues"),
concat!("/search/users"),
concat!("/legacy/issues/search/", $p1, "/", $p2, "/", $p3, "/", $p4),
concat!("/legacy/repos/search/", $p1),
concat!("/legacy/user/search/", $p1),
concat!("/legacy/user/email/", $p1),
concat!("/users/", $p1),
concat!("/user"),
concat!("/users"),
concat!("/user/emails"),
concat!("/users/", $p1, "/followers"),
concat!("/user/followers"),
concat!("/users/", $p1, "/following"),
concat!("/user/following"),
concat!("/user/following/", $p1),
concat!("/users/", $p1, "/following", $p2),
concat!("/users/", $p1, "/keys"),
concat!("/user/keys"),
concat!("/user/keys/", $p1),
];
std::array::IntoIter::new(arr)
}};
}
fn call() -> impl Iterator<Item = &'static str> {
let arr = [
"/authorizations",
"/user/repos",
"/repos/rust-lang/rust/stargazers",
"/orgs/rust-lang/public_members/nikomatsakis",
"/repos/rust-lang/rust/releases/1.51.0",
];
std::array::IntoIter::new(arr)
}
fn compare_routers(c: &mut Criterion) {
let mut group = c.benchmark_group("Compare Routers");
let mut actix = actix_router::Router::<bool>::build();
for route in register!(brackets) {
actix.path(route, true);
}
let actix = actix.finish();
group.bench_function("actix", |b| {
b.iter(|| {
for route in call() {
let mut path = actix_router::Path::new(route);
black_box(actix.recognize(&mut path).unwrap());
}
});
});
let regex_set = regex::RegexSet::new(register!(regex)).unwrap();
group.bench_function("regex", |b| {
b.iter(|| {
for route in call() {
black_box(regex_set.matches(route));
}
});
});
group.finish();
}
criterion_group!(benches, compare_routers);
criterion_main!(benches);

View File

@ -0,0 +1,169 @@
macro_rules! register {
(brackets) => {{
register!(finish => "{p1}", "{p2}", "{p3}", "{p4}")
}};
(finish => $p1:literal, $p2:literal, $p3:literal, $p4:literal) => {{
let arr = [
concat!("/authorizations"),
concat!("/authorizations/", $p1),
concat!("/applications/", $p1, "/tokens/", $p2),
concat!("/events"),
concat!("/repos/", $p1, "/", $p2, "/events"),
concat!("/networks/", $p1, "/", $p2, "/events"),
concat!("/orgs/", $p1, "/events"),
concat!("/users/", $p1, "/received_events"),
concat!("/users/", $p1, "/received_events/public"),
concat!("/users/", $p1, "/events"),
concat!("/users/", $p1, "/events/public"),
concat!("/users/", $p1, "/events/orgs/", $p2),
concat!("/feeds"),
concat!("/notifications"),
concat!("/repos/", $p1, "/", $p2, "/notifications"),
concat!("/notifications/threads/", $p1),
concat!("/notifications/threads/", $p1, "/subscription"),
concat!("/repos/", $p1, "/", $p2, "/stargazers"),
concat!("/users/", $p1, "/starred"),
concat!("/user/starred"),
concat!("/user/starred/", $p1, "/", $p2),
concat!("/repos/", $p1, "/", $p2, "/subscribers"),
concat!("/users/", $p1, "/subscriptions"),
concat!("/user/subscriptions"),
concat!("/repos/", $p1, "/", $p2, "/subscription"),
concat!("/user/subscriptions/", $p1, "/", $p2),
concat!("/users/", $p1, "/gists"),
concat!("/gists"),
concat!("/gists/", $p1),
concat!("/gists/", $p1, "/star"),
concat!("/repos/", $p1, "/", $p2, "/git/blobs/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/commits/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/refs"),
concat!("/repos/", $p1, "/", $p2, "/git/tags/", $p3),
concat!("/repos/", $p1, "/", $p2, "/git/trees/", $p3),
concat!("/issues"),
concat!("/user/issues"),
concat!("/orgs/", $p1, "/issues"),
concat!("/repos/", $p1, "/", $p2, "/issues"),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3),
concat!("/repos/", $p1, "/", $p2, "/assignees"),
concat!("/repos/", $p1, "/", $p2, "/assignees/", $p3),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/events"),
concat!("/repos/", $p1, "/", $p2, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/labels/", $p3),
concat!("/repos/", $p1, "/", $p2, "/issues/", $p3, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3, "/labels"),
concat!("/repos/", $p1, "/", $p2, "/milestones/"),
concat!("/repos/", $p1, "/", $p2, "/milestones/", $p3),
concat!("/emojis"),
concat!("/gitignore/templates"),
concat!("/gitignore/templates/", $p1),
concat!("/meta"),
concat!("/rate_limit"),
concat!("/users/", $p1, "/orgs"),
concat!("/user/orgs"),
concat!("/orgs/", $p1),
concat!("/orgs/", $p1, "/members"),
concat!("/orgs/", $p1, "/members", $p2),
concat!("/orgs/", $p1, "/public_members"),
concat!("/orgs/", $p1, "/public_members/", $p2),
concat!("/orgs/", $p1, "/teams"),
concat!("/teams/", $p1),
concat!("/teams/", $p1, "/members"),
concat!("/teams/", $p1, "/members", $p2),
concat!("/teams/", $p1, "/repos"),
concat!("/teams/", $p1, "/repos/", $p2, "/", $p3),
concat!("/user/teams"),
concat!("/repos/", $p1, "/", $p2, "/pulls"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/commits"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/files"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/merge"),
concat!("/repos/", $p1, "/", $p2, "/pulls/", $p3, "/comments"),
concat!("/user/repos"),
concat!("/users/", $p1, "/repos"),
concat!("/orgs/", $p1, "/repos"),
concat!("/repositories"),
concat!("/repos/", $p1, "/", $p2),
concat!("/repos/", $p1, "/", $p2, "/contributors"),
concat!("/repos/", $p1, "/", $p2, "/languages"),
concat!("/repos/", $p1, "/", $p2, "/teams"),
concat!("/repos/", $p1, "/", $p2, "/tags"),
concat!("/repos/", $p1, "/", $p2, "/branches"),
concat!("/repos/", $p1, "/", $p2, "/branches/", $p3),
concat!("/repos/", $p1, "/", $p2, "/collaborators"),
concat!("/repos/", $p1, "/", $p2, "/collaborators/", $p3),
concat!("/repos/", $p1, "/", $p2, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3, "/comments"),
concat!("/repos/", $p1, "/", $p2, "/commits"),
concat!("/repos/", $p1, "/", $p2, "/commits/", $p3),
concat!("/repos/", $p1, "/", $p2, "/readme"),
concat!("/repos/", $p1, "/", $p2, "/keys"),
concat!("/repos/", $p1, "/", $p2, "/keys", $p3),
concat!("/repos/", $p1, "/", $p2, "/downloads"),
concat!("/repos/", $p1, "/", $p2, "/downloads", $p3),
concat!("/repos/", $p1, "/", $p2, "/forks"),
concat!("/repos/", $p1, "/", $p2, "/hooks"),
concat!("/repos/", $p1, "/", $p2, "/hooks", $p3),
concat!("/repos/", $p1, "/", $p2, "/releases"),
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3),
concat!("/repos/", $p1, "/", $p2, "/releases/", $p3, "/assets"),
concat!("/repos/", $p1, "/", $p2, "/stats/contributors"),
concat!("/repos/", $p1, "/", $p2, "/stats/commit_activity"),
concat!("/repos/", $p1, "/", $p2, "/stats/code_frequency"),
concat!("/repos/", $p1, "/", $p2, "/stats/participation"),
concat!("/repos/", $p1, "/", $p2, "/stats/punch_card"),
concat!("/repos/", $p1, "/", $p2, "/statuses/", $p3),
concat!("/search/repositories"),
concat!("/search/code"),
concat!("/search/issues"),
concat!("/search/users"),
concat!("/legacy/issues/search/", $p1, "/", $p2, "/", $p3, "/", $p4),
concat!("/legacy/repos/search/", $p1),
concat!("/legacy/user/search/", $p1),
concat!("/legacy/user/email/", $p1),
concat!("/users/", $p1),
concat!("/user"),
concat!("/users"),
concat!("/user/emails"),
concat!("/users/", $p1, "/followers"),
concat!("/user/followers"),
concat!("/users/", $p1, "/following"),
concat!("/user/following"),
concat!("/user/following/", $p1),
concat!("/users/", $p1, "/following", $p2),
concat!("/users/", $p1, "/keys"),
concat!("/user/keys"),
concat!("/user/keys/", $p1),
];
arr.to_vec()
}};
}
static PATHS: [&str; 5] = [
"/authorizations",
"/user/repos",
"/repos/rust-lang/rust/stargazers",
"/orgs/rust-lang/public_members/nikomatsakis",
"/repos/rust-lang/rust/releases/1.51.0",
];
fn main() {
let mut router = actix_router::Router::<bool>::build();
for route in register!(brackets) {
router.path(route, true);
}
let actix = router.finish();
if firestorm::enabled() {
firestorm::bench("target", || {
for &route in &PATHS {
let mut path = actix_router::Path::new(route);
actix.recognize(&mut path).unwrap();
}
})
.unwrap();
}
}

723
actix-router/src/de.rs Normal file
View File

@ -0,0 +1,723 @@
use serde::de::{self, Deserializer, Error as DeError, Visitor};
use serde::forward_to_deserialize_any;
use crate::path::{Path, PathIter};
use crate::ResourcePath;
macro_rules! unsupported_type {
($trait_fn:ident, $name:expr) => {
fn $trait_fn<V>(self, _: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom(concat!(
"unsupported type: ",
$name
)))
}
};
}
macro_rules! parse_single_value {
($trait_fn:ident, $visit_fn:ident, $tp:tt) => {
fn $trait_fn<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
if self.path.segment_count() != 1 {
Err(de::value::Error::custom(
format!(
"wrong number of parameters: {} expected 1",
self.path.segment_count()
)
.as_str(),
))
} else {
let v = self.path[0].parse().map_err(|_| {
de::value::Error::custom(format!(
"can not parse {:?} to a {}",
&self.path[0], $tp
))
})?;
visitor.$visit_fn(v)
}
}
};
}
pub struct PathDeserializer<'de, T: ResourcePath> {
path: &'de Path<T>,
}
impl<'de, T: ResourcePath + 'de> PathDeserializer<'de, T> {
pub fn new(path: &'de Path<T>) -> Self {
PathDeserializer { path }
}
}
impl<'de, T: ResourcePath + 'de> Deserializer<'de> for PathDeserializer<'de, T> {
type Error = de::value::Error;
fn deserialize_map<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_map(ParamsDeserializer {
params: self.path.iter(),
current: None,
})
}
fn deserialize_struct<V>(
self,
_: &'static str,
_: &'static [&'static str],
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
self.deserialize_map(visitor)
}
fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_unit()
}
fn deserialize_unit_struct<V>(
self,
_: &'static str,
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
self.deserialize_unit(visitor)
}
fn deserialize_newtype_struct<V>(
self,
_: &'static str,
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_newtype_struct(self)
}
fn deserialize_tuple<V>(self, len: usize, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
if self.path.segment_count() < len {
Err(de::value::Error::custom(
format!(
"wrong number of parameters: {} expected {}",
self.path.segment_count(),
len
)
.as_str(),
))
} else {
visitor.visit_seq(ParamsSeq {
params: self.path.iter(),
})
}
}
fn deserialize_tuple_struct<V>(
self,
_: &'static str,
len: usize,
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
if self.path.segment_count() < len {
Err(de::value::Error::custom(
format!(
"wrong number of parameters: {} expected {}",
self.path.segment_count(),
len
)
.as_str(),
))
} else {
visitor.visit_seq(ParamsSeq {
params: self.path.iter(),
})
}
}
fn deserialize_enum<V>(
self,
_: &'static str,
_: &'static [&'static str],
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
if self.path.is_empty() {
Err(de::value::Error::custom("expected at least one parameters"))
} else {
visitor.visit_enum(ValueEnum {
value: &self.path[0],
})
}
}
fn deserialize_str<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
if self.path.segment_count() != 1 {
Err(de::value::Error::custom(
format!(
"wrong number of parameters: {} expected 1",
self.path.segment_count()
)
.as_str(),
))
} else {
visitor.visit_str(&self.path[0])
}
}
fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_seq(ParamsSeq {
params: self.path.iter(),
})
}
unsupported_type!(deserialize_any, "'any'");
unsupported_type!(deserialize_bytes, "bytes");
unsupported_type!(deserialize_option, "Option<T>");
unsupported_type!(deserialize_identifier, "identifier");
unsupported_type!(deserialize_ignored_any, "ignored_any");
parse_single_value!(deserialize_bool, visit_bool, "bool");
parse_single_value!(deserialize_i8, visit_i8, "i8");
parse_single_value!(deserialize_i16, visit_i16, "i16");
parse_single_value!(deserialize_i32, visit_i32, "i32");
parse_single_value!(deserialize_i64, visit_i64, "i64");
parse_single_value!(deserialize_u8, visit_u8, "u8");
parse_single_value!(deserialize_u16, visit_u16, "u16");
parse_single_value!(deserialize_u32, visit_u32, "u32");
parse_single_value!(deserialize_u64, visit_u64, "u64");
parse_single_value!(deserialize_f32, visit_f32, "f32");
parse_single_value!(deserialize_f64, visit_f64, "f64");
parse_single_value!(deserialize_string, visit_string, "String");
parse_single_value!(deserialize_byte_buf, visit_string, "String");
parse_single_value!(deserialize_char, visit_char, "char");
}
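A rough usage sketch, assuming `ResourceDef` and `Path` from this crate and serde's derive feature (a dev-dependency above); `capture_match_info` fills the path's segment table before deserialization:

#[derive(Debug, PartialEq, serde::Deserialize)]
struct Params {
    user: String,
    repo: String,
}

fn demo_path_deserializer() {
    use serde::Deserialize;

    let resource = crate::ResourceDef::new("/repos/{user}/{repo}");
    let mut path = Path::new("/repos/actix/actix-web");
    assert!(resource.capture_match_info(&mut path));

    let params = Params::deserialize(PathDeserializer::new(&path)).unwrap();
    assert_eq!(
        params,
        Params {
            user: "actix".to_owned(),
            repo: "actix-web".to_owned(),
        }
    );
}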
struct ParamsDeserializer<'de, T: ResourcePath> {
params: PathIter<'de, T>,
current: Option<(&'de str, &'de str)>,
}
impl<'de, T: ResourcePath> de::MapAccess<'de> for ParamsDeserializer<'de, T> {
type Error = de::value::Error;
fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
where
K: de::DeserializeSeed<'de>,
{
self.current = self.params.next().map(|ref item| (item.0, item.1));
match self.current {
Some((key, _)) => Ok(Some(seed.deserialize(Key { key })?)),
None => Ok(None),
}
}
fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>
where
V: de::DeserializeSeed<'de>,
{
if let Some((_, value)) = self.current.take() {
seed.deserialize(Value { value })
} else {
Err(de::value::Error::custom("unexpected item"))
}
}
}
struct Key<'de> {
key: &'de str,
}
impl<'de> Deserializer<'de> for Key<'de> {
type Error = de::value::Error;
fn deserialize_identifier<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_str(self.key)
}
fn deserialize_any<V>(self, _visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom("Unexpected"))
}
forward_to_deserialize_any! {
bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
byte_buf option unit unit_struct newtype_struct seq tuple
tuple_struct map struct enum ignored_any
}
}
macro_rules! parse_value {
($trait_fn:ident, $visit_fn:ident, $tp:tt) => {
fn $trait_fn<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
let v = self.value.parse().map_err(|_| {
de::value::Error::custom(format!("can not parse {:?} to a {}", self.value, $tp))
})?;
visitor.$visit_fn(v)
}
};
}
struct Value<'de> {
value: &'de str,
}
impl<'de> Deserializer<'de> for Value<'de> {
type Error = de::value::Error;
parse_value!(deserialize_bool, visit_bool, "bool");
parse_value!(deserialize_i8, visit_i8, "i8");
parse_value!(deserialize_i16, visit_i16, "i16");
parse_value!(deserialize_i32, visit_i32, "i32");
parse_value!(deserialize_i64, visit_i64, "i64");
parse_value!(deserialize_u8, visit_u8, "u8");
parse_value!(deserialize_u16, visit_u16, "u16");
parse_value!(deserialize_u32, visit_u32, "u32");
parse_value!(deserialize_u64, visit_u64, "u64");
parse_value!(deserialize_f32, visit_f32, "f32");
parse_value!(deserialize_f64, visit_f64, "f64");
parse_value!(deserialize_string, visit_string, "String");
parse_value!(deserialize_byte_buf, visit_string, "String");
parse_value!(deserialize_char, visit_char, "char");
fn deserialize_ignored_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_unit()
}
fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_unit()
}
fn deserialize_unit_struct<V>(
self,
_: &'static str,
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_unit()
}
fn deserialize_bytes<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_borrowed_bytes(self.value.as_bytes())
}
fn deserialize_str<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_borrowed_str(self.value)
}
fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_some(self)
}
fn deserialize_enum<V>(
self,
_: &'static str,
_: &'static [&'static str],
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_enum(ValueEnum { value: self.value })
}
fn deserialize_newtype_struct<V>(
self,
_: &'static str,
visitor: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
visitor.visit_newtype_struct(self)
}
fn deserialize_tuple<V>(self, _: usize, _: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom("unsupported type: tuple"))
}
fn deserialize_struct<V>(
self,
_: &'static str,
_: &'static [&'static str],
_: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom("unsupported type: struct"))
}
fn deserialize_tuple_struct<V>(
self,
_: &'static str,
_: usize,
_: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom("unsupported type: tuple struct"))
}
unsupported_type!(deserialize_any, "any");
unsupported_type!(deserialize_seq, "seq");
unsupported_type!(deserialize_map, "map");
unsupported_type!(deserialize_identifier, "identifier");
}
struct ParamsSeq<'de, T: ResourcePath> {
params: PathIter<'de, T>,
}
impl<'de, T: ResourcePath> de::SeqAccess<'de> for ParamsSeq<'de, T> {
type Error = de::value::Error;
fn next_element_seed<U>(&mut self, seed: U) -> Result<Option<U::Value>, Self::Error>
where
U: de::DeserializeSeed<'de>,
{
match self.params.next() {
Some(item) => Ok(Some(seed.deserialize(Value { value: item.1 })?)),
None => Ok(None),
}
}
}
struct ValueEnum<'de> {
value: &'de str,
}
impl<'de> de::EnumAccess<'de> for ValueEnum<'de> {
type Error = de::value::Error;
type Variant = UnitVariant;
fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error>
where
V: de::DeserializeSeed<'de>,
{
Ok((seed.deserialize(Key { key: self.value })?, UnitVariant))
}
}
struct UnitVariant;
impl<'de> de::VariantAccess<'de> for UnitVariant {
type Error = de::value::Error;
fn unit_variant(self) -> Result<(), Self::Error> {
Ok(())
}
fn newtype_variant_seed<T>(self, _seed: T) -> Result<T::Value, Self::Error>
where
T: de::DeserializeSeed<'de>,
{
Err(de::value::Error::custom("not supported"))
}
fn tuple_variant<V>(self, _len: usize, _visitor: V) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom("not supported"))
}
fn struct_variant<V>(
self,
_: &'static [&'static str],
_: V,
) -> Result<V::Value, Self::Error>
where
V: Visitor<'de>,
{
Err(de::value::Error::custom("not supported"))
}
}
#[cfg(test)]
mod tests {
use serde::{de, Deserialize};
use super::*;
use crate::path::Path;
use crate::router::Router;
#[derive(Deserialize)]
struct MyStruct {
key: String,
value: String,
}
#[derive(Deserialize)]
struct Id {
_id: String,
}
#[derive(Debug, Deserialize)]
struct Test1(String, u32);
#[derive(Debug, Deserialize)]
struct Test2 {
key: String,
value: u32,
}
#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
enum TestEnum {
Val1,
Val2,
}
#[derive(Debug, Deserialize)]
struct Test3 {
val: TestEnum,
}
#[test]
fn test_request_extract() {
let mut router = Router::<()>::build();
router.path("/{key}/{value}/", ());
let router = router.finish();
let mut path = Path::new("/name/user1/");
assert!(router.recognize(&mut path).is_some());
let s: MyStruct = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(s.key, "name");
assert_eq!(s.value, "user1");
let s: (String, String) =
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(s.0, "name");
assert_eq!(s.1, "user1");
let mut router = Router::<()>::build();
router.path("/{key}/{value}/", ());
let router = router.finish();
let mut path = Path::new("/name/32/");
assert!(router.recognize(&mut path).is_some());
let s: Test1 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(s.0, "name");
assert_eq!(s.1, 32);
let s: Test2 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(s.key, "name");
assert_eq!(s.value, 32);
let s: (String, u8) =
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(s.0, "name");
assert_eq!(s.1, 32);
let res: Vec<String> =
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(res[0], "name".to_owned());
assert_eq!(res[1], "32".to_owned());
}
#[test]
fn test_extract_path_single() {
let mut router = Router::<()>::build();
router.path("/{value}/", ());
let router = router.finish();
let mut path = Path::new("/32/");
assert!(router.recognize(&mut path).is_some());
let i: i8 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(i, 32);
}
#[test]
fn test_extract_enum() {
let mut router = Router::<()>::build();
router.path("/{val}/", ());
let router = router.finish();
let mut path = Path::new("/val1/");
assert!(router.recognize(&mut path).is_some());
let i: TestEnum = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(i, TestEnum::Val1);
let mut router = Router::<()>::build();
router.path("/{val1}/{val2}/", ());
let router = router.finish();
let mut path = Path::new("/val1/val2/");
assert!(router.recognize(&mut path).is_some());
let i: (TestEnum, TestEnum) =
de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(i, (TestEnum::Val1, TestEnum::Val2));
}
#[test]
fn test_extract_enum_value() {
let mut router = Router::<()>::build();
router.path("/{val}/", ());
let router = router.finish();
let mut path = Path::new("/val1/");
assert!(router.recognize(&mut path).is_some());
let i: Test3 = de::Deserialize::deserialize(PathDeserializer::new(&path)).unwrap();
assert_eq!(i.val, TestEnum::Val1);
let mut path = Path::new("/val3/");
assert!(router.recognize(&mut path).is_some());
let i: Result<Test3, de::value::Error> =
de::Deserialize::deserialize(PathDeserializer::new(&path));
assert!(i.is_err());
assert!(format!("{:?}", i).contains("unknown variant"));
}
#[test]
fn test_extract_errors() {
let mut router = Router::<()>::build();
router.path("/{value}/", ());
let router = router.finish();
let mut path = Path::new("/name/");
assert!(router.recognize(&mut path).is_some());
let s: Result<Test1, de::value::Error> =
de::Deserialize::deserialize(PathDeserializer::new(&path));
assert!(s.is_err());
assert!(format!("{:?}", s).contains("wrong number of parameters"));
let s: Result<Test2, de::value::Error> =
de::Deserialize::deserialize(PathDeserializer::new(&path));
assert!(s.is_err());
assert!(format!("{:?}", s).contains("can not parse"));
let s: Result<(String, String), de::value::Error> =
de::Deserialize::deserialize(PathDeserializer::new(&path));
assert!(s.is_err());
assert!(format!("{:?}", s).contains("wrong number of parameters"));
let s: Result<u32, de::value::Error> =
de::Deserialize::deserialize(PathDeserializer::new(&path));
assert!(s.is_err());
assert!(format!("{:?}", s).contains("can not parse"));
}
// #[test]
// fn test_extract_path_decode() {
// let mut router = Router::<()>::default();
// router.register_resource(Resource::new(ResourceDef::new("/{value}/")));
// macro_rules! test_single_value {
// ($value:expr, $expected:expr) => {{
// let req = TestRequest::with_uri($value).finish();
// let info = router.recognize(&req, &(), 0);
// let req = req.with_route_info(info);
// assert_eq!(
// *Path::<String>::from_request(&req, &PathConfig::default()).unwrap(),
// $expected
// );
// }};
// }
// test_single_value!("/%25/", "%");
// test_single_value!("/%40%C2%A3%24%25%5E%26%2B%3D/", "@£$%^&+=");
// test_single_value!("/%2B/", "+");
// test_single_value!("/%252B/", "%2B");
// test_single_value!("/%2F/", "/");
// test_single_value!("/%252F/", "%2F");
// test_single_value!(
// "/http%3A%2F%2Flocalhost%3A80%2Ffoo/",
// "http://localhost:80/foo"
// );
// test_single_value!("/%2Fvar%2Flog%2Fsyslog/", "/var/log/syslog");
// test_single_value!(
// "/http%3A%2F%2Flocalhost%3A80%2Ffile%2F%252Fvar%252Flog%252Fsyslog/",
// "http://localhost:80/file/%2Fvar%2Flog%2Fsyslog"
// );
// let req = TestRequest::with_uri("/%25/7/?id=test").finish();
// let mut router = Router::<()>::default();
// router.register_resource(Resource::new(ResourceDef::new("/{key}/{value}/")));
// let info = router.recognize(&req, &(), 0);
// let req = req.with_route_info(info);
// let s = Path::<Test2>::from_request(&req, &PathConfig::default()).unwrap();
// assert_eq!(s.key, "%");
// assert_eq!(s.value, 7);
// let s = Path::<(String, String)>::from_request(&req, &PathConfig::default()).unwrap();
// assert_eq!(s.0, "%");
// assert_eq!(s.1, "7");
// }
// #[test]
// fn test_extract_path_no_decode() {
// let mut router = Router::<()>::default();
// router.register_resource(Resource::new(ResourceDef::new("/{value}/")));
// let req = TestRequest::with_uri("/%25/").finish();
// let info = router.recognize(&req, &(), 0);
// let req = req.with_route_info(info);
// assert_eq!(
// *Path::<String>::from_request(&req, &&PathConfig::default().disable_decoding())
// .unwrap(),
// "%25"
// );
// }
}

actix-router/src/lib.rs Normal file

@ -0,0 +1,149 @@
//! Resource path matching and router.
#![deny(rust_2018_idioms, nonstandard_style)]
#![doc(html_logo_url = "https://actix.rs/img/logo.png")]
#![doc(html_favicon_url = "https://actix.rs/favicon.ico")]
mod de;
mod path;
mod resource;
mod router;
pub use self::de::PathDeserializer;
pub use self::path::Path;
pub use self::resource::ResourceDef;
pub use self::router::{ResourceInfo, Router, RouterBuilder};
// TODO: this trait is necessary, document it
// see impl Resource for ServiceRequest
pub trait Resource<T: ResourcePath> {
fn resource_path(&mut self) -> &mut Path<T>;
}
pub trait ResourcePath {
fn path(&self) -> &str;
}
impl ResourcePath for String {
fn path(&self) -> &str {
self.as_str()
}
}
impl<'a> ResourcePath for &'a str {
fn path(&self) -> &str {
self
}
}
impl ResourcePath for bytestring::ByteString {
fn path(&self) -> &str {
&*self
}
}
/// One or many patterns.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Patterns {
Single(String),
List(Vec<String>),
}
impl Patterns {
pub fn is_empty(&self) -> bool {
match self {
Patterns::Single(_) => false,
Patterns::List(pats) => pats.is_empty(),
}
}
}
/// Helper trait for types that can be converted to one or more path patterns.
pub trait IntoPatterns {
fn patterns(&self) -> Patterns;
}
impl IntoPatterns for String {
fn patterns(&self) -> Patterns {
Patterns::Single(self.clone())
}
}
impl<'a> IntoPatterns for &'a String {
fn patterns(&self) -> Patterns {
Patterns::Single((*self).clone())
}
}
impl<'a> IntoPatterns for &'a str {
fn patterns(&self) -> Patterns {
Patterns::Single((*self).to_owned())
}
}
impl IntoPatterns for bytestring::ByteString {
fn patterns(&self) -> Patterns {
Patterns::Single(self.to_string())
}
}
impl IntoPatterns for Patterns {
fn patterns(&self) -> Patterns {
self.clone()
}
}
impl<T: AsRef<str>> IntoPatterns for Vec<T> {
fn patterns(&self) -> Patterns {
let mut patterns = self.iter().map(|v| v.as_ref().to_owned());
match patterns.size_hint() {
(1, _) => Patterns::Single(patterns.next().unwrap()),
_ => Patterns::List(patterns.collect()),
}
}
}
macro_rules! array_patterns_single (($tp:ty) => {
impl IntoPatterns for [$tp; 1] {
fn patterns(&self) -> Patterns {
Patterns::Single(self[0].to_owned())
}
}
});
macro_rules! array_patterns_multiple (($tp:ty, $str_fn:expr, $($num:tt) +) => {
// for each array length specified in $num
$(
impl IntoPatterns for [$tp; $num] {
fn patterns(&self) -> Patterns {
Patterns::List(self.iter().map($str_fn).collect())
}
}
)+
});
array_patterns_single!(&str);
array_patterns_multiple!(&str, |&v| v.to_owned(), 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16);
array_patterns_single!(String);
array_patterns_multiple!(String, |v| v.clone(), 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16);
#[cfg(feature = "http")]
mod url;
#[cfg(feature = "http")]
pub use self::url::{Quoter, Url};
#[cfg(feature = "http")]
mod http_impls {
use http::Uri;
use super::ResourcePath;
impl ResourcePath for Uri {
fn path(&self) -> &str {
self.path()
}
}
}
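A minimal sketch of how these `IntoPatterns` implementations behave, assuming the crate is used as `actix_router`:

use actix_router::{IntoPatterns, Patterns};

fn main() {
    // A &str produces a single owned pattern.
    assert_eq!("/user/{id}".patterns(), Patterns::Single("/user/{id}".to_owned()));

    // A one-element array stays a single pattern.
    assert_eq!(["/ping"].patterns(), Patterns::Single("/ping".to_owned()));

    // Longer arrays and Vecs become a pattern list.
    assert_eq!(
        vec!["/", "/index.html"].patterns(),
        Patterns::List(vec!["/".to_owned(), "/index.html".to_owned()])
    );
}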

actix-router/src/path.rs Normal file

@ -0,0 +1,220 @@
use std::borrow::Cow;
use std::ops::Index;
use firestorm::profile_method;
use serde::de;
use crate::{de::PathDeserializer, Resource, ResourcePath};
#[derive(Debug, Clone)]
pub(crate) enum PathItem {
Static(Cow<'static, str>),
Segment(u16, u16),
}
impl Default for PathItem {
fn default() -> Self {
Self::Static(Cow::Borrowed(""))
}
}
/// Resource path match information.
///
/// If the resource path contains variable patterns, `Path` stores them.
#[derive(Debug, Clone, Default)]
pub struct Path<T> {
path: T,
pub(crate) skip: u16,
pub(crate) segments: Vec<(Cow<'static, str>, PathItem)>,
}
impl<T: ResourcePath> Path<T> {
pub fn new(path: T) -> Path<T> {
Path {
path,
skip: 0,
segments: Vec::new(),
}
}
/// Get reference to inner path instance.
#[inline]
pub fn get_ref(&self) -> &T {
&self.path
}
/// Get mutable reference to inner path instance.
#[inline]
pub fn get_mut(&mut self) -> &mut T {
&mut self.path
}
/// Path.
#[inline]
pub fn path(&self) -> &str {
profile_method!(path);
let skip = self.skip as usize;
let path = self.path.path();
if skip <= path.len() {
&path[skip..]
} else {
""
}
}
/// Set new path.
#[inline]
pub fn set(&mut self, path: T) {
self.skip = 0;
self.path = path;
self.segments.clear();
}
/// Reset state.
#[inline]
pub fn reset(&mut self) {
self.skip = 0;
self.segments.clear();
}
/// Skip first `n` chars in path.
#[inline]
pub fn skip(&mut self, n: u16) {
self.skip += n;
}
pub(crate) fn add(&mut self, name: impl Into<Cow<'static, str>>, value: PathItem) {
profile_method!(add);
match value {
PathItem::Static(s) => self.segments.push((name.into(), PathItem::Static(s))),
PathItem::Segment(begin, end) => self.segments.push((
name.into(),
PathItem::Segment(self.skip + begin, self.skip + end),
)),
}
}
#[doc(hidden)]
pub fn add_static(
&mut self,
name: impl Into<Cow<'static, str>>,
value: impl Into<Cow<'static, str>>,
) {
self.segments
.push((name.into(), PathItem::Static(value.into())));
}
/// Check if there are any matched patterns.
#[inline]
pub fn is_empty(&self) -> bool {
self.segments.is_empty()
}
/// Returns number of interpolated segments.
#[inline]
pub fn segment_count(&self) -> usize {
self.segments.len()
}
/// Get a matched parameter by name, without type conversion.
pub fn get(&self, name: &str) -> Option<&str> {
profile_method!(get);
for (seg_name, val) in self.segments.iter() {
if name == seg_name {
return match val {
PathItem::Static(ref s) => Some(s),
PathItem::Segment(s, e) => {
Some(&self.path.path()[(*s as usize)..(*e as usize)])
}
};
}
}
None
}
/// Get the unprocessed part of the path.
pub fn unprocessed(&self) -> &str {
&self.path.path()[(self.skip as usize)..]
}
/// Get matched parameter by name.
///
/// If the keyed parameter is not available, an empty string is used as the default value.
pub fn query(&self, key: &str) -> &str {
profile_method!(query);
if let Some(s) = self.get(key) {
s
} else {
""
}
}
/// Returns an iterator over the items in the parameter container.
pub fn iter(&self) -> PathIter<'_, T> {
PathIter {
idx: 0,
params: self,
}
}
/// Try to deserialize matched parameters into a specified type `U`.
pub fn load<'de, U: serde::Deserialize<'de>>(&'de self) -> Result<U, de::value::Error> {
profile_method!(load);
de::Deserialize::deserialize(PathDeserializer::new(self))
}
}
#[derive(Debug)]
pub struct PathIter<'a, T> {
idx: usize,
params: &'a Path<T>,
}
impl<'a, T: ResourcePath> Iterator for PathIter<'a, T> {
type Item = (&'a str, &'a str);
#[inline]
fn next(&mut self) -> Option<(&'a str, &'a str)> {
if self.idx < self.params.segment_count() {
let idx = self.idx;
let res = match self.params.segments[idx].1 {
PathItem::Static(ref s) => s,
PathItem::Segment(s, e) => &self.params.path.path()[(s as usize)..(e as usize)],
};
self.idx += 1;
return Some((&self.params.segments[idx].0, res));
}
None
}
}
impl<'a, T: ResourcePath> Index<&'a str> for Path<T> {
type Output = str;
fn index(&self, name: &'a str) -> &str {
self.get(name)
.expect("Value for parameter is not available")
}
}
impl<T: ResourcePath> Index<usize> for Path<T> {
type Output = str;
fn index(&self, idx: usize) -> &str {
match self.segments[idx].1 {
PathItem::Static(ref s) => s,
PathItem::Segment(s, e) => &self.path.path()[(s as usize)..(e as usize)],
}
}
}
impl<T: ResourcePath> Resource<T> for Path<T> {
fn resource_path(&mut self) -> &mut Self {
self
}
}
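A minimal sketch of how `Path` works together with the router and serde (hypothetical route and values, assuming the `actix_router` crate):

use actix_router::{Path, Router};

fn main() {
    let mut router = Router::<()>::build();
    router.path("/user/{id}/{name}", ());
    let router = router.finish();

    // Matching fills the segment table of the Path.
    let mut path = Path::new("/user/42/bob");
    assert!(router.recognize(&mut path).is_some());

    // Named and indexed access to captured segments.
    assert_eq!(path.get("id"), Some("42"));
    assert_eq!(&path["name"], "bob");

    // Deserialize the captured segments via serde.
    let (id, name): (u32, String) = path.load().unwrap();
    assert_eq!((id, name), (42, "bob".to_owned()));
}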

actix-router/src/resource.rs Normal file
File diff suppressed because it is too large

actix-router/src/router.rs Normal file

@ -0,0 +1,281 @@
use firestorm::profile_method;
use crate::{IntoPatterns, Resource, ResourceDef, ResourcePath};
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct ResourceId(pub u16);
/// Information about current resource
#[derive(Clone, Debug)]
pub struct ResourceInfo {
resource: ResourceId,
}
/// Resource router.
// T is the resource itself
// U is any other data needed for routing like method guards
pub struct Router<T, U = ()> {
routes: Vec<(ResourceDef, T, Option<U>)>,
}
impl<T, U> Router<T, U> {
pub fn build() -> RouterBuilder<T, U> {
RouterBuilder {
resources: Vec::new(),
}
}
pub fn recognize<R, P>(&self, resource: &mut R) -> Option<(&T, ResourceId)>
where
R: Resource<P>,
P: ResourcePath,
{
profile_method!(recognize);
for item in self.routes.iter() {
if item.0.capture_match_info(resource.resource_path()) {
return Some((&item.1, ResourceId(item.0.id())));
}
}
None
}
pub fn recognize_mut<R, P>(&mut self, resource: &mut R) -> Option<(&mut T, ResourceId)>
where
R: Resource<P>,
P: ResourcePath,
{
profile_method!(recognize_mut);
for item in self.routes.iter_mut() {
if item.0.capture_match_info(resource.resource_path()) {
return Some((&mut item.1, ResourceId(item.0.id())));
}
}
None
}
pub fn recognize_fn<R, P, F>(&self, resource: &mut R, check: F) -> Option<(&T, ResourceId)>
where
F: Fn(&R, &Option<U>) -> bool,
R: Resource<P>,
P: ResourcePath,
{
profile_method!(recognize_checked);
for item in self.routes.iter() {
if item.0.capture_match_info_fn(resource, &check, &item.2) {
return Some((&item.1, ResourceId(item.0.id())));
}
}
None
}
pub fn recognize_mut_fn<R, P, F>(
&mut self,
resource: &mut R,
check: F,
) -> Option<(&mut T, ResourceId)>
where
F: Fn(&R, &Option<U>) -> bool,
R: Resource<P>,
P: ResourcePath,
{
profile_method!(recognize_mut_checked);
for item in self.routes.iter_mut() {
if item.0.capture_match_info_fn(resource, &check, &item.2) {
return Some((&mut item.1, ResourceId(item.0.id())));
}
}
None
}
}
pub struct RouterBuilder<T, U = ()> {
resources: Vec<(ResourceDef, T, Option<U>)>,
}
impl<T, U> RouterBuilder<T, U> {
/// Register resource for specified path.
pub fn path<P: IntoPatterns>(
&mut self,
path: P,
resource: T,
) -> &mut (ResourceDef, T, Option<U>) {
profile_method!(path);
self.resources
.push((ResourceDef::new(path), resource, None));
self.resources.last_mut().unwrap()
}
/// Register resource for specified path prefix.
pub fn prefix(&mut self, prefix: &str, resource: T) -> &mut (ResourceDef, T, Option<U>) {
profile_method!(prefix);
self.resources
.push((ResourceDef::prefix(prefix), resource, None));
self.resources.last_mut().unwrap()
}
/// Register a resource for a `ResourceDef`.
pub fn rdef(&mut self, rdef: ResourceDef, resource: T) -> &mut (ResourceDef, T, Option<U>) {
profile_method!(rdef);
self.resources.push((rdef, resource, None));
self.resources.last_mut().unwrap()
}
/// Finish configuration and create router instance.
pub fn finish(self) -> Router<T, U> {
Router {
routes: self.resources,
}
}
}
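A minimal sketch of the guard-data hook (the `U` parameter and `recognize_fn`), using a hypothetical method-name guard:

use actix_router::{Path, Router};

fn main() {
    // T is a handler id, U is optional guard data (here a method name).
    let mut builder = Router::<usize, &'static str>::build();
    builder.path("/ping", 1).2 = Some("GET");
    let router = builder.finish();

    let mut path = Path::new("/ping");

    // Only match when the check closure accepts the stored guard data.
    let matched = router.recognize_fn(&mut path, |_res, guard| *guard == Some("GET"));
    assert!(matched.is_some());
}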
#[cfg(test)]
mod tests {
use crate::path::Path;
use crate::router::{ResourceId, Router};
#[allow(clippy::cognitive_complexity)]
#[test]
fn test_recognizer_1() {
let mut router = Router::<usize>::build();
router.path("/name", 10).0.set_id(0);
router.path("/name/{val}", 11).0.set_id(1);
router.path("/name/{val}/index.html", 12).0.set_id(2);
router.path("/file/{file}.{ext}", 13).0.set_id(3);
router.path("/v{val}/{val2}/index.html", 14).0.set_id(4);
router.path("/v/{tail:.*}", 15).0.set_id(5);
router.path("/test2/{test}.html", 16).0.set_id(6);
router.path("/{test}/index.html", 17).0.set_id(7);
let mut router = router.finish();
let mut path = Path::new("/unknown");
assert!(router.recognize_mut(&mut path).is_none());
let mut path = Path::new("/name");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 10);
assert_eq!(info, ResourceId(0));
assert!(path.is_empty());
let mut path = Path::new("/name/value");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 11);
assert_eq!(info, ResourceId(1));
assert_eq!(path.get("val").unwrap(), "value");
assert_eq!(&path["val"], "value");
let mut path = Path::new("/name/value2/index.html");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 12);
assert_eq!(info, ResourceId(2));
assert_eq!(path.get("val").unwrap(), "value2");
let mut path = Path::new("/file/file.gz");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 13);
assert_eq!(info, ResourceId(3));
assert_eq!(path.get("file").unwrap(), "file");
assert_eq!(path.get("ext").unwrap(), "gz");
let mut path = Path::new("/vtest/ttt/index.html");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 14);
assert_eq!(info, ResourceId(4));
assert_eq!(path.get("val").unwrap(), "test");
assert_eq!(path.get("val2").unwrap(), "ttt");
let mut path = Path::new("/v/blah-blah/index.html");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 15);
assert_eq!(info, ResourceId(5));
assert_eq!(path.get("tail").unwrap(), "blah-blah/index.html");
let mut path = Path::new("/test2/index.html");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 16);
assert_eq!(info, ResourceId(6));
assert_eq!(path.get("test").unwrap(), "index");
let mut path = Path::new("/bbb/index.html");
let (h, info) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 17);
assert_eq!(info, ResourceId(7));
assert_eq!(path.get("test").unwrap(), "bbb");
}
#[test]
fn test_recognizer_2() {
let mut router = Router::<usize>::build();
router.path("/index.json", 10);
router.path("/{source}.json", 11);
let mut router = router.finish();
let mut path = Path::new("/index.json");
let (h, _) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 10);
let mut path = Path::new("/test.json");
let (h, _) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 11);
}
#[test]
fn test_recognizer_with_prefix() {
let mut router = Router::<usize>::build();
router.path("/name", 10).0.set_id(0);
router.path("/name/{val}", 11).0.set_id(1);
let mut router = router.finish();
let mut path = Path::new("/name");
path.skip(5);
assert!(router.recognize_mut(&mut path).is_none());
let mut path = Path::new("/test/name");
path.skip(5);
let (h, _) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 10);
let mut path = Path::new("/test/name/value");
path.skip(5);
let (h, id) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 11);
assert_eq!(id, ResourceId(1));
assert_eq!(path.get("val").unwrap(), "value");
assert_eq!(&path["val"], "value");
// same patterns
let mut router = Router::<usize>::build();
router.path("/name", 10);
router.path("/name/{val}", 11);
let mut router = router.finish();
let mut path = Path::new("/name");
path.skip(6);
assert!(router.recognize_mut(&mut path).is_none());
let mut path = Path::new("/test2/name");
path.skip(6);
let (h, _) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 10);
let mut path = Path::new("/test2/name-test");
path.skip(6);
assert!(router.recognize_mut(&mut path).is_none());
let mut path = Path::new("/test2/name/ttt");
path.skip(6);
let (h, _) = router.recognize_mut(&mut path).unwrap();
assert_eq!(*h, 11);
assert_eq!(&path["val"], "ttt");
}
}

actix-router/src/url.rs Normal file

@ -0,0 +1,288 @@
use crate::ResourcePath;
#[allow(dead_code)]
const GEN_DELIMS: &[u8] = b":/?#[]@";
#[allow(dead_code)]
const SUB_DELIMS_WITHOUT_QS: &[u8] = b"!$'()*,";
#[allow(dead_code)]
const SUB_DELIMS: &[u8] = b"!$'()*,+?=;";
#[allow(dead_code)]
const RESERVED: &[u8] = b":/?#[]@!$'()*,+?=;";
#[allow(dead_code)]
const UNRESERVED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
ABCDEFGHIJKLMNOPQRSTUVWXYZ
1234567890
-._~";
const ALLOWED: &[u8] = b"abcdefghijklmnopqrstuvwxyz
ABCDEFGHIJKLMNOPQRSTUVWXYZ
1234567890
-._~
!$'()*,";
const QS: &[u8] = b"+&=;b";
#[inline]
fn bit_at(array: &[u8], ch: u8) -> bool {
array[(ch >> 3) as usize] & (1 << (ch & 7)) != 0
}
#[inline]
fn set_bit(array: &mut [u8], ch: u8) {
array[(ch >> 3) as usize] |= 1 << (ch & 7)
}
thread_local! {
static DEFAULT_QUOTER: Quoter = Quoter::new(b"@:", b"%/+");
}
#[derive(Default, Clone, Debug)]
pub struct Url {
uri: http::Uri,
path: Option<String>,
}
impl Url {
pub fn new(uri: http::Uri) -> Url {
let path = DEFAULT_QUOTER.with(|q| q.requote(uri.path().as_bytes()));
Url { uri, path }
}
pub fn with_quoter(uri: http::Uri, quoter: &Quoter) -> Url {
Url {
path: quoter.requote(uri.path().as_bytes()),
uri,
}
}
pub fn uri(&self) -> &http::Uri {
&self.uri
}
pub fn path(&self) -> &str {
if let Some(ref s) = self.path {
s
} else {
self.uri.path()
}
}
#[inline]
pub fn update(&mut self, uri: &http::Uri) {
self.uri = uri.clone();
self.path = DEFAULT_QUOTER.with(|q| q.requote(uri.path().as_bytes()));
}
#[inline]
pub fn update_with_quoter(&mut self, uri: &http::Uri, quoter: &Quoter) {
self.uri = uri.clone();
self.path = quoter.requote(uri.path().as_bytes());
}
}
impl ResourcePath for Url {
#[inline]
fn path(&self) -> &str {
self.path()
}
}
pub struct Quoter {
safe_table: [u8; 16],
protected_table: [u8; 16],
}
impl Quoter {
pub fn new(safe: &[u8], protected: &[u8]) -> Quoter {
let mut q = Quoter {
safe_table: [0; 16],
protected_table: [0; 16],
};
// prepare safe table
for i in 0..128 {
if ALLOWED.contains(&i) {
set_bit(&mut q.safe_table, i);
}
if QS.contains(&i) {
set_bit(&mut q.safe_table, i);
}
}
for ch in safe {
set_bit(&mut q.safe_table, *ch)
}
// prepare protected table
for ch in protected {
set_bit(&mut q.safe_table, *ch);
set_bit(&mut q.protected_table, *ch);
}
q
}
pub fn requote(&self, val: &[u8]) -> Option<String> {
let mut has_pct = 0;
let mut pct = [b'%', 0, 0];
let mut idx = 0;
let mut cloned: Option<Vec<u8>> = None;
let len = val.len();
while idx < len {
let ch = val[idx];
if has_pct != 0 {
pct[has_pct] = val[idx];
has_pct += 1;
if has_pct == 3 {
has_pct = 0;
let buf = cloned.as_mut().unwrap();
if let Some(ch) = restore_ch(pct[1], pct[2]) {
if ch < 128 {
if bit_at(&self.protected_table, ch) {
buf.extend_from_slice(&pct);
idx += 1;
continue;
}
if bit_at(&self.safe_table, ch) {
buf.push(ch);
idx += 1;
continue;
}
}
buf.push(ch);
} else {
buf.extend_from_slice(&pct[..]);
}
}
} else if ch == b'%' {
has_pct = 1;
if cloned.is_none() {
let mut c = Vec::with_capacity(len);
c.extend_from_slice(&val[..idx]);
cloned = Some(c);
}
} else if let Some(ref mut cloned) = cloned {
cloned.push(ch)
}
idx += 1;
}
cloned.map(|data| String::from_utf8_lossy(&data).into_owned())
}
}
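A minimal sketch of the requoting behavior, assuming the crate's `http` feature is enabled so `Quoter` is exported, and using the same safe/protected sets as the default quoter above:

use actix_router::Quoter;

fn main() {
    // Same safe ("@:") and protected ("%/+") sets as DEFAULT_QUOTER.
    let quoter = Quoter::new(b"@:", b"%/+");

    // Safe characters are decoded in place.
    assert_eq!(quoter.requote(b"/user/%40home").as_deref(), Some("/user/@home"));

    // Protected characters stay percent-encoded ("%2F" would decode to '/').
    assert_eq!(quoter.requote(b"/a%2Fb").as_deref(), Some("/a%2Fb"));

    // A path without any '%' is returned as None (nothing to rewrite).
    assert_eq!(quoter.requote(b"/plain/path"), None);
}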
#[inline]
fn from_hex(v: u8) -> Option<u8> {
if (b'0'..=b'9').contains(&v) {
Some(v - 0x30) // ord('0') == 0x30
} else if (b'A'..=b'F').contains(&v) {
Some(v - 0x41 + 10) // ord('A') == 0x41
} else if (b'a'..=b'f').contains(&v) {
Some(v - 0x61 + 10) // ord('a') == 0x61
} else {
None
}
}
#[inline]
fn restore_ch(d1: u8, d2: u8) -> Option<u8> {
from_hex(d1).and_then(|d1| from_hex(d2).map(move |d2| d1 << 4 | d2))
}
#[cfg(test)]
mod tests {
use http::Uri;
use std::convert::TryFrom;
use super::*;
use crate::{Path, ResourceDef};
const PROTECTED: &[u8] = b"%/+";
fn match_url(pattern: &'static str, url: impl AsRef<str>) -> Path<Url> {
let re = ResourceDef::new(pattern);
let uri = Uri::try_from(url.as_ref()).unwrap();
let mut path = Path::new(Url::new(uri));
assert!(re.capture_match_info(&mut path));
path
}
fn percent_encode(data: &[u8]) -> String {
data.iter().map(|c| format!("%{:02X}", c)).collect()
}
#[test]
fn test_parse_url() {
let re = "/user/{id}/test";
let path = match_url(re, "/user/2345/test");
assert_eq!(path.get("id").unwrap(), "2345");
// "%25" should never be decoded into '%' to guarantee the output is a valid
// percent-encoded format
let path = match_url(re, "/user/qwe%25/test");
assert_eq!(path.get("id").unwrap(), "qwe%25");
let path = match_url(re, "/user/qwe%25rty/test");
assert_eq!(path.get("id").unwrap(), "qwe%25rty");
}
#[test]
fn test_protected_chars() {
let encoded = percent_encode(PROTECTED);
let path = match_url("/user/{id}/test", format!("/user/{}/test", encoded));
assert_eq!(path.get("id").unwrap(), &encoded);
}
#[test]
fn test_non_protected_ascii() {
let nonprotected_ascii = ('\u{0}'..='\u{7F}')
.filter(|&c| c.is_ascii() && !PROTECTED.contains(&(c as u8)))
.collect::<String>();
let encoded = percent_encode(nonprotected_ascii.as_bytes());
let path = match_url("/user/{id}/test", format!("/user/{}/test", encoded));
assert_eq!(path.get("id").unwrap(), &nonprotected_ascii);
}
#[test]
fn test_valid_utf8_multibyte() {
let test = ('\u{FF00}'..='\u{FFFF}').collect::<String>();
let encoded = percent_encode(test.as_bytes());
let path = match_url("/a/{id}/b", format!("/a/{}/b", &encoded));
assert_eq!(path.get("id").unwrap(), &test);
}
#[test]
fn test_invalid_utf8() {
let invalid_utf8 = percent_encode((0x80..=0xff).collect::<Vec<_>>().as_slice());
let uri = Uri::try_from(format!("/{}", invalid_utf8)).unwrap();
let path = Path::new(Url::new(uri));
// We should always get a valid utf8 string
assert!(String::from_utf8(path.path().as_bytes().to_owned()).is_ok());
}
#[test]
fn test_from_hex() {
let hex = b"0123456789abcdefABCDEF";
for i in 0..256 {
let c = i as u8;
if hex.contains(&c) {
assert!(from_hex(c).is_some())
} else {
assert!(from_hex(c).is_none())
}
}
let expected = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 10, 11, 12, 13, 14, 15,
];
for i in 0..hex.len() {
assert_eq!(from_hex(hex[i]).unwrap(), expected[i]);
}
}
}


@ -3,6 +3,10 @@
## Unreleased - 2021-xx-xx ## Unreleased - 2021-xx-xx
## 0.1.0-beta.4 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.
## 0.1.0-beta.3 - 2021-06-20 ## 0.1.0-beta.3 - 2021-06-20
* No significant changes from `0.1.0-beta.2`. * No significant changes from `0.1.0-beta.2`.


@ -1,6 +1,6 @@
[package] [package]
name = "actix-test" name = "actix-test"
version = "0.1.0-beta.3" version = "0.1.0-beta.4"
authors = [ authors = [
"Nikolay Kim <fafhrd91@gmail.com>", "Nikolay Kim <fafhrd91@gmail.com>",
"Rob Ede <robjtede@icloud.com>", "Rob Ede <robjtede@icloud.com>",
@ -20,13 +20,13 @@ openssl = ["tls-openssl", "actix-http/openssl"]
[dependencies] [dependencies]
actix-codec = "0.4.0" actix-codec = "0.4.0"
actix-http = "3.0.0-beta.8" actix-http = "3.0.0-beta.10"
actix-http-test = { version = "3.0.0-beta.4", features = [] } actix-http-test = "3.0.0-beta.5"
actix-service = "2.0.0" actix-service = "2.0.0"
actix-utils = "3.0.0" actix-utils = "3.0.0"
actix-web = { version = "4.0.0-beta.8", default-features = false, features = ["cookies"] } actix-web = { version = "4.0.0-beta.9", default-features = false, features = ["cookies"] }
actix-rt = "2.1" actix-rt = "2.1"
awc = { version = "3.0.0-beta.7", default-features = false, features = ["cookies"] } awc = { version = "3.0.0-beta.8", default-features = false, features = ["cookies"] }
futures-core = { version = "0.3.7", default-features = false, features = ["std"] } futures-core = { version = "0.3.7", default-features = false, features = ["std"] }
futures-util = { version = "0.3.7", default-features = false, features = [] } futures-util = { version = "0.3.7", default-features = false, features = [] }


@ -3,6 +3,10 @@
## Unreleased - 2021-xx-xx ## Unreleased - 2021-xx-xx
## 4.0.0-beta.7 - 2021-09-09
* Minimum supported Rust version (MSRV) is now 1.51.
## 4.0.0-beta.6 - 2021-06-26 ## 4.0.0-beta.6 - 2021-06-26
* Update `actix` to `0.12`. [#2277] * Update `actix` to `0.12`. [#2277]


@ -1,6 +1,6 @@
[package] [package]
name = "actix-web-actors" name = "actix-web-actors"
version = "4.0.0-beta.6" version = "4.0.0-beta.7"
authors = ["Nikolay Kim <fafhrd91@gmail.com>"] authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
description = "Actix actors support for Actix Web" description = "Actix actors support for Actix Web"
keywords = ["actix", "http", "web", "framework", "async"] keywords = ["actix", "http", "web", "framework", "async"]
@ -16,8 +16,8 @@ path = "src/lib.rs"
[dependencies] [dependencies]
actix = { version = "0.12.0", default-features = false } actix = { version = "0.12.0", default-features = false }
actix-codec = "0.4.0" actix-codec = "0.4.0"
actix-http = "3.0.0-beta.8" actix-http = "3.0.0-beta.10"
actix-web = { version = "4.0.0-beta.8", default-features = false } actix-web = { version = "4.0.0-beta.9", default-features = false }
bytes = "1" bytes = "1"
bytestring = "1" bytestring = "1"
@ -29,6 +29,6 @@ tokio = { version = "1", features = ["sync"] }
actix-rt = "2.2" actix-rt = "2.2"
actix-test = "0.1.0-beta.3" actix-test = "0.1.0-beta.3"
awc = { version = "3.0.0-beta.7", default-features = false } awc = { version = "3.0.0-beta.8", default-features = false }
env_logger = "0.8" env_logger = "0.8"
futures-util = { version = "0.3.7", default-features = false } futures-util = { version = "0.3.7", default-features = false }


@ -3,15 +3,15 @@
> Actix actors support for Actix Web. > Actix actors support for Actix Web.
[![crates.io](https://img.shields.io/crates/v/actix-web-actors?label=latest)](https://crates.io/crates/actix-web-actors) [![crates.io](https://img.shields.io/crates/v/actix-web-actors?label=latest)](https://crates.io/crates/actix-web-actors)
[![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.0.0-beta.6)](https://docs.rs/actix-web-actors/4.0.0-beta.6) [![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.0.0-beta.7)](https://docs.rs/actix-web-actors/4.0.0-beta.7)
[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
![License](https://img.shields.io/crates/l/actix-web-actors.svg) ![License](https://img.shields.io/crates/l/actix-web-actors.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-web-actors/4.0.0-beta.6/status.svg)](https://deps.rs/crate/actix-web-actors/4.0.0-beta.6) [![dependency status](https://deps.rs/crate/actix-web-actors/4.0.0-beta.7/status.svg)](https://deps.rs/crate/actix-web-actors/4.0.0-beta.7)
[![Download](https://img.shields.io/crates/d/actix-web-actors.svg)](https://crates.io/crates/actix-web-actors) [![Download](https://img.shields.io/crates/d/actix-web-actors.svg)](https://crates.io/crates/actix-web-actors)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources ## Documentation & Resources
- [API Documentation](https://docs.rs/actix-web-actors) - [API Documentation](https://docs.rs/actix-web-actors)
- Minimum supported Rust version: 1.46 or later - Minimum supported Rust version: 1.51 or later


@ -3,6 +3,13 @@
## Unreleased - 2021-xx-xx ## Unreleased - 2021-xx-xx
## 0.5.0-beta.4 - 2021-09-09
* In routing macros, paths are now validated at compile time. [#2350]
* Minimum supported Rust version (MSRV) is now 1.51.
[#2350]: https://github.com/actix/actix-web/pull/2350
## 0.5.0-beta.3 - 2021-06-17 ## 0.5.0-beta.3 - 2021-06-17
* No notable changes. * No notable changes.


@ -1,6 +1,6 @@
[package] [package]
name = "actix-web-codegen" name = "actix-web-codegen"
version = "0.5.0-beta.3" version = "0.5.0-beta.4"
description = "Routing and runtime macros for Actix Web" description = "Routing and runtime macros for Actix Web"
readme = "README.md" readme = "README.md"
homepage = "https://actix.rs" homepage = "https://actix.rs"
@ -17,12 +17,13 @@ proc-macro = true
quote = "1" quote = "1"
syn = { version = "1", features = ["full", "parsing"] } syn = { version = "1", features = ["full", "parsing"] }
proc-macro2 = "1" proc-macro2 = "1"
actix-router = "0.5.0-beta.2"
[dev-dependencies] [dev-dependencies]
actix-rt = "2.2" actix-rt = "2.2"
actix-test = "0.1.0-beta.3" actix-test = "0.1.0-beta.3"
actix-utils = "3.0.0" actix-utils = "3.0.0"
actix-web = "4.0.0-beta.8" actix-web = "4.0.0-beta.9"
futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] } futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] }
trybuild = "1" trybuild = "1"


@ -3,18 +3,18 @@
> Routing and runtime macros for Actix Web. > Routing and runtime macros for Actix Web.
[![crates.io](https://img.shields.io/crates/v/actix-web-codegen?label=latest)](https://crates.io/crates/actix-web-codegen) [![crates.io](https://img.shields.io/crates/v/actix-web-codegen?label=latest)](https://crates.io/crates/actix-web-codegen)
[![Documentation](https://docs.rs/actix-web-codegen/badge.svg?version=0.5.0-beta.3)](https://docs.rs/actix-web-codegen/0.5.0-beta.3) [![Documentation](https://docs.rs/actix-web-codegen/badge.svg?version=0.5.0-beta.4)](https://docs.rs/actix-web-codegen/0.5.0-beta.4)
[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html)
![License](https://img.shields.io/crates/l/actix-web-codegen.svg) ![License](https://img.shields.io/crates/l/actix-web-codegen.svg)
<br /> <br />
[![dependency status](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.3/status.svg)](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.3) [![dependency status](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.4/status.svg)](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.4)
[![Download](https://img.shields.io/crates/d/actix-web-codegen.svg)](https://crates.io/crates/actix-web-codegen) [![Download](https://img.shields.io/crates/d/actix-web-codegen.svg)](https://crates.io/crates/actix-web-codegen)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources ## Documentation & Resources
- [API Documentation](https://docs.rs/actix-web-codegen) - [API Documentation](https://docs.rs/actix-web-codegen)
- Minimum supported Rust version: 1.46 or later. - Minimum supported Rust version: 1.51 or later.
## Compile Testing ## Compile Testing


@ -3,6 +3,7 @@ extern crate proc_macro;
use std::collections::HashSet; use std::collections::HashSet;
use std::convert::TryFrom; use std::convert::TryFrom;
use actix_router::ResourceDef;
use proc_macro::TokenStream; use proc_macro::TokenStream;
use proc_macro2::{Span, TokenStream as TokenStream2}; use proc_macro2::{Span, TokenStream as TokenStream2};
use quote::{format_ident, quote, ToTokens, TokenStreamExt}; use quote::{format_ident, quote, ToTokens, TokenStreamExt};
@ -101,6 +102,7 @@ impl Args {
match arg { match arg {
NestedMeta::Lit(syn::Lit::Str(lit)) => match path { NestedMeta::Lit(syn::Lit::Str(lit)) => match path {
None => { None => {
let _ = ResourceDef::new(lit.value());
path = Some(lit); path = Some(lit);
} }
_ => { _ => {


@ -1,4 +1,4 @@
#[rustversion::stable(1.46)] // MSRV #[rustversion::stable(1.51)] // MSRV
#[test] #[test]
fn compile_macros() { fn compile_macros() {
let t = trybuild::TestCases::new(); let t = trybuild::TestCases::new();
@ -10,6 +10,7 @@ fn compile_macros() {
t.compile_fail("tests/trybuild/route-missing-method-fail.rs"); t.compile_fail("tests/trybuild/route-missing-method-fail.rs");
t.compile_fail("tests/trybuild/route-duplicate-method-fail.rs"); t.compile_fail("tests/trybuild/route-duplicate-method-fail.rs");
t.compile_fail("tests/trybuild/route-unexpected-method-fail.rs"); t.compile_fail("tests/trybuild/route-unexpected-method-fail.rs");
t.compile_fail("tests/trybuild/route-malformed-path-fail.rs");
t.pass("tests/trybuild/docstring-ok.rs"); t.pass("tests/trybuild/docstring-ok.rs");
} }


@ -0,0 +1,33 @@
use actix_web_codegen::get;
#[get("/{")]
async fn zero() -> &'static str {
"malformed resource def"
}
#[get("/{foo")]
async fn one() -> &'static str {
"malformed resource def"
}
#[get("/{}")]
async fn two() -> &'static str {
"malformed resource def"
}
#[get("/*")]
async fn three() -> &'static str {
"malformed resource def"
}
#[get("/{tail:\\d+}*")]
async fn four() -> &'static str {
"malformed resource def"
}
#[get("/{a}/{b}/{c}/{d}/{e}/{f}/{g}/{h}/{i}/{j}/{k}/{l}/{m}/{n}/{o}/{p}/{q}")]
async fn five() -> &'static str {
"malformed resource def"
}
fn main() {}


@ -0,0 +1,42 @@
error: custom attribute panicked
--> $DIR/route-malformed-path-fail.rs:3:1
|
3 | #[get("/{")]
| ^^^^^^^^^^^^
|
= help: message: pattern "{" contains malformed dynamic segment
error: custom attribute panicked
--> $DIR/route-malformed-path-fail.rs:8:1
|
8 | #[get("/{foo")]
| ^^^^^^^^^^^^^^^
|
= help: message: pattern "{foo" contains malformed dynamic segment
error: custom attribute panicked
--> $DIR/route-malformed-path-fail.rs:13:1
|
13 | #[get("/{}")]
| ^^^^^^^^^^^^^
|
= help: message: Wrong path pattern: "/{}" regex parse error:
((?s-m)^/(?P<>[^/]+))$
^
error: empty capture group name
error: custom attribute panicked
--> $DIR/route-malformed-path-fail.rs:23:1
|
23 | #[get("/{tail:\\d+}*")]
| ^^^^^^^^^^^^^^^^^^^^^^^
|
= help: message: custom regex is not supported for tail match
error: custom attribute panicked
--> $DIR/route-malformed-path-fail.rs:28:1
|
28 | #[get("/{a}/{b}/{c}/{d}/{e}/{f}/{g}/{h}/{i}/{j}/{k}/{l}/{m}/{n}/{o}/{p}/{q}")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: message: Only 16 dynamic segments are allowed, provided: 17


@ -3,6 +3,13 @@
## Unreleased - 2021-xx-xx ## Unreleased - 2021-xx-xx
## 3.0.0-beta.8 - 2021-09-09
### Changed
* Send headers within the redirect requests. [#2310]
[#2310]: https://github.com/actix/actix-web/pull/2310
## 3.0.0-beta.7 - 2021-06-26 ## 3.0.0-beta.7 - 2021-06-26
### Changed ### Changed
* Change compression algorithm features flags. [#2250] * Change compression algorithm features flags. [#2250]


@ -1,6 +1,6 @@
[package] [package]
name = "awc" name = "awc"
version = "3.0.0-beta.7" version = "3.0.0-beta.8"
authors = [ authors = [
"Nikolay Kim <fafhrd91@gmail.com>", "Nikolay Kim <fafhrd91@gmail.com>",
"fakeshadow <24548779@qq.com>", "fakeshadow <24548779@qq.com>",
@ -55,7 +55,7 @@ __compress = []
[dependencies] [dependencies]
actix-codec = "0.4.0" actix-codec = "0.4.0"
actix-service = "2.0.0" actix-service = "2.0.0"
actix-http = "3.0.0-beta.8" actix-http = "3.0.0-beta.10"
actix-rt = { version = "2.1", default-features = false } actix-rt = { version = "2.1", default-features = false }
base64 = "0.13" base64 = "0.13"
@ -77,9 +77,9 @@ tls-openssl = { version = "0.10.9", package = "openssl", optional = true }
tls-rustls = { version = "0.19.0", package = "rustls", optional = true, features = ["dangerous_configuration"] } tls-rustls = { version = "0.19.0", package = "rustls", optional = true, features = ["dangerous_configuration"] }
[dev-dependencies] [dev-dependencies]
actix-web = { version = "4.0.0-beta.8", features = ["openssl"] } actix-web = { version = "4.0.0-beta.9", features = ["openssl"] }
actix-http = { version = "3.0.0-beta.8", features = ["openssl"] } actix-http = { version = "3.0.0-beta.10", features = ["openssl"] }
actix-http-test = { version = "3.0.0-beta.4", features = ["openssl"] } actix-http-test = { version = "3.0.0-beta.5", features = ["openssl"] }
actix-utils = "3.0.0" actix-utils = "3.0.0"
actix-server = "2.0.0-beta.3" actix-server = "2.0.0-beta.3"
actix-tls = { version = "3.0.0-beta.5", features = ["openssl", "rustls"] } actix-tls = { version = "3.0.0-beta.5", features = ["openssl", "rustls"] }


@ -3,16 +3,16 @@
> Async HTTP and WebSocket client library. > Async HTTP and WebSocket client library.
[![crates.io](https://img.shields.io/crates/v/awc?label=latest)](https://crates.io/crates/awc) [![crates.io](https://img.shields.io/crates/v/awc?label=latest)](https://crates.io/crates/awc)
[![Documentation](https://docs.rs/awc/badge.svg?version=3.0.0-beta.7)](https://docs.rs/awc/3.0.0-beta.7) [![Documentation](https://docs.rs/awc/badge.svg?version=3.0.0-beta.8)](https://docs.rs/awc/3.0.0-beta.8)
![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/awc) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/awc)
[![Dependency Status](https://deps.rs/crate/awc/3.0.0-beta.7/status.svg)](https://deps.rs/crate/awc/3.0.0-beta.7) [![Dependency Status](https://deps.rs/crate/awc/3.0.0-beta.8/status.svg)](https://deps.rs/crate/awc/3.0.0-beta.8)
[![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x)
## Documentation & Resources ## Documentation & Resources
- [API Documentation](https://docs.rs/awc) - [API Documentation](https://docs.rs/awc)
- [Example Project](https://github.com/actix/examples/tree/HEAD/security/awc_https) - [Example Project](https://github.com/actix/examples/tree/HEAD/security/awc_https)
- Minimum Supported Rust Version (MSRV): 1.46.0 - Minimum Supported Rust Version (MSRV): 1.51.0
## Example ## Example


@ -85,10 +85,12 @@ where
let max_redirect_times = self.max_redirect_times; let max_redirect_times = self.max_redirect_times;
// backup the uri and method for reuse schema and authority. // backup the uri and method for reuse schema and authority.
let (uri, method) = match head { let (uri, method, headers) = match head {
RequestHeadType::Owned(ref head) => (head.uri.clone(), head.method.clone()), RequestHeadType::Owned(ref head) => {
(head.uri.clone(), head.method.clone(), head.headers.clone())
}
RequestHeadType::Rc(ref head, ..) => { RequestHeadType::Rc(ref head, ..) => {
(head.uri.clone(), head.method.clone()) (head.uri.clone(), head.method.clone(), head.headers.clone())
} }
}; };
@ -104,6 +106,7 @@ where
max_redirect_times, max_redirect_times,
uri: Some(uri), uri: Some(uri),
method: Some(method), method: Some(method),
headers: Some(headers),
body: body_opt, body: body_opt,
addr, addr,
connector: Some(connector), connector: Some(connector),
@ -127,9 +130,10 @@ pin_project_lite::pin_project! {
max_redirect_times: u8, max_redirect_times: u8,
uri: Option<Uri>, uri: Option<Uri>,
method: Option<Method>, method: Option<Method>,
headers: Option<header::HeaderMap>,
body: Option<Bytes>, body: Option<Bytes>,
addr: Option<SocketAddr>, addr: Option<SocketAddr>,
connector: Option<Rc<S>> connector: Option<Rc<S>>,
} }
} }
} }
@ -148,6 +152,7 @@ where
max_redirect_times, max_redirect_times,
uri, uri,
method, method,
headers,
body, body,
addr, addr,
connector, connector,
@ -156,79 +161,60 @@ where
StatusCode::MOVED_PERMANENTLY StatusCode::MOVED_PERMANENTLY
| StatusCode::FOUND | StatusCode::FOUND
| StatusCode::SEE_OTHER | StatusCode::SEE_OTHER
| StatusCode::TEMPORARY_REDIRECT
| StatusCode::PERMANENT_REDIRECT
if *max_redirect_times > 0 => if *max_redirect_times > 0 =>
{ {
let org_uri = uri.take().unwrap(); let is_redirect = res.head().status == StatusCode::TEMPORARY_REDIRECT
// rebuild uri from the location header value. || res.head().status == StatusCode::PERMANENT_REDIRECT;
let uri = rebuild_uri(&res, org_uri)?;
// reset method let prev_uri = uri.take().unwrap();
let method = method.take().unwrap();
let method = match method { // rebuild uri from the location header value.
Method::GET | Method::HEAD => method, let next_uri = build_next_uri(&res, &prev_uri)?;
_ => Method::GET,
};
// take ownership of states that could be reused // take ownership of states that could be reused
let addr = addr.take(); let addr = addr.take();
let connector = connector.take(); let connector = connector.take();
let mut max_redirect_times = *max_redirect_times;
// use a new request head. // reset method
let mut head = RequestHead::default(); let method = if is_redirect {
head.uri = uri.clone(); method.take().unwrap()
head.method = method.clone(); } else {
let method = method.take().unwrap();
let head = RequestHeadType::Owned(head); match method {
Method::GET | Method::HEAD => method,
max_redirect_times -= 1; _ => Method::GET,
}
let fut = connector
.as_ref()
.unwrap()
// remove body
.call(ConnectRequest::Client(head, Body::None, addr));
self.set(RedirectServiceFuture::Client {
fut,
max_redirect_times,
uri: Some(uri),
method: Some(method),
// body is dropped on 301,302,303
body: None,
addr,
connector,
});
self.poll(cx)
}
StatusCode::TEMPORARY_REDIRECT | StatusCode::PERMANENT_REDIRECT
if *max_redirect_times > 0 =>
{
let org_uri = uri.take().unwrap();
// rebuild uri from the location header value.
let uri = rebuild_uri(&res, org_uri)?;
// try to reuse body
let body = body.take();
let body_new = match body {
Some(ref bytes) => Body::Bytes(bytes.clone()),
// TODO: should this be Body::Empty or Body::None.
_ => Body::Empty,
}; };
let addr = addr.take(); let mut body = body.take();
let method = method.take().unwrap(); let body_new = if is_redirect {
let connector = connector.take(); // try to reuse body
let mut max_redirect_times = *max_redirect_times; match body {
Some(ref bytes) => Body::Bytes(bytes.clone()),
// TODO: should this be Body::Empty or Body::None.
_ => Body::Empty,
}
} else {
body = None;
// remove body
Body::None
};
let mut headers = headers.take().unwrap();
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
// use a new request head. // use a new request head.
let mut head = RequestHead::default(); let mut head = RequestHead::default();
head.uri = uri.clone(); head.uri = next_uri.clone();
head.method = method.clone(); head.method = method.clone();
head.headers = headers.clone();
let head = RequestHeadType::Owned(head); let head = RequestHeadType::Owned(head);
let mut max_redirect_times = *max_redirect_times;
max_redirect_times -= 1; max_redirect_times -= 1;
let fut = connector let fut = connector
@ -239,8 +225,9 @@ where
self.set(RedirectServiceFuture::Client { self.set(RedirectServiceFuture::Client {
fut, fut,
max_redirect_times, max_redirect_times,
uri: Some(uri), uri: Some(next_uri),
method: Some(method), method: Some(method),
headers: Some(headers),
body, body,
addr, addr,
connector, connector,
@ -256,7 +243,7 @@ where
} }
} }
fn rebuild_uri(res: &ClientResponse, org_uri: Uri) -> Result<Uri, SendRequestError> { fn build_next_uri(res: &ClientResponse, prev_uri: &Uri) -> Result<Uri, SendRequestError> {
let uri = res let uri = res
.headers() .headers()
.get(header::LOCATION) .get(header::LOCATION)
@ -266,8 +253,8 @@ fn rebuild_uri(res: &ClientResponse, org_uri: Uri) -> Result<Uri, SendRequestErr
.map_err(|e| SendRequestError::Url(InvalidUrl::HttpError(e.into())))?; .map_err(|e| SendRequestError::Url(InvalidUrl::HttpError(e.into())))?;
if uri.scheme().is_none() || uri.authority().is_none() { if uri.scheme().is_none() || uri.authority().is_none() {
let uri = Uri::builder() let uri = Uri::builder()
.scheme(org_uri.scheme().cloned().unwrap()) .scheme(prev_uri.scheme().cloned().unwrap())
.authority(org_uri.authority().cloned().unwrap()) .authority(prev_uri.authority().cloned().unwrap())
.path_and_query(value.as_bytes()) .path_and_query(value.as_bytes())
.build()?; .build()?;
Ok::<_, SendRequestError>(uri) Ok::<_, SendRequestError>(uri)
@ -281,12 +268,25 @@ fn rebuild_uri(res: &ClientResponse, org_uri: Uri) -> Result<Uri, SendRequestErr
Ok(uri) Ok(uri)
} }
fn remove_sensitive_headers(headers: &mut header::HeaderMap, prev_uri: &Uri, next_uri: &Uri) {
if next_uri.host() != prev_uri.host()
|| next_uri.port() != prev_uri.port()
|| next_uri.scheme() != prev_uri.scheme()
{
headers.remove(header::COOKIE);
headers.remove(header::AUTHORIZATION);
headers.remove(header::PROXY_AUTHORIZATION);
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use actix_web::{web, App, Error, HttpResponse}; use actix_web::{web, App, Error, HttpRequest, HttpResponse};
use super::*; use super::*;
use crate::http::HeaderValue;
use crate::ClientBuilder; use crate::ClientBuilder;
use std::str::FromStr;
#[actix_rt::test] #[actix_rt::test]
async fn test_basic_redirect() { async fn test_basic_redirect() {
@ -347,4 +347,239 @@ mod tests {
assert_eq!(res.status().as_u16(), 302); assert_eq!(res.status().as_u16(), 302);
} }
#[actix_rt::test]
async fn test_redirect_status_kind_307_308() {
let srv = actix_test::start(|| {
async fn root() -> HttpResponse {
HttpResponse::TemporaryRedirect()
.append_header(("location", "/test"))
.finish()
}
async fn test(req: HttpRequest, body: Bytes) -> HttpResponse {
if req.method() == Method::POST && !body.is_empty() {
HttpResponse::Ok().finish()
} else {
HttpResponse::InternalServerError().finish()
}
}
App::new()
.service(web::resource("/").route(web::to(root)))
.service(web::resource("/test").route(web::to(test)))
});
let res = srv.post("/").send_body("Hello").await.unwrap();
assert_eq!(res.status().as_u16(), 200);
}
#[actix_rt::test]
async fn test_redirect_status_kind_301_302_303() {
let srv = actix_test::start(|| {
async fn root() -> HttpResponse {
HttpResponse::Found()
.append_header(("location", "/test"))
.finish()
}
async fn test(req: HttpRequest, body: Bytes) -> HttpResponse {
if (req.method() == Method::GET || req.method() == Method::HEAD)
&& body.is_empty()
{
HttpResponse::Ok().finish()
} else {
HttpResponse::InternalServerError().finish()
}
}
App::new()
.service(web::resource("/").route(web::to(root)))
.service(web::resource("/test").route(web::to(test)))
});
let res = srv.post("/").send_body("Hello").await.unwrap();
assert_eq!(res.status().as_u16(), 200);
let res = srv.post("/").send().await.unwrap();
assert_eq!(res.status().as_u16(), 200);
}
#[actix_rt::test]
async fn test_redirect_headers() {
let srv = actix_test::start(|| {
async fn root(req: HttpRequest) -> HttpResponse {
if req
.headers()
.get("custom")
.unwrap_or(&HeaderValue::from_str("").unwrap())
== "value"
{
HttpResponse::Found()
.append_header(("location", "/test"))
.finish()
} else {
HttpResponse::InternalServerError().finish()
}
}
async fn test(req: HttpRequest) -> HttpResponse {
if req
.headers()
.get("custom")
.unwrap_or(&HeaderValue::from_str("").unwrap())
== "value"
{
HttpResponse::Ok().finish()
} else {
HttpResponse::InternalServerError().finish()
}
}
App::new()
.service(web::resource("/").route(web::to(root)))
.service(web::resource("/test").route(web::to(test)))
});
let client = ClientBuilder::new()
.header("custom", "value")
.disable_redirects()
.finish();
let res = client.get(srv.url("/")).send().await.unwrap();
assert_eq!(res.status().as_u16(), 302);
let client = ClientBuilder::new().header("custom", "value").finish();
let res = client.get(srv.url("/")).send().await.unwrap();
assert_eq!(res.status().as_u16(), 200);
let client = ClientBuilder::new().finish();
let res = client
.get(srv.url("/"))
.insert_header(("custom", "value"))
.send()
.await
.unwrap();
assert_eq!(res.status().as_u16(), 200);
}
#[actix_rt::test]
async fn test_redirect_cross_origin_headers() {
// defining two services to have two different origins
let srv2 = actix_test::start(|| {
async fn root(req: HttpRequest) -> HttpResponse {
if req.headers().get(header::AUTHORIZATION).is_none() {
HttpResponse::Ok().finish()
} else {
HttpResponse::InternalServerError().finish()
}
}
App::new().service(web::resource("/").route(web::to(root)))
});
let srv2_port: u16 = srv2.addr().port();
let srv1 = actix_test::start(move || {
async fn root(req: HttpRequest) -> HttpResponse {
let port = *req.app_data::<u16>().unwrap();
if req.headers().get(header::AUTHORIZATION).is_some() {
HttpResponse::Found()
.append_header((
"location",
format!("http://localhost:{}/", port).as_str(),
))
.finish()
} else {
HttpResponse::InternalServerError().finish()
}
}
async fn test1(req: HttpRequest) -> HttpResponse {
if req.headers().get(header::AUTHORIZATION).is_some() {
HttpResponse::Found()
.append_header(("location", "/test2"))
.finish()
} else {
HttpResponse::InternalServerError().finish()
}
}
async fn test2(req: HttpRequest) -> HttpResponse {
if req.headers().get(header::AUTHORIZATION).is_some() {
HttpResponse::Ok().finish()
} else {
HttpResponse::InternalServerError().finish()
}
}
App::new()
.app_data(srv2_port)
.service(web::resource("/").route(web::to(root)))
.service(web::resource("/test1").route(web::to(test1)))
.service(web::resource("/test2").route(web::to(test2)))
});
// Send a request across different origins (http://srv1/ then http://srv2/), so the Authorization header should be removed
let client = ClientBuilder::new()
.header(header::AUTHORIZATION, "auth_key_value")
.finish();
let res = client.get(srv1.url("/")).send().await.unwrap();
assert_eq!(res.status().as_u16(), 200);
// Send a request within the same origin (http://srv1/test1 then http://srv1/test2), so no headers should be removed
let res = client.get(srv1.url("/test1")).send().await.unwrap();
assert_eq!(res.status().as_u16(), 200);
}
#[actix_rt::test]
async fn test_remove_sensitive_headers() {
fn gen_headers() -> header::HeaderMap {
let mut headers = header::HeaderMap::new();
headers.insert(header::USER_AGENT, HeaderValue::from_str("value").unwrap());
headers.insert(
header::AUTHORIZATION,
HeaderValue::from_str("value").unwrap(),
);
headers.insert(
header::PROXY_AUTHORIZATION,
HeaderValue::from_str("value").unwrap(),
);
headers.insert(header::COOKIE, HeaderValue::from_str("value").unwrap());
headers
}
// Same origin
let prev_uri = Uri::from_str("https://host/path1").unwrap();
let next_uri = Uri::from_str("https://host/path2").unwrap();
let mut headers = gen_headers();
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
assert_eq!(headers.len(), 4);
// different schema
let prev_uri = Uri::from_str("http://host/").unwrap();
let next_uri = Uri::from_str("https://host/").unwrap();
let mut headers = gen_headers();
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
assert_eq!(headers.len(), 1);
// different host
let prev_uri = Uri::from_str("https://host1/").unwrap();
let next_uri = Uri::from_str("https://host2/").unwrap();
let mut headers = gen_headers();
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
assert_eq!(headers.len(), 1);
// different port
let prev_uri = Uri::from_str("https://host:12/").unwrap();
let next_uri = Uri::from_str("https://host:23/").unwrap();
let mut headers = gen_headers();
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
assert_eq!(headers.len(), 1);
// different everything!
let prev_uri = Uri::from_str("http://host1:12/path1").unwrap();
let next_uri = Uri::from_str("https://host2:23/path2").unwrap();
let mut headers = gen_headers();
remove_sensitive_headers(&mut headers, &prev_uri, &next_uri);
assert_eq!(headers.len(), 1);
}
} }

View File

@ -1 +1 @@
msrv = "1.46" msrv = "1.51"

View File

@ -4,7 +4,7 @@ digraph {
subgraph cluster_net { subgraph cluster_net {
label="actix-net" label="actix-net"
"actix-codec" "actix-macros" "actix-rt" "actix-server" "actix-service" "actix-codec" "actix-macros" "actix-rt" "actix-server" "actix-service"
"actix-tls" "actix-tracing" "actix-utils" "actix-router" "actix-tls" "actix-tracing" "actix-utils"
} }
subgraph cluster_other { subgraph cluster_other {
@ -25,7 +25,6 @@ digraph {
"actix-tls" -> { "tokio-util" }[color="#009900"] "actix-tls" -> { "tokio-util" }[color="#009900"]
"actix-server" -> { "actix-service" "actix-rt" "actix-utils" "tokio" } "actix-server" -> { "actix-service" "actix-rt" "actix-utils" "tokio" }
"actix-rt" -> { "actix-macros" "tokio" } "actix-rt" -> { "actix-macros" "tokio" }
"actix-router" -> { "bytestring" }
"local-channel" -> { "local-waker" } "local-channel" -> { "local-waker" }

View File

@ -10,6 +10,7 @@ digraph {
"web-actors" "web-actors"
"web-codegen" "web-codegen"
"http-test" "http-test"
"router"
{ rank=same; "multipart" "web-actors" "http-test" }; { rank=same; "multipart" "web-actors" "http-test" };
{ rank=same; "files" "awc" "web" }; { rank=same; "files" "awc" "web" };
@ -36,7 +37,7 @@ digraph {
"rt" -> { "macros" } "rt" -> { "macros" }
{ rank=same; "utils" "codec" }; { rank=same; "utils" "codec" };
{ rank=same; "rt" "macros" "service" "router" }; { rank=same; "rt" "macros" "service" };
// actix // actix

View File

@ -10,9 +10,10 @@ digraph {
"actix-web-codegen" "actix-web-codegen"
"actix-http-test" "actix-http-test"
"actix-test" "actix-test"
"actix-router"
} }
"actix-web" -> { "actix-web-codegen" "actix-http" } "actix-web" -> { "actix-web-codegen" "actix-http" "actix-router" }
"awc" -> { "actix-http" } "awc" -> { "actix-http" }
"actix-web-actors" -> { "actix" "actix-web" "actix-http" } "actix-web-actors" -> { "actix" "actix-web" "actix-http" }
"actix-multipart" -> { "actix-web" } "actix-multipart" -> { "actix-web" }

View File

@ -338,7 +338,7 @@ where
U: AsRef<str>, U: AsRef<str>,
{ {
let mut rdef = ResourceDef::new(url.as_ref()); let mut rdef = ResourceDef::new(url.as_ref());
*rdef.name_mut() = name.as_ref().to_string(); rdef.set_name(name.as_ref());
self.external.push(rdef); self.external.push(rdef);
self self
} }

View File

@ -79,7 +79,7 @@ where
.into_iter() .into_iter()
.for_each(|mut srv| srv.register(&mut config)); .for_each(|mut srv| srv.register(&mut config));
let mut rmap = ResourceMap::new(ResourceDef::new("")); let mut rmap = ResourceMap::new(ResourceDef::prefix(""));
let (config, services) = config.into_services(); let (config, services) = config.into_services();
@ -104,7 +104,7 @@ where
// complete ResourceMap tree creation // complete ResourceMap tree creation
let rmap = Rc::new(rmap); let rmap = Rc::new(rmap);
rmap.finish(rmap.clone()); ResourceMap::finish(&rmap);
// construct all async data factory futures // construct all async data factory futures
let factory_futs = join_all(self.async_data_factories.iter().map(|f| f())); let factory_futs = join_all(self.async_data_factories.iter().map(|f| f()));
@ -291,7 +291,7 @@ impl Service<ServiceRequest> for AppRouting {
actix_service::always_ready!(); actix_service::always_ready!();
fn call(&self, mut req: ServiceRequest) -> Self::Future { fn call(&self, mut req: ServiceRequest) -> Self::Future {
let res = self.router.recognize_checked(&mut req, |req, guards| { let res = self.router.recognize_fn(&mut req, |req, guards| {
if let Some(ref guards) = guards { if let Some(ref guards) = guards {
for f in guards { for f in guards {
if !f.check(req.head()) { if !f.check(req.head()) {

View File

@ -273,7 +273,7 @@ impl ServiceConfig {
U: AsRef<str>, U: AsRef<str>,
{ {
let mut rdef = ResourceDef::new(url.as_ref()); let mut rdef = ResourceDef::new(url.as_ref());
*rdef.name_mut() = name.as_ref().to_string(); rdef.set_name(name.as_ref());
self.external.push(rdef); self.external.push(rdef);
self self
} }

View File

@ -120,7 +120,6 @@ where
} }
impl<T: ?Sized + 'static> FromRequest for Data<T> { impl<T: ?Sized + 'static> FromRequest for Data<T> {
type Config = ();
type Error = Error; type Error = Error;
type Future = Ready<Result<Self, Error>>; type Future = Ready<Result<Self, Error>>;

View File

@ -18,7 +18,7 @@ pub use actix_http::body::{AnyBody, Body, BodySize, MessageBody, ResponseBody, S
#[cfg(feature = "__compress")] #[cfg(feature = "__compress")]
pub use actix_http::encoding::Decoder as Decompress; pub use actix_http::encoding::Decoder as Decompress;
pub use actix_http::{Extensions, Payload, PayloadStream, RequestHead, ResponseHead}; pub use actix_http::{Extensions, Payload, PayloadStream, RequestHead, Response, ResponseHead};
pub use actix_router::{Path, ResourceDef, ResourcePath, Url}; pub use actix_router::{Path, ResourceDef, ResourcePath, Url};
pub use actix_server::Server; pub use actix_server::Server;
pub use actix_service::{ pub use actix_service::{
@ -26,13 +26,24 @@ pub use actix_service::{
}; };
use crate::http::header::ContentEncoding; use crate::http::header::ContentEncoding;
use actix_http::{Response, ResponseBuilder}; use actix_http::ResponseBuilder;
pub(crate) fn insert_leading_slash(mut patterns: Vec<String>) -> Vec<String> { use actix_router::Patterns;
for path in &mut patterns {
if !path.is_empty() && !path.starts_with('/') { pub(crate) fn ensure_leading_slash(mut patterns: Patterns) -> Patterns {
path.insert(0, '/'); match &mut patterns {
}; Patterns::Single(pat) => {
if !pat.is_empty() && !pat.starts_with('/') {
pat.insert(0, '/');
};
}
Patterns::List(pats) => {
for pat in pats {
if !pat.is_empty() && !pat.starts_with('/') {
pat.insert(0, '/');
};
}
}
} }
patterns patterns

View File

@ -13,13 +13,52 @@ use futures_core::ready;
use crate::{dev::Payload, Error, HttpRequest}; use crate::{dev::Payload, Error, HttpRequest};
/// Trait implemented by types that can be extracted from request. /// A type that implements [`FromRequest`] is called an **extractor** and can extract data from
/// the request. Some types that implement this trait are: [`Json`], [`Header`], and [`Path`].
/// ///
/// Types that implement this trait can be used with `Route` handlers. /// # Configuration
/// An extractor can be customized by injecting the corresponding configuration with one of:
///
/// - [`App::app_data()`][crate::App::app_data]
/// - [`Scope::app_data()`][crate::Scope::app_data]
/// - [`Resource::app_data()`][crate::Resource::app_data]
///
/// Here are some built-in extractors and their corresponding configuration.
/// Please refer to the respective documentation for details.
///
/// | Extractor | Configuration |
/// |-------------|-------------------|
/// | [`Header`] | _None_ |
/// | [`Path`] | [`PathConfig`] |
/// | [`Json`] | [`JsonConfig`] |
/// | [`Form`] | [`FormConfig`] |
/// | [`Query`] | [`QueryConfig`] |
/// | [`Bytes`] | [`PayloadConfig`] |
/// | [`String`] | [`PayloadConfig`] |
/// | [`Payload`] | [`PayloadConfig`] |
///
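/// For example, a minimal sketch of injecting a configuration for the `Json` extractor listed
/// above (`JsonConfig::limit` caps the accepted payload size):
///
/// ```
/// use actix_web::{web, App};
///
/// let app = App::new()
///     // limit JSON payloads handled by the `Json` extractor to 4 KiB
///     .app_data(web::JsonConfig::default().limit(4096));
/// ```
///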
/// # Implementing An Extractor
/// To reduce duplicate code in handlers where extracting certain parts of a request has a common
/// structure, you can implement `FromRequest` for your own types.
///
/// Note that the request payload can only be consumed by one extractor.
///
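/// A minimal sketch of such an extractor (the `ClientId` type and the `x-client-id` header are
/// purely illustrative, not part of the framework):
///
/// ```
/// use actix_web::{dev::Payload, error::ErrorBadRequest, Error, FromRequest, HttpRequest};
/// use futures_util::future::{err, ok, Ready};
///
/// struct ClientId(String);
///
/// impl FromRequest for ClientId {
///     type Error = Error;
///     type Future = Ready<Result<Self, Self::Error>>;
///
///     fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
///         // reject requests that do not carry the (illustrative) client id header
///         match req.headers().get("x-client-id").and_then(|v| v.to_str().ok()) {
///             Some(id) => ok(ClientId(id.to_owned())),
///             None => err(ErrorBadRequest("missing x-client-id header")),
///         }
///     }
/// }
/// ```
///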
/// [`Header`]: crate::web::Header
/// [`Json`]: crate::web::Json
/// [`JsonConfig`]: crate::web::JsonConfig
/// [`Form`]: crate::web::Form
/// [`FormConfig`]: crate::web::FormConfig
/// [`Path`]: crate::web::Path
/// [`PathConfig`]: crate::web::PathConfig
/// [`Query`]: crate::web::Query
/// [`QueryConfig`]: crate::web::QueryConfig
/// [`Payload`]: crate::web::Payload
/// [`PayloadConfig`]: crate::web::PayloadConfig
/// [`String`]: FromRequest#impl-FromRequest-for-String
/// [`Bytes`]: crate::web::Bytes#impl-FromRequest
/// [`Either`]: crate::web::Either
#[doc(alias = "extract", alias = "extractor")]
pub trait FromRequest: Sized { pub trait FromRequest: Sized {
/// Configuration for this extractor.
type Config: Default + 'static;
/// The associated error which can be returned. /// The associated error which can be returned.
type Error: Into<Error>; type Error: Into<Error>;
@ -35,14 +74,6 @@ pub trait FromRequest: Sized {
fn extract(req: &HttpRequest) -> Self::Future { fn extract(req: &HttpRequest) -> Self::Future {
Self::from_request(req, &mut Payload::None) Self::from_request(req, &mut Payload::None)
} }
/// Create and configure config instance.
fn configure<F>(f: F) -> Self::Config
where
F: FnOnce(Self::Config) -> Self::Config,
{
f(Self::Config::default())
}
} }
/// Optionally extract a field from the request /// Optionally extract a field from the request
@ -65,7 +96,6 @@ pub trait FromRequest: Sized {
/// impl FromRequest for Thing { /// impl FromRequest for Thing {
/// type Error = Error; /// type Error = Error;
/// type Future = Ready<Result<Self, Self::Error>>; /// type Future = Ready<Result<Self, Self::Error>>;
/// type Config = ();
/// ///
/// fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future { /// fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future {
/// if rand::random() { /// if rand::random() {
@ -100,7 +130,6 @@ where
{ {
type Error = Error; type Error = Error;
type Future = FromRequestOptFuture<T::Future>; type Future = FromRequestOptFuture<T::Future>;
type Config = T::Config;
#[inline] #[inline]
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
@ -156,7 +185,6 @@ where
/// impl FromRequest for Thing { /// impl FromRequest for Thing {
/// type Error = Error; /// type Error = Error;
/// type Future = Ready<Result<Thing, Error>>; /// type Future = Ready<Result<Thing, Error>>;
/// type Config = ();
/// ///
/// fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future { /// fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future {
/// if rand::random() { /// if rand::random() {
@ -189,7 +217,6 @@ where
{ {
type Error = Error; type Error = Error;
type Future = FromRequestResFuture<T::Future>; type Future = FromRequestResFuture<T::Future>;
type Config = T::Config;
#[inline] #[inline]
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
@ -233,7 +260,6 @@ where
impl FromRequest for Uri { impl FromRequest for Uri {
type Error = Infallible; type Error = Infallible;
type Future = Ready<Result<Self, Self::Error>>; type Future = Ready<Result<Self, Self::Error>>;
type Config = ();
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
ok(req.uri().clone()) ok(req.uri().clone())
@ -255,7 +281,6 @@ impl FromRequest for Uri {
impl FromRequest for Method { impl FromRequest for Method {
type Error = Infallible; type Error = Infallible;
type Future = Ready<Result<Self, Self::Error>>; type Future = Ready<Result<Self, Self::Error>>;
type Config = ();
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
ok(req.method().clone()) ok(req.method().clone())
@ -266,7 +291,6 @@ impl FromRequest for Method {
impl FromRequest for () { impl FromRequest for () {
type Error = Infallible; type Error = Infallible;
type Future = Ready<Result<Self, Self::Error>>; type Future = Ready<Result<Self, Self::Error>>;
type Config = ();
fn from_request(_: &HttpRequest, _: &mut Payload) -> Self::Future { fn from_request(_: &HttpRequest, _: &mut Payload) -> Self::Future {
ok(()) ok(())
@ -306,7 +330,6 @@ macro_rules! tuple_from_req ({$fut_type:ident, $(($n:tt, $T:ident)),+} => {
{ {
type Error = Error; type Error = Error;
type Future = $fut_type<$($T),+>; type Future = $fut_type<$($T),+>;
type Config = ($($T::Config),+);
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
$fut_type { $fut_type {

View File

@ -1,10 +1,10 @@
//! # References //! # References
//! //!
//! "The Content-Disposition Header Field" https://www.ietf.org/rfc/rfc2183.txt //! "The Content-Disposition Header Field" <https://www.ietf.org/rfc/rfc2183.txt>
//! "The Content-Disposition Header Field in the Hypertext Transfer Protocol (HTTP)" https://www.ietf.org/rfc/rfc6266.txt //! "The Content-Disposition Header Field in the Hypertext Transfer Protocol (HTTP)" <https://www.ietf.org/rfc/rfc6266.txt>
//! "Returning Values from Forms: multipart/form-data" https://www.ietf.org/rfc/rfc7578.txt //! "Returning Values from Forms: multipart/form-data" <https://www.ietf.org/rfc/rfc7578.txt>
//! Browser conformance tests at: http://greenbytes.de/tech/tc2231/ //! Browser conformance tests at: <http://greenbytes.de/tech/tc2231/>
//! IANA assignment: http://www.iana.org/assignments/cont-disp/cont-disp.xhtml //! IANA assignment: <http://www.iana.org/assignments/cont-disp/cont-disp.xhtml>
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
@ -457,7 +457,7 @@ impl Header for ContentDisposition {
fn parse<T: crate::HttpMessage>(msg: &T) -> Result<Self, crate::error::ParseError> { fn parse<T: crate::HttpMessage>(msg: &T) -> Result<Self, crate::error::ParseError> {
if let Some(h) = msg.headers().get(&Self::name()) { if let Some(h) = msg.headers().get(&Self::name()) {
Self::from_raw(&h) Self::from_raw(h)
} else { } else {
Err(crate::error::ParseError::Header) Err(crate::error::ParseError::Header)
} }

View File

@ -209,7 +209,6 @@ impl ConnectionInfo {
impl FromRequest for ConnectionInfo { impl FromRequest for ConnectionInfo {
type Error = Infallible; type Error = Infallible;
type Future = Ready<Result<Self, Self::Error>>; type Future = Ready<Result<Self, Self::Error>>;
type Config = ();
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
ok(req.connection_info().clone()) ok(req.connection_info().clone())
@ -252,7 +251,6 @@ impl ResponseError for MissingPeerAddr {}
impl FromRequest for PeerAddr { impl FromRequest for PeerAddr {
type Error = MissingPeerAddr; type Error = MissingPeerAddr;
type Future = Ready<Result<Self, Self::Error>>; type Future = Ready<Result<Self, Self::Error>>;
type Config = ();
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
match req.peer_addr() { match req.peer_addr() {

View File

@ -53,7 +53,7 @@
//! * SSL support using OpenSSL or Rustls //! * SSL support using OpenSSL or Rustls
//! * Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/)) //! * Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/))
//! * Includes an async [HTTP client](https://docs.rs/awc/) //! * Includes an async [HTTP client](https://docs.rs/awc/)
//! * Runs on stable Rust 1.46+ //! * Runs on stable Rust 1.51+
//! //!
//! # Crate Features //! # Crate Features
//! * `cookies` - cookies support (enabled by default) //! * `cookies` - cookies support (enabled by default)
@ -96,7 +96,6 @@ pub mod test;
pub(crate) mod types; pub(crate) mod types;
pub mod web; pub mod web;
pub use actix_http::Response as BaseHttpResponse;
pub use actix_http::{body, HttpMessage}; pub use actix_http::{body, HttpMessage};
#[doc(inline)] #[doc(inline)]
pub use actix_rt as rt; pub use actix_rt as rt;

View File

@ -2,10 +2,10 @@
use std::{ use std::{
cmp, cmp,
convert::TryFrom,
future::Future, future::Future,
marker::PhantomData, marker::PhantomData,
pin::Pin, pin::Pin,
str::FromStr,
task::{Context, Poll}, task::{Context, Poll},
}; };
@ -13,16 +13,18 @@ use actix_http::{
body::{MessageBody, ResponseBody}, body::{MessageBody, ResponseBody},
encoding::Encoder, encoding::Encoder,
http::header::{ContentEncoding, ACCEPT_ENCODING}, http::header::{ContentEncoding, ACCEPT_ENCODING},
StatusCode,
}; };
use actix_service::{Service, Transform}; use actix_service::{Service, Transform};
use actix_utils::future::{ok, Ready}; use actix_utils::future::{ok, Either, Ready};
use futures_core::ready; use futures_core::ready;
use once_cell::sync::Lazy;
use pin_project::pin_project; use pin_project::pin_project;
use crate::{ use crate::{
dev::BodyEncoding, dev::BodyEncoding,
service::{ServiceRequest, ServiceResponse}, service::{ServiceRequest, ServiceResponse},
Error, Error, HttpResponse,
}; };
/// Middleware for compressing response payloads. /// Middleware for compressing response payloads.
@ -78,34 +80,78 @@ pub struct CompressMiddleware<S> {
encoding: ContentEncoding, encoding: ContentEncoding,
} }
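// Comma-separated list of the compiled-in content encodings; used as the body of the
// 406 Not Acceptable response when content-encoding negotiation fails (see `call` below).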
static SUPPORTED_ALGORITHM_NAMES: Lazy<String> = Lazy::new(|| {
let mut encoding = vec![];
#[cfg(feature = "compress-brotli")]
{
encoding.push("br");
}
#[cfg(feature = "compress-gzip")]
{
encoding.push("gzip");
encoding.push("deflate");
}
#[cfg(feature = "compress-zstd")]
encoding.push("zstd");
assert!(
!encoding.is_empty(),
"encoding can not be empty unless __compress feature has been explicitly enabled by itself"
);
encoding.join(", ")
});
impl<S, B> Service<ServiceRequest> for CompressMiddleware<S> impl<S, B> Service<ServiceRequest> for CompressMiddleware<S>
where where
B: MessageBody,
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>, S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
B: MessageBody,
{ {
type Response = ServiceResponse<ResponseBody<Encoder<B>>>; type Response = ServiceResponse<ResponseBody<Encoder<B>>>;
type Error = Error; type Error = Error;
type Future = CompressResponse<S, B>; type Future = Either<CompressResponse<S, B>, Ready<Result<Self::Response, Self::Error>>>;
actix_service::forward_ready!(service); actix_service::forward_ready!(service);
#[allow(clippy::borrow_interior_mutable_const)] #[allow(clippy::borrow_interior_mutable_const)]
fn call(&self, req: ServiceRequest) -> Self::Future { fn call(&self, req: ServiceRequest) -> Self::Future {
// negotiate content-encoding // negotiate content-encoding
let encoding = if let Some(val) = req.headers().get(&ACCEPT_ENCODING) { let encoding_result = req
if let Ok(enc) = val.to_str() { .headers()
AcceptEncoding::parse(enc, self.encoding) .get(&ACCEPT_ENCODING)
} else { .and_then(|val| val.to_str().ok())
ContentEncoding::Identity .map(|enc| AcceptEncoding::try_parse(enc, self.encoding));
}
} else {
ContentEncoding::Identity
};
CompressResponse { match encoding_result {
encoding, // Missing header => fallback to identity
fut: self.service.call(req), None => Either::left(CompressResponse {
_phantom: PhantomData, encoding: ContentEncoding::Identity,
fut: self.service.call(req),
_phantom: PhantomData,
}),
// Valid encoding
Some(Ok(encoding)) => Either::left(CompressResponse {
encoding,
fut: self.service.call(req),
_phantom: PhantomData,
}),
// The Accept-Encoding header is present but we cannot satisfy any encoding the client asked for
Some(Err(_)) => {
let res = HttpResponse::with_body(
StatusCode::NOT_ACCEPTABLE,
SUPPORTED_ALGORITHM_NAMES.as_str(),
);
let enc = ContentEncoding::Identity;
Either::right(ok(req.into_response(res.map_body(move |head, body| {
Encoder::response(enc, head, ResponseBody::Other(body.into()))
}))))
}
} }
} }
} }
@ -114,7 +160,6 @@ where
pub struct CompressResponse<S, B> pub struct CompressResponse<S, B>
where where
S: Service<ServiceRequest>, S: Service<ServiceRequest>,
B: MessageBody,
{ {
#[pin] #[pin]
fut: S::Future, fut: S::Future,
@ -151,6 +196,7 @@ where
struct AcceptEncoding { struct AcceptEncoding {
encoding: ContentEncoding, encoding: ContentEncoding,
// TODO: use Quality or QualityItem<ContentEncoding>
quality: f64, quality: f64,
} }
@ -177,26 +223,56 @@ impl PartialOrd for AcceptEncoding {
impl PartialEq for AcceptEncoding { impl PartialEq for AcceptEncoding {
fn eq(&self, other: &AcceptEncoding) -> bool { fn eq(&self, other: &AcceptEncoding) -> bool {
self.quality == other.quality self.encoding == other.encoding && self.quality == other.quality
} }
} }
/// Parse q-factor from quality strings.
///
/// If parsing fails, fall back to the default value, which is 1.
/// More details available here: <https://developer.mozilla.org/en-US/docs/Glossary/Quality_values>
fn parse_quality(parts: &[&str]) -> f64 {
for part in parts {
if part.trim().starts_with("q=") {
return part[2..].parse().unwrap_or(1.0);
}
}
1.0
}
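// A few illustrative values, derived from the rules above:
//   parse_quality(&["q=0.6"]) == 0.6
//   parse_quality(&["q=abc"]) == 1.0  // unparsable => default
//   parse_quality(&[])        == 1.0  // absent => default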
#[derive(Debug, PartialEq, Eq)]
enum AcceptEncodingError {
/// This error occurs when the client only accepts compressed responses but the server does not
/// support any of the compression algorithms the client accepts.
CompressionAlgorithmMismatch,
}
impl AcceptEncoding { impl AcceptEncoding {
fn new(tag: &str) -> Option<AcceptEncoding> { fn new(tag: &str) -> Option<AcceptEncoding> {
let parts: Vec<&str> = tag.split(';').collect(); let parts: Vec<&str> = tag.split(';').collect();
let encoding = match parts.len() { let encoding = match parts.len() {
0 => return None, 0 => return None,
_ => ContentEncoding::from(parts[0]), _ => match ContentEncoding::try_from(parts[0]) {
}; Err(_) => return None,
let quality = match parts.len() { Ok(x) => x,
1 => encoding.quality(), },
_ => f64::from_str(parts[1]).unwrap_or(0.0),
}; };
let quality = parse_quality(&parts[1..]);
if quality <= 0.0 || quality > 1.0 {
return None;
}
Some(AcceptEncoding { encoding, quality }) Some(AcceptEncoding { encoding, quality })
} }
/// Parse a raw Accept-Encoding header value into an ordered list. /// Parse a raw Accept-Encoding header value into an ordered list then return the best match
pub fn parse(raw: &str, encoding: ContentEncoding) -> ContentEncoding { /// based on middleware configuration.
pub fn try_parse(
raw: &str,
encoding: ContentEncoding,
) -> Result<ContentEncoding, AcceptEncodingError> {
let mut encodings = raw let mut encodings = raw
.replace(' ', "") .replace(' ', "")
.split(',') .split(',')
@ -206,13 +282,90 @@ impl AcceptEncoding {
encodings.sort(); encodings.sort();
for enc in encodings { for enc in encodings {
if encoding == ContentEncoding::Auto { if encoding == ContentEncoding::Auto || encoding == enc.encoding {
return enc.encoding; return Ok(enc.encoding);
} else if encoding == enc.encoding {
return encoding;
} }
} }
ContentEncoding::Identity // Special case if user cannot accept uncompressed data.
// See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding
// TODO: account for whitespace
if raw.contains("*;q=0") || raw.contains("identity;q=0") {
return Err(AcceptEncodingError::CompressionAlgorithmMismatch);
}
Ok(ContentEncoding::Identity)
}
}
#[cfg(test)]
mod tests {
use super::*;
macro_rules! assert_parse_eq {
($raw:expr, $result:expr) => {
assert_eq!(
AcceptEncoding::try_parse($raw, ContentEncoding::Auto),
Ok($result)
);
};
}
macro_rules! assert_parse_fail {
($raw:expr) => {
assert!(AcceptEncoding::try_parse($raw, ContentEncoding::Auto).is_err());
};
}
#[test]
fn test_parse_encoding() {
// Test simple case
assert_parse_eq!("br", ContentEncoding::Br);
assert_parse_eq!("gzip", ContentEncoding::Gzip);
assert_parse_eq!("deflate", ContentEncoding::Deflate);
assert_parse_eq!("zstd", ContentEncoding::Zstd);
// Test space, trim, missing values
assert_parse_eq!("br,,,,", ContentEncoding::Br);
assert_parse_eq!("gzip , br, zstd", ContentEncoding::Gzip);
// Test float number parsing
assert_parse_eq!("br;q=1 ,", ContentEncoding::Br);
assert_parse_eq!("br;q=1.0 , br", ContentEncoding::Br);
// Test wildcard
assert_parse_eq!("*", ContentEncoding::Identity);
assert_parse_eq!("*;q=1.0", ContentEncoding::Identity);
}
#[test]
fn test_parse_encoding_qfactor_ordering() {
assert_parse_eq!("gzip, br, zstd", ContentEncoding::Gzip);
assert_parse_eq!("zstd, br, gzip", ContentEncoding::Zstd);
assert_parse_eq!("gzip;q=0.4, br;q=0.6", ContentEncoding::Br);
assert_parse_eq!("gzip;q=0.8, br;q=0.4", ContentEncoding::Gzip);
}
#[test]
fn test_parse_encoding_qfactor_invalid() {
// Out of range
assert_parse_eq!("gzip;q=-5.0", ContentEncoding::Identity);
assert_parse_eq!("gzip;q=5.0", ContentEncoding::Identity);
// Disabled
assert_parse_eq!("gzip;q=0", ContentEncoding::Identity);
}
#[test]
fn test_parse_compression_required() {
// Check that we fall back to identity if an unsupported compression algorithm is requested
assert_parse_eq!("compress", ContentEncoding::Identity);
// Client requests only an unsupported algorithm and refuses an uncompressed response
assert_parse_fail!("compress, identity;q=0");
assert_parse_fail!("compress, identity;q=0.0");
assert_parse_fail!("compress, *;q=0");
assert_parse_fail!("compress, *;q=0.0");
} }
} }

View File

@ -18,7 +18,7 @@ use bytes::Bytes;
use futures_core::ready; use futures_core::ready;
use log::{debug, warn}; use log::{debug, warn};
use regex::{Regex, RegexSet}; use regex::{Regex, RegexSet};
use time::OffsetDateTime; use time::{format_description::well_known::Rfc3339, OffsetDateTime};
use crate::{ use crate::{
dev::{BodySize, MessageBody}, dev::{BodySize, MessageBody},
@ -341,7 +341,6 @@ where
) -> Poll<Option<Result<Bytes, Self::Error>>> { ) -> Poll<Option<Result<Bytes, Self::Error>>> {
let this = self.project(); let this = self.project();
// TODO: MSRV 1.51: poll_map_err
match ready!(this.body.poll_next(cx)) { match ready!(this.body.poll_next(cx)) {
Some(Ok(chunk)) => { Some(Ok(chunk)) => {
*this.size += chunk.len(); *this.size += chunk.len();
@ -539,7 +538,7 @@ impl FormatText {
}; };
} }
FormatText::UrlPath => *self = FormatText::Str(req.path().to_string()), FormatText::UrlPath => *self = FormatText::Str(req.path().to_string()),
FormatText::RequestTime => *self = FormatText::Str(now.format("%Y-%m-%dT%H:%M:%S")), FormatText::RequestTime => *self = FormatText::Str(now.format(&Rfc3339).unwrap()),
FormatText::RequestHeader(ref name) => { FormatText::RequestHeader(ref name) => {
let s = if let Some(val) = req.headers().get(name) { let s = if let Some(val) = req.headers().get(name) {
if let Ok(s) = val.to_str() { if let Ok(s) = val.to_str() {
@ -553,7 +552,7 @@ impl FormatText {
*self = FormatText::Str(s.to_string()); *self = FormatText::Str(s.to_string());
} }
FormatText::RemoteAddr => { FormatText::RemoteAddr => {
let s = if let Some(ref peer) = req.connection_info().remote_addr() { let s = if let Some(peer) = req.connection_info().remote_addr() {
FormatText::Str((*peer).to_string()) FormatText::Str((*peer).to_string())
} else { } else {
FormatText::Str("-".to_string()) FormatText::Str("-".to_string())
@ -768,7 +767,7 @@ mod tests {
Ok(()) Ok(())
}; };
let s = format!("{}", FormatDisplay(&render)); let s = format!("{}", FormatDisplay(&render));
assert!(s.contains(&now.format("%Y-%m-%dT%H:%M:%S"))); assert!(s.contains(&now.format(&Rfc3339).unwrap()));
} }
#[actix_rt::test] #[actix_rt::test]

View File

@ -19,3 +19,43 @@ mod compress;
#[cfg(feature = "__compress")] #[cfg(feature = "__compress")]
pub use self::compress::Compress; pub use self::compress::Compress;
#[cfg(test)]
mod tests {
use crate::{http::StatusCode, App};
use super::*;
#[test]
fn common_combinations() {
// ensure there's no reason that the built-in middleware cannot compose
let _ = App::new()
.wrap(Compat::new(Logger::default()))
.wrap(Condition::new(true, DefaultHeaders::new()))
.wrap(DefaultHeaders::new().header("X-Test2", "X-Value2"))
.wrap(ErrorHandlers::new().handler(StatusCode::FORBIDDEN, |res| {
Ok(ErrorHandlerResponse::Response(res))
}))
.wrap(Logger::default())
.wrap(NormalizePath::new(TrailingSlash::Trim));
let _ = App::new()
.wrap(NormalizePath::new(TrailingSlash::Trim))
.wrap(Logger::default())
.wrap(ErrorHandlers::new().handler(StatusCode::FORBIDDEN, |res| {
Ok(ErrorHandlerResponse::Response(res))
}))
.wrap(DefaultHeaders::new().header("X-Test2", "X-Value2"))
.wrap(Condition::new(true, DefaultHeaders::new()))
.wrap(Compat::new(Logger::default()));
#[cfg(feature = "__compress")]
{
let _ = App::new().wrap(Compress::default()).wrap(Logger::default());
let _ = App::new().wrap(Logger::default()).wrap(Compress::default());
let _ = App::new().wrap(Compat::new(Compress::default()));
let _ = App::new().wrap(Condition::new(true, Compat::new(Compress::default())));
}
}
}

View File

@ -59,7 +59,7 @@ impl Default for TrailingSlash {
/// ///
/// # actix_web::rt::System::new().block_on(async { /// # actix_web::rt::System::new().block_on(async {
/// let app = App::new() /// let app = App::new()
/// .wrap(middleware::NormalizePath::default()) /// .wrap(middleware::NormalizePath::trim())
/// .route("/test", web::get().to(|| async { "test" })) /// .route("/test", web::get().to(|| async { "test" }))
/// .route("/unmatchable/", web::get().to(|| async { "unmatchable" })); /// .route("/unmatchable/", web::get().to(|| async { "unmatchable" }));
/// ///
@ -85,13 +85,31 @@ impl Default for TrailingSlash {
/// assert_eq!(res.status(), StatusCode::NOT_FOUND); /// assert_eq!(res.status(), StatusCode::NOT_FOUND);
/// # }) /// # })
/// ``` /// ```
#[derive(Debug, Clone, Copy, Default)] #[derive(Debug, Clone, Copy)]
pub struct NormalizePath(TrailingSlash); pub struct NormalizePath(TrailingSlash);
impl Default for NormalizePath {
fn default() -> Self {
log::warn!(
"`NormalizePath::default()` is deprecated. The default trailing slash behavior changed \
in v4 from `Always` to `Trim`. Update your call to `NormalizePath::new(...)`."
);
Self(TrailingSlash::Trim)
}
}
impl NormalizePath { impl NormalizePath {
/// Create new `NormalizePath` middleware with the specified trailing slash style. /// Create new `NormalizePath` middleware with the specified trailing slash style.
pub fn new(trailing_slash_style: TrailingSlash) -> Self { pub fn new(trailing_slash_style: TrailingSlash) -> Self {
NormalizePath(trailing_slash_style) Self(trailing_slash_style)
}
/// Constructs a new `NormalizePath` middleware with [trim](TrailingSlash::Trim) semantics.
///
/// Use this instead of `NormalizePath::default()` to avoid deprecation warning.
pub fn trim() -> Self {
Self::new(TrailingSlash::Trim)
} }
} }

View File

@ -184,7 +184,7 @@ impl HttpRequest {
U: IntoIterator<Item = I>, U: IntoIterator<Item = I>,
I: AsRef<str>, I: AsRef<str>,
{ {
self.resource_map().url_for(&self, name, elements) self.resource_map().url_for(self, name, elements)
} }
/// Generate url for named resource /// Generate url for named resource
@ -199,7 +199,7 @@ impl HttpRequest {
#[inline] #[inline]
/// Get a reference to a `ResourceMap` of current application. /// Get a reference to a `ResourceMap` of current application.
pub fn resource_map(&self) -> &ResourceMap { pub fn resource_map(&self) -> &ResourceMap {
&self.app_state().rmap() self.app_state().rmap()
} }
/// Peer socket address. /// Peer socket address.
@ -358,7 +358,6 @@ impl Drop for HttpRequest {
/// } /// }
/// ``` /// ```
impl FromRequest for HttpRequest { impl FromRequest for HttpRequest {
type Config = ();
type Error = Error; type Error = Error;
type Future = Ready<Result<Self, Error>>; type Future = Ready<Result<Self, Error>>;
@ -509,9 +508,9 @@ mod tests {
#[test] #[test]
fn test_url_for() { fn test_url_for() {
let mut res = ResourceDef::new("/user/{name}.{ext}"); let mut res = ResourceDef::new("/user/{name}.{ext}");
*res.name_mut() = "index".to_string(); res.set_name("index");
let mut rmap = ResourceMap::new(ResourceDef::new("")); let mut rmap = ResourceMap::new(ResourceDef::prefix(""));
rmap.add(&mut res, None); rmap.add(&mut res, None);
assert!(rmap.has_resource("/user/test.html")); assert!(rmap.has_resource("/user/test.html"));
assert!(!rmap.has_resource("/test/unknown")); assert!(!rmap.has_resource("/test/unknown"));
@ -539,9 +538,9 @@ mod tests {
#[test] #[test]
fn test_url_for_static() { fn test_url_for_static() {
let mut rdef = ResourceDef::new("/index.html"); let mut rdef = ResourceDef::new("/index.html");
*rdef.name_mut() = "index".to_string(); rdef.set_name("index");
let mut rmap = ResourceMap::new(ResourceDef::new("")); let mut rmap = ResourceMap::new(ResourceDef::prefix(""));
rmap.add(&mut rdef, None); rmap.add(&mut rdef, None);
assert!(rmap.has_resource("/index.html")); assert!(rmap.has_resource("/index.html"));
@ -560,9 +559,9 @@ mod tests {
#[test] #[test]
fn test_match_name() { fn test_match_name() {
let mut rdef = ResourceDef::new("/index.html"); let mut rdef = ResourceDef::new("/index.html");
*rdef.name_mut() = "index".to_string(); rdef.set_name("index");
let mut rmap = ResourceMap::new(ResourceDef::new("")); let mut rmap = ResourceMap::new(ResourceDef::prefix(""));
rmap.add(&mut rdef, None); rmap.add(&mut rdef, None);
assert!(rmap.has_resource("/index.html")); assert!(rmap.has_resource("/index.html"));
@ -579,11 +578,10 @@ mod tests {
fn test_url_for_external() { fn test_url_for_external() {
let mut rdef = ResourceDef::new("https://youtube.com/watch/{video_id}"); let mut rdef = ResourceDef::new("https://youtube.com/watch/{video_id}");
*rdef.name_mut() = "youtube".to_string(); rdef.set_name("youtube");
let mut rmap = ResourceMap::new(ResourceDef::new("")); let mut rmap = ResourceMap::new(ResourceDef::prefix(""));
rmap.add(&mut rdef, None); rmap.add(&mut rdef, None);
assert!(rmap.has_resource("https://youtube.com/watch/unknown"));
let req = TestRequest::default().rmap(rmap).to_http_request(); let req = TestRequest::default().rmap(rmap).to_http_request();
let url = req.url_for("youtube", &["oHg5SJYRHA0"]); let url = req.url_for("youtube", &["oHg5SJYRHA0"]);

View File

@ -64,7 +64,6 @@ impl<T: Clone + 'static> Deref for ReqData<T> {
} }
impl<T: Clone + 'static> FromRequest for ReqData<T> { impl<T: Clone + 'static> FromRequest for ReqData<T> {
type Config = ();
type Error = Error; type Error = Error;
type Future = Ready<Result<Self, Error>>; type Future = Ready<Result<Self, Error>>;

View File

@ -4,7 +4,7 @@ use std::future::Future;
use std::rc::Rc; use std::rc::Rc;
use actix_http::Extensions; use actix_http::Extensions;
use actix_router::IntoPattern; use actix_router::{IntoPatterns, Patterns};
use actix_service::boxed::{self, BoxService, BoxServiceFactory}; use actix_service::boxed::{self, BoxService, BoxServiceFactory};
use actix_service::{ use actix_service::{
apply, apply_fn_factory, fn_service, IntoServiceFactory, Service, ServiceFactory, apply, apply_fn_factory, fn_service, IntoServiceFactory, Service, ServiceFactory,
@ -15,7 +15,7 @@ use futures_util::future::join_all;
use crate::{ use crate::{
data::Data, data::Data,
dev::{insert_leading_slash, AppService, HttpServiceFactory, ResourceDef}, dev::{ensure_leading_slash, AppService, HttpServiceFactory, ResourceDef},
guard::Guard, guard::Guard,
handler::Handler, handler::Handler,
responder::Responder, responder::Responder,
@ -51,7 +51,7 @@ type HttpNewService = BoxServiceFactory<(), ServiceRequest, ServiceResponse, Err
/// Default behavior could be overridden with `default_resource()` method. /// Default behavior could be overridden with `default_resource()` method.
pub struct Resource<T = ResourceEndpoint> { pub struct Resource<T = ResourceEndpoint> {
endpoint: T, endpoint: T,
rdef: Vec<String>, rdef: Patterns,
name: Option<String>, name: Option<String>,
routes: Vec<Route>, routes: Vec<Route>,
app_data: Option<Extensions>, app_data: Option<Extensions>,
@ -61,7 +61,7 @@ pub struct Resource<T = ResourceEndpoint> {
} }
impl Resource { impl Resource {
pub fn new<T: IntoPattern>(path: T) -> Resource { pub fn new<T: IntoPatterns>(path: T) -> Resource {
let fref = Rc::new(RefCell::new(None)); let fref = Rc::new(RefCell::new(None));
Resource { Resource {
@ -391,13 +391,13 @@ where
}; };
let mut rdef = if config.is_root() || !self.rdef.is_empty() { let mut rdef = if config.is_root() || !self.rdef.is_empty() {
ResourceDef::new(insert_leading_slash(self.rdef.clone())) ResourceDef::new(ensure_leading_slash(self.rdef.clone()))
} else { } else {
ResourceDef::new(self.rdef.clone()) ResourceDef::new(self.rdef.clone())
}; };
if let Some(ref name) = self.name { if let Some(ref name) = self.name {
*rdef.name_mut() = name.clone(); rdef.set_name(name);
} }
*self.factory_ref.borrow_mut() = Some(ResourceFactory { *self.factory_ref.borrow_mut() = Some(ResourceFactory {

View File

@ -270,7 +270,7 @@ pub(crate) mod tests {
impl BodyTest for Body { impl BodyTest for Body {
fn bin_ref(&self) -> &[u8] { fn bin_ref(&self) -> &[u8] {
match self { match self {
Body::Bytes(ref bin) => &bin, Body::Bytes(ref bin) => bin,
_ => unreachable!("bug in test impl"), _ => unreachable!("bug in test impl"),
} }
} }
@ -283,11 +283,11 @@ pub(crate) mod tests {
fn bin_ref(&self) -> &[u8] { fn bin_ref(&self) -> &[u8] {
match self { match self {
ResponseBody::Body(ref b) => match b { ResponseBody::Body(ref b) => match b {
Body::Bytes(ref bin) => &bin, Body::Bytes(ref bin) => bin,
_ => unreachable!("bug in test impl"), _ => unreachable!("bug in test impl"),
}, },
ResponseBody::Other(ref b) => match b { ResponseBody::Other(ref b) => match b {
Body::Bytes(ref bin) => &bin, Body::Bytes(ref bin) => bin,
_ => unreachable!("bug in test impl"), _ => unreachable!("bug in test impl"),
}, },
} }

View File

@ -10,44 +10,75 @@ use crate::request::HttpRequest;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct ResourceMap { pub struct ResourceMap {
root: ResourceDef, pattern: ResourceDef,
/// Named resources within the tree or, for external resources,
/// isolated nodes outside the tree.
named: AHashMap<String, Rc<ResourceMap>>,
parent: RefCell<Weak<ResourceMap>>, parent: RefCell<Weak<ResourceMap>>,
named: AHashMap<String, ResourceDef>,
patterns: Vec<(ResourceDef, Option<Rc<ResourceMap>>)>, /// Must be `None` for "edge" nodes.
nodes: Option<Vec<Rc<ResourceMap>>>,
} }
impl ResourceMap { impl ResourceMap {
/// Creates a _container_ node in the `ResourceMap` tree.
pub fn new(root: ResourceDef) -> Self { pub fn new(root: ResourceDef) -> Self {
ResourceMap { ResourceMap {
root, pattern: root,
parent: RefCell::new(Weak::new()),
named: AHashMap::default(), named: AHashMap::default(),
patterns: Vec::new(), parent: RefCell::new(Weak::new()),
nodes: Some(Vec::new()),
} }
} }
/// Adds a (possibly nested) resource.
///
/// To add a non-prefix pattern, `nested` must be `None`.
/// To add external resource, supply a pattern without a leading `/`.
/// The root pattern of `nested`, if present, should match `pattern`.
pub fn add(&mut self, pattern: &mut ResourceDef, nested: Option<Rc<ResourceMap>>) { pub fn add(&mut self, pattern: &mut ResourceDef, nested: Option<Rc<ResourceMap>>) {
pattern.set_id(self.patterns.len() as u16); pattern.set_id(self.nodes.as_ref().unwrap().len() as u16);
self.patterns.push((pattern.clone(), nested));
if !pattern.name().is_empty() { if let Some(new_node) = nested {
self.named assert_eq!(&new_node.pattern, pattern, "`pattern` and `nested` mismatch");
.insert(pattern.name().to_string(), pattern.clone()); self.named.extend(new_node.named.clone().into_iter());
self.nodes.as_mut().unwrap().push(new_node);
} else {
let new_node = Rc::new(ResourceMap {
pattern: pattern.clone(),
named: AHashMap::default(),
parent: RefCell::new(Weak::new()),
nodes: None,
});
if let Some(name) = pattern.name() {
self.named.insert(name.to_owned(), Rc::clone(&new_node));
}
let is_external = match pattern.pattern() {
Some(p) => !p.is_empty() && !p.starts_with('/'),
None => false,
};
// Don't add external resources to the tree
if !is_external {
self.nodes.as_mut().unwrap().push(new_node);
}
} }
} }
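// A minimal sketch of how a nested map is assembled (mirrors the `url_for` test below);
// the paths are purely illustrative:
//
//   let mut root = ResourceMap::new(ResourceDef::prefix(""));
//   let mut user_rdef = ResourceDef::prefix("/user/{id}");
//   let mut user_map = ResourceMap::new(user_rdef.clone());
//   user_map.add(&mut ResourceDef::new("/profile"), None);
//   root.add(&mut user_rdef, Some(Rc::new(user_map)));
//   let root = Rc::new(root);
//   ResourceMap::finish(&root);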
pub(crate) fn finish(&self, current: Rc<ResourceMap>) { pub(crate) fn finish(self: &Rc<Self>) {
for (_, nested) in &self.patterns { for node in self.nodes.iter().flatten() {
if let Some(ref nested) = nested { node.parent.replace(Rc::downgrade(self));
*nested.parent.borrow_mut() = Rc::downgrade(&current); ResourceMap::finish(node);
nested.finish(nested.clone());
}
} }
} }
/// Generate url for named resource /// Generate url for named resource
/// ///
/// Check [`HttpRequest::url_for()`](../struct.HttpRequest.html#method. /// Check [`HttpRequest::url_for`] for detailed information.
/// url_for) for detailed information.
pub fn url_for<U, I>( pub fn url_for<U, I>(
&self, &self,
req: &HttpRequest, req: &HttpRequest,
@ -58,192 +89,97 @@ impl ResourceMap {
U: IntoIterator<Item = I>, U: IntoIterator<Item = I>,
I: AsRef<str>, I: AsRef<str>,
{ {
let mut path = String::new();
let mut elements = elements.into_iter(); let mut elements = elements.into_iter();
if self.patterns_for(name, &mut path, &mut elements)?.is_some() { let path = self
if path.starts_with('/') { .named
let conn = req.connection_info(); .get(name)
Ok(Url::parse(&format!( .ok_or(UrlGenerationError::ResourceNotFound)?
"{}://{}{}", .root_rmap_fn(String::with_capacity(24), |mut acc, node| {
conn.scheme(), node.pattern
conn.host(), .resource_path_from_iter(&mut acc, &mut elements)
path .then(|| acc)
))?) })
} else { .ok_or(UrlGenerationError::NotEnoughElements)?;
Ok(Url::parse(&path)?)
} if path.starts_with('/') {
let conn = req.connection_info();
Ok(Url::parse(&format!(
"{}://{}{}",
conn.scheme(),
conn.host(),
path
))?)
} else { } else {
Err(UrlGenerationError::ResourceNotFound) Ok(Url::parse(&path)?)
} }
} }
pub fn has_resource(&self, path: &str) -> bool { pub fn has_resource(&self, path: &str) -> bool {
let path = if path.is_empty() { "/" } else { path }; self.find_matching_node(path).is_some()
for (pattern, rmap) in &self.patterns {
if let Some(ref rmap) = rmap {
if let Some(plen) = pattern.is_prefix_match(path) {
return rmap.has_resource(&path[plen..]);
}
} else if pattern.is_match(path) || pattern.pattern() == "" && path == "/" {
return true;
}
}
false
} }
/// Returns the name of the route that matches the given path or None if no full match /// Returns the name of the route that matches the given path or None if no full match
/// is possible. /// is possible or the matching resource is not named.
pub fn match_name(&self, path: &str) -> Option<&str> { pub fn match_name(&self, path: &str) -> Option<&str> {
let path = if path.is_empty() { "/" } else { path }; self.find_matching_node(path)?.pattern.name()
for (pattern, rmap) in &self.patterns {
if let Some(ref rmap) = rmap {
if let Some(plen) = pattern.is_prefix_match(path) {
return rmap.match_name(&path[plen..]);
}
} else if pattern.is_match(path) {
return match pattern.name() {
"" => None,
s => Some(s),
};
}
}
None
} }
/// Returns the full resource pattern matched against a path or None if no full match /// Returns the full resource pattern matched against a path or None if no full match
/// is possible. /// is possible.
pub fn match_pattern(&self, path: &str) -> Option<String> { pub fn match_pattern(&self, path: &str) -> Option<String> {
let path = if path.is_empty() { "/" } else { path }; self.find_matching_node(path)?.root_rmap_fn(
String::with_capacity(24),
// ensure a full match exists |mut acc, node| {
if !self.has_resource(path) { acc.push_str(node.pattern.pattern()?);
return None; Some(acc)
} },
)
Some(self.traverse_resource_pattern(path))
} }
/// Takes remaining path and tries to match it up against a resource definition within the fn find_matching_node(&self, path: &str) -> Option<&ResourceMap> {
/// current resource map recursively, returning a concatenation of all resource prefixes and self._find_matching_node(path).flatten()
/// patterns matched in the tree.
///
/// Should only be used after checking the resource exists in the map so that partial match
/// patterns are not returned.
fn traverse_resource_pattern(&self, remaining: &str) -> String {
for (pattern, rmap) in &self.patterns {
if let Some(ref rmap) = rmap {
if let Some(prefix_len) = pattern.is_prefix_match(remaining) {
let prefix = pattern.pattern().to_owned();
return [
prefix,
rmap.traverse_resource_pattern(&remaining[prefix_len..]),
]
.concat();
}
} else if pattern.is_match(remaining) {
return pattern.pattern().to_owned();
}
}
String::new()
} }
fn patterns_for<U, I>( /// Returns `None` if root pattern doesn't match;
&self, /// `Some(None)` if root pattern matches but there is no matching child pattern.
name: &str, /// Don't search sideways when `Some(none)` is returned.
path: &mut String, fn _find_matching_node(&self, path: &str) -> Option<Option<&ResourceMap>> {
elements: &mut U, let matched_len = self.pattern.find_match(path)?;
) -> Result<Option<()>, UrlGenerationError> let path = &path[matched_len..];
Some(match &self.nodes {
// find first sub-node to match remaining path
Some(nodes) => nodes
.iter()
.filter_map(|node| node._find_matching_node(path))
.next()
.flatten(),
// only terminate at edge nodes
None => Some(self),
})
}
/// Find `self`'s highest ancestor and then run `F`, providing `B`, in that rmap context.
fn root_rmap_fn<F, B>(&self, init: B, mut f: F) -> Option<B>
where where
U: Iterator<Item = I>, F: FnMut(B, &ResourceMap) -> Option<B>,
I: AsRef<str>,
{ {
if self.pattern_for(name, path, elements)?.is_some() { self._root_rmap_fn(init, &mut f)
Ok(Some(()))
} else {
self.parent_pattern_for(name, path, elements)
}
} }
fn pattern_for<U, I>( /// Run `F`, providing `B`, if `self` is top-level resource map, else recurse to parent map.
&self, fn _root_rmap_fn<F, B>(&self, init: B, f: &mut F) -> Option<B>
name: &str,
path: &mut String,
elements: &mut U,
) -> Result<Option<()>, UrlGenerationError>
where where
U: Iterator<Item = I>, F: FnMut(B, &ResourceMap) -> Option<B>,
I: AsRef<str>,
{ {
if let Some(pattern) = self.named.get(name) { let data = match self.parent.borrow().upgrade() {
if pattern.pattern().starts_with('/') { Some(ref parent) => parent._root_rmap_fn(init, f)?,
self.fill_root(path, elements)?; None => init,
} };
if pattern.resource_path(path, elements) {
Ok(Some(()))
} else {
Err(UrlGenerationError::NotEnoughElements)
}
} else {
for (_, rmap) in &self.patterns {
if let Some(ref rmap) = rmap {
if rmap.pattern_for(name, path, elements)?.is_some() {
return Ok(Some(()));
}
}
}
Ok(None)
}
}
fn fill_root<U, I>( f(data, self)
&self,
path: &mut String,
elements: &mut U,
) -> Result<(), UrlGenerationError>
where
U: Iterator<Item = I>,
I: AsRef<str>,
{
if let Some(ref parent) = self.parent.borrow().upgrade() {
parent.fill_root(path, elements)?;
}
if self.root.resource_path(path, elements) {
Ok(())
} else {
Err(UrlGenerationError::NotEnoughElements)
}
}
fn parent_pattern_for<U, I>(
&self,
name: &str,
path: &mut String,
elements: &mut U,
) -> Result<Option<()>, UrlGenerationError>
where
U: Iterator<Item = I>,
I: AsRef<str>,
{
if let Some(ref parent) = self.parent.borrow().upgrade() {
if let Some(pattern) = parent.named.get(name) {
self.fill_root(path, elements)?;
if pattern.resource_path(path, elements) {
Ok(Some(()))
} else {
Err(UrlGenerationError::NotEnoughElements)
}
} else {
parent.parent_pattern_for(name, path, elements)
}
} else {
Ok(None)
}
} }
} }
@ -255,7 +191,7 @@ mod tests {
fn extract_matched_pattern() { fn extract_matched_pattern() {
let mut root = ResourceMap::new(ResourceDef::root_prefix("")); let mut root = ResourceMap::new(ResourceDef::root_prefix(""));
let mut user_map = ResourceMap::new(ResourceDef::root_prefix("")); let mut user_map = ResourceMap::new(ResourceDef::root_prefix("/user/{id}"));
user_map.add(&mut ResourceDef::new("/"), None); user_map.add(&mut ResourceDef::new("/"), None);
user_map.add(&mut ResourceDef::new("/profile"), None); user_map.add(&mut ResourceDef::new("/profile"), None);
user_map.add(&mut ResourceDef::new("/article/{id}"), None); user_map.add(&mut ResourceDef::new("/article/{id}"), None);
@ -271,9 +207,10 @@ mod tests {
&mut ResourceDef::root_prefix("/user/{id}"), &mut ResourceDef::root_prefix("/user/{id}"),
Some(Rc::new(user_map)), Some(Rc::new(user_map)),
); );
root.add(&mut ResourceDef::new("/info"), None);
let root = Rc::new(root); let root = Rc::new(root);
root.finish(Rc::clone(&root)); ResourceMap::finish(&root);
// sanity check resource map setup // sanity check resource map setup
@ -284,7 +221,7 @@ mod tests {
assert!(root.has_resource("/v2")); assert!(root.has_resource("/v2"));
assert!(!root.has_resource("/v33")); assert!(!root.has_resource("/v33"));
assert!(root.has_resource("/user/22")); assert!(!root.has_resource("/user/22"));
assert!(root.has_resource("/user/22/")); assert!(root.has_resource("/user/22/"));
assert!(root.has_resource("/user/22/profile")); assert!(root.has_resource("/user/22/profile"));
@ -329,15 +266,15 @@ mod tests {
let mut root = ResourceMap::new(ResourceDef::root_prefix("")); let mut root = ResourceMap::new(ResourceDef::root_prefix(""));
let mut rdef = ResourceDef::new("/info"); let mut rdef = ResourceDef::new("/info");
*rdef.name_mut() = "root_info".to_owned(); rdef.set_name("root_info");
root.add(&mut rdef, None); root.add(&mut rdef, None);
let mut user_map = ResourceMap::new(ResourceDef::root_prefix("")); let mut user_map = ResourceMap::new(ResourceDef::root_prefix("/user/{id}"));
let mut rdef = ResourceDef::new("/"); let mut rdef = ResourceDef::new("/");
user_map.add(&mut rdef, None); user_map.add(&mut rdef, None);
let mut rdef = ResourceDef::new("/post/{post_id}"); let mut rdef = ResourceDef::new("/post/{post_id}");
*rdef.name_mut() = "user_post".to_owned(); rdef.set_name("user_post");
user_map.add(&mut rdef, None); user_map.add(&mut rdef, None);
root.add( root.add(
@ -346,14 +283,14 @@ mod tests {
); );
let root = Rc::new(root); let root = Rc::new(root);
root.finish(Rc::clone(&root)); ResourceMap::finish(&root);
// sanity check resource map setup // sanity check resource map setup
assert!(root.has_resource("/info")); assert!(root.has_resource("/info"));
assert!(!root.has_resource("/bar")); assert!(!root.has_resource("/bar"));
assert!(root.has_resource("/user/22")); assert!(!root.has_resource("/user/22"));
assert!(root.has_resource("/user/22/")); assert!(root.has_resource("/user/22/"));
assert!(root.has_resource("/user/22/post/55")); assert!(root.has_resource("/user/22/post/55"));
@ -373,7 +310,7 @@ mod tests {
// ref: https://github.com/actix/actix-web/issues/1582 // ref: https://github.com/actix/actix-web/issues/1582
let mut root = ResourceMap::new(ResourceDef::root_prefix("")); let mut root = ResourceMap::new(ResourceDef::root_prefix(""));
let mut user_map = ResourceMap::new(ResourceDef::root_prefix("")); let mut user_map = ResourceMap::new(ResourceDef::root_prefix("/user/{id}"));
user_map.add(&mut ResourceDef::new("/"), None); user_map.add(&mut ResourceDef::new("/"), None);
user_map.add(&mut ResourceDef::new("/profile"), None); user_map.add(&mut ResourceDef::new("/profile"), None);
user_map.add(&mut ResourceDef::new("/article/{id}"), None); user_map.add(&mut ResourceDef::new("/article/{id}"), None);
@ -389,20 +326,119 @@ mod tests {
); );
let root = Rc::new(root); let root = Rc::new(root);
root.finish(Rc::clone(&root)); ResourceMap::finish(&root);
// check root has no parent // check root has no parent
assert!(root.parent.borrow().upgrade().is_none()); assert!(root.parent.borrow().upgrade().is_none());
// check child has parent reference // check child has parent reference
assert!(root.patterns[0].1.is_some()); assert!(root.nodes.as_ref().unwrap()[0]
.parent
.borrow()
.upgrade()
.is_some());
// check child's parent root id matches root's root id // check child's parent root id matches root's root id
assert_eq!( assert!(Rc::ptr_eq(
root.patterns[0].1.as_ref().unwrap().root.id(), &root.nodes.as_ref().unwrap()[0]
root.root.id() .parent
); .borrow()
.upgrade()
.unwrap(),
&root
));
let output = format!("{:?}", root); let output = format!("{:?}", root);
assert!(output.starts_with("ResourceMap {")); assert!(output.starts_with("ResourceMap {"));
assert!(output.ends_with(" }")); assert!(output.ends_with(" }"));
} }
#[test]
fn short_circuit() {
let mut root = ResourceMap::new(ResourceDef::prefix(""));
let mut user_root = ResourceDef::prefix("/user");
let mut user_map = ResourceMap::new(user_root.clone());
user_map.add(&mut ResourceDef::new("/u1"), None);
user_map.add(&mut ResourceDef::new("/u2"), None);
root.add(&mut ResourceDef::new("/user/u3"), None);
root.add(&mut user_root, Some(Rc::new(user_map)));
root.add(&mut ResourceDef::new("/user/u4"), None);
let rmap = Rc::new(root);
ResourceMap::finish(&rmap);
assert!(rmap.has_resource("/user/u1"));
assert!(rmap.has_resource("/user/u2"));
assert!(rmap.has_resource("/user/u3"));
assert!(!rmap.has_resource("/user/u4"));
}
#[test]
fn url_for() {
let mut root = ResourceMap::new(ResourceDef::prefix(""));
let mut user_scope_rdef = ResourceDef::prefix("/user");
let mut user_scope_map = ResourceMap::new(user_scope_rdef.clone());
let mut user_rdef = ResourceDef::new("/{user_id}");
let mut user_map = ResourceMap::new(user_rdef.clone());
let mut post_rdef = ResourceDef::new("/post/{sub_id}");
post_rdef.set_name("post");
user_map.add(&mut post_rdef, None);
user_scope_map.add(&mut user_rdef, Some(Rc::new(user_map)));
root.add(&mut user_scope_rdef, Some(Rc::new(user_scope_map)));
let rmap = Rc::new(root);
ResourceMap::finish(&rmap);
let mut req = crate::test::TestRequest::default();
req.set_server_hostname("localhost:8888");
let req = req.to_http_request();
let url = rmap
.url_for(&req, "post", &["u123", "foobar"])
.unwrap()
.to_string();
assert_eq!(url, "http://localhost:8888/user/u123/post/foobar");
assert!(rmap.url_for(&req, "missing", &["u123"]).is_err());
}
#[test]
fn external_resource_with_no_name() {
let mut root = ResourceMap::new(ResourceDef::prefix(""));
let mut rdef = ResourceDef::new("https://duck.com/{query}");
root.add(&mut rdef, None);
let rmap = Rc::new(root);
ResourceMap::finish(&rmap);
assert!(!rmap.has_resource("https://duck.com/abc"));
}
#[test]
fn external_resource_with_name() {
let mut root = ResourceMap::new(ResourceDef::prefix(""));
let mut rdef = ResourceDef::new("https://duck.com/{query}");
rdef.set_name("duck");
root.add(&mut rdef, None);
let rmap = Rc::new(root);
ResourceMap::finish(&rmap);
assert!(!rmap.has_resource("https://duck.com/abc"));
let mut req = crate::test::TestRequest::default();
req.set_server_hostname("localhost:8888");
let req = req.to_http_request();
assert_eq!(
rmap.url_for(&req, "duck", &["abcd"]).unwrap().to_string(),
"https://duck.com/abcd"
);
}
} }
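
For context, the url_for coverage above roughly maps to the following application-level usage; this is an illustrative sketch (the handler, routes, and addresses are hypothetical and not part of this commit), assuming the public HttpRequest::url_for and App::external_resource APIs:

use actix_web::{web, App, HttpRequest, HttpResponse, Responder};

// Hypothetical handler that generates URLs from resources named on the App below.
async fn links(req: HttpRequest) -> impl Responder {
    // Internal named resource: the elements fill {user_id} and {sub_id} in order.
    let post = req.url_for("post", &["u123", "foobar"]).unwrap();
    // External named resource: available for URL generation but never matched as a route.
    let search = req.url_for("duck", &["actix"]).unwrap();
    HttpResponse::Ok().body(format!("{}\n{}", post, search))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    actix_web::HttpServer::new(|| {
        App::new()
            .service(
                web::scope("/user/{user_id}")
                    .service(web::resource("/post/{sub_id}").name("post").to(links)),
            )
            .external_resource("duck", "https://duck.com/{query}")
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}

With this setup, the first generated link resolves to .../user/u123/post/foobar, mirroring the url_for test above.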

@@ -41,9 +41,9 @@ type HttpNewService = BoxServiceFactory<(), ServiceRequest, ServiceResponse, Err
/// fn main() { /// fn main() {
/// let app = App::new().service( /// let app = App::new().service(
/// web::scope("/{project_id}/") /// web::scope("/{project_id}/")
/// .service(web::resource("/path1").to(|| async { HttpResponse::Ok() })) /// .service(web::resource("/path1").to(|| async { "OK" }))
/// .service(web::resource("/path2").route(web::get().to(|| HttpResponse::Ok()))) /// .service(web::resource("/path2").route(web::get().to(|| HttpResponse::Ok())))
/// .service(web::resource("/path3").route(web::head().to(|| HttpResponse::MethodNotAllowed()))) /// .service(web::resource("/path3").route(web::head().to(HttpResponse::MethodNotAllowed)))
/// ); /// );
/// } /// }
/// ``` /// ```
@@ -530,7 +530,7 @@ impl Service<ServiceRequest> for ScopeService {
actix_service::always_ready!(); actix_service::always_ready!();
fn call(&self, mut req: ServiceRequest) -> Self::Future { fn call(&self, mut req: ServiceRequest) -> Self::Future {
let res = self.router.recognize_checked(&mut req, |req, guards| { let res = self.router.recognize_fn(&mut req, |req, guards| {
if let Some(ref guards) = guards { if let Some(ref guards) = guards {
for f in guards { for f in guards {
if !f.check(req.head()) { if !f.check(req.head()) {
@@ -1153,4 +1153,70 @@ mod tests {
Bytes::from_static(b"http://localhost:8080/a/b/c/12345") Bytes::from_static(b"http://localhost:8080/a/b/c/12345")
); );
} }
#[actix_rt::test]
async fn dynamic_scopes() {
let srv = init_service(
App::new().service(
web::scope("/{a}/").service(
web::scope("/{b}/")
.route("", web::get().to(|_: HttpRequest| HttpResponse::Created()))
.route(
"/",
web::get().to(|_: HttpRequest| HttpResponse::Accepted()),
)
.route("/{c}", web::get().to(|_: HttpRequest| HttpResponse::Ok())),
),
),
)
.await;
// note the unintuitive behavior with trailing slashes on scopes with dynamic segments
let req = TestRequest::with_uri("/a//b//c").to_request();
let resp = call_service(&srv, req).await;
assert_eq!(resp.status(), StatusCode::OK);
let req = TestRequest::with_uri("/a//b/").to_request();
let resp = call_service(&srv, req).await;
assert_eq!(resp.status(), StatusCode::CREATED);
let req = TestRequest::with_uri("/a//b//").to_request();
let resp = call_service(&srv, req).await;
assert_eq!(resp.status(), StatusCode::ACCEPTED);
let req = TestRequest::with_uri("/a//b//c/d").to_request();
let resp = call_service(&srv, req).await;
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
let srv = init_service(
App::new().service(
web::scope("/{a}").service(
web::scope("/{b}")
.route("", web::get().to(|_: HttpRequest| HttpResponse::Created()))
.route(
"/",
web::get().to(|_: HttpRequest| HttpResponse::Accepted()),
)
.route("/{c}", web::get().to(|_: HttpRequest| HttpResponse::Ok())),
),
),
)
.await;
let req = TestRequest::with_uri("/a/b/c").to_request();
let resp = call_service(&srv, req).await;
assert_eq!(resp.status(), StatusCode::OK);
let req = TestRequest::with_uri("/a/b").to_request();
let resp = call_service(&srv, req).await;
assert_eq!(resp.status(), StatusCode::CREATED);
let req = TestRequest::with_uri("/a/b/").to_request();
let resp = call_service(&srv, req).await;
assert_eq!(resp.status(), StatusCode::ACCEPTED);
let req = TestRequest::with_uri("/a/b/c/d").to_request();
let resp = call_service(&srv, req).await;
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
}
} }
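
The dynamic_scopes test above exercises routing only; as a rough sketch of how a handler would consume those segments (names here are hypothetical), the matched {a}, {b}, and {c} values can be pulled out with a web::Path tuple in declaration order:

use actix_web::{web, App, HttpResponse, Responder};

// Hypothetical handler: receives the three dynamic segments in the order they appear in the path.
async fn item(path: web::Path<(String, String, String)>) -> impl Responder {
    let (a, b, c) = path.into_inner();
    HttpResponse::Ok().body(format!("a={} b={} c={}", a, b, c))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    actix_web::HttpServer::new(|| {
        // Scope prefixes without trailing slashes, as in the second half of the test,
        // so GET /a/b/c resolves to `item`.
        App::new().service(
            web::scope("/{a}")
                .service(web::scope("/{b}").route("/{c}", web::get().to(item))),
        )
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}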

@@ -7,14 +7,14 @@ use actix_http::{
http::{HeaderMap, Method, StatusCode, Uri, Version}, http::{HeaderMap, Method, StatusCode, Uri, Version},
Extensions, HttpMessage, Payload, PayloadStream, RequestHead, Response, ResponseHead, Extensions, HttpMessage, Payload, PayloadStream, RequestHead, Response, ResponseHead,
}; };
use actix_router::{IntoPattern, Path, Resource, ResourceDef, Url}; use actix_router::{IntoPatterns, Path, Patterns, Resource, ResourceDef, Url};
use actix_service::{IntoServiceFactory, ServiceFactory}; use actix_service::{IntoServiceFactory, ServiceFactory};
#[cfg(feature = "cookies")] #[cfg(feature = "cookies")]
use cookie::{Cookie, ParseError as CookieParseError}; use cookie::{Cookie, ParseError as CookieParseError};
use crate::{ use crate::{
config::{AppConfig, AppService}, config::{AppConfig, AppService},
dev::insert_leading_slash, dev::ensure_leading_slash,
guard::Guard, guard::Guard,
info::ConnectionInfo, info::ConnectionInfo,
rmap::ResourceMap, rmap::ResourceMap,
@@ -117,7 +117,7 @@ impl ServiceRequest {
/// This method returns reference to the request head /// This method returns reference to the request head
#[inline] #[inline]
pub fn head(&self) -> &RequestHead { pub fn head(&self) -> &RequestHead {
&self.req.head() self.req.head()
} }
/// This method returns reference to the request head /// This method returns reference to the request head
@@ -212,14 +212,14 @@ impl ServiceRequest {
self.req.match_pattern() self.req.match_pattern()
} }
#[inline]
/// Get a mutable reference to the Path parameters. /// Get a mutable reference to the Path parameters.
#[inline]
pub fn match_info_mut(&mut self) -> &mut Path<Url> { pub fn match_info_mut(&mut self) -> &mut Path<Url> {
self.req.match_info_mut() self.req.match_info_mut()
} }
#[inline]
/// Get a reference to a `ResourceMap` of current application. /// Get a reference to a `ResourceMap` of current application.
#[inline]
pub fn resource_map(&self) -> &ResourceMap { pub fn resource_map(&self) -> &ResourceMap {
self.req.resource_map() self.req.resource_map()
} }
@@ -459,14 +459,14 @@ where
} }
pub struct WebService { pub struct WebService {
rdef: Vec<String>, rdef: Patterns,
name: Option<String>, name: Option<String>,
guards: Vec<Box<dyn Guard>>, guards: Vec<Box<dyn Guard>>,
} }
impl WebService { impl WebService {
/// Create new `WebService` instance. /// Create new `WebService` instance.
pub fn new<T: IntoPattern>(path: T) -> Self { pub fn new<T: IntoPatterns>(path: T) -> Self {
WebService { WebService {
rdef: path.patterns(), rdef: path.patterns(),
name: None, name: None,
@@ -476,7 +476,7 @@ impl WebService {
/// Set service name. /// Set service name.
/// ///
/// Name is used for url generation. /// Name is used for URL generation.
pub fn name(mut self, name: &str) -> Self { pub fn name(mut self, name: &str) -> Self {
self.name = Some(name.to_string()); self.name = Some(name.to_string());
self self
@@ -528,7 +528,7 @@ impl WebService {
struct WebServiceImpl<T> { struct WebServiceImpl<T> {
srv: T, srv: T,
rdef: Vec<String>, rdef: Patterns,
name: Option<String>, name: Option<String>,
guards: Vec<Box<dyn Guard>>, guards: Vec<Box<dyn Guard>>,
} }
@@ -551,13 +551,15 @@ where
}; };
let mut rdef = if config.is_root() || !self.rdef.is_empty() { let mut rdef = if config.is_root() || !self.rdef.is_empty() {
ResourceDef::new(insert_leading_slash(self.rdef)) ResourceDef::new(ensure_leading_slash(self.rdef))
} else { } else {
ResourceDef::new(self.rdef) ResourceDef::new(self.rdef)
}; };
if let Some(ref name) = self.name { if let Some(ref name) = self.name {
*rdef.name_mut() = name.clone(); rdef.set_name(name);
} }
config.register_service(rdef, guards, self.srv, None) config.register_service(rdef, guards, self.srv, None)
} }
} }
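
WebService backs web::service(), which registers a raw service in place of an extractor-based handler; a minimal usage sketch under that assumption (the service function, path, and guard below are illustrative):

use actix_web::{
    dev::{ServiceRequest, ServiceResponse},
    guard, web, App, Error, HttpResponse,
};

// Hypothetical raw service: receives the ServiceRequest directly and builds the ServiceResponse itself.
async fn status(req: ServiceRequest) -> Result<ServiceResponse, Error> {
    Ok(req.into_response(HttpResponse::Ok().body("ok")))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    actix_web::HttpServer::new(|| {
        App::new().service(
            web::service("/status")
                .name("status") // the name is only used for URL generation
                .guard(guard::Get())
                .finish(status),
        )
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}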
