mirror of https://github.com/fafhrd91/actix-web
Merge branch 'master' into custom_headers_in_redirects
This commit is contained in: commit a4662a61ea
@@ -4,10 +4,15 @@
 ### Added
 * Re-export actix-service `ServiceFactory` in `dev` module. [#2325]

-### Changes
+### Changed
 * Minimum supported Rust version (MSRV) is now 1.51.
+* Compress middleware will return 406 Not Acceptable when no content encoding is acceptable to the client. [#2344]
+
+### Fixed
+* Fix quality parse error in Accept-Encoding header. [#2344]

 [#2325]: https://github.com/actix/actix-web/pull/2325
+[#2344]: https://github.com/actix/actix-web/pull/2344


 ## 4.0.0-beta.8 - 2021-06-26
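For context on the new 406 behaviour noted above, a minimal sketch of a server using the middleware (the route, body, and bind address are illustrative; see also the `test_accept_encoding_no_match` integration test at the end of this diff):

use actix_web::{middleware::Compress, web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // Compression is negotiated against the request's Accept-Encoding header.
            .wrap(Compress::default())
            .route("/", web::get().to(|| async { HttpResponse::Ok().body("hello") }))
    })
    .bind(("127.0.0.1", 8080))? // illustrative address
    .run()
    .await
}

// A request carrying `Accept-Encoding: compress, identity;q=0` (only an unsupported
// encoding, with the uncompressed fallback disabled) now receives 406 Not Acceptable
// with a body listing the supported algorithms.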
@@ -99,7 +99,7 @@ regex = "1.4"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 serde_urlencoded = "0.7"
-smallvec = "1.6"
+smallvec = "1.6.1"
 socket2 = "0.4.0"
 time = { version = "0.2.23", default-features = false, features = ["std"] }
 url = "2.1"
@@ -1,15 +1,19 @@
 # Changes

 ## Unreleased - 2021-xx-xx
-### Changes
+### Changed
+* `ContentEncoding` is now marked `#[non_exhaustive]`. [#2377]
 * Minimum supported Rust version (MSRV) is now 1.51.

 ### Fixed
 * Remove slice creation pointing to potential uninitialized data on h1 encoder. [#2364]
 * Remove `Into<Error>` bound on `Encoder` body types. [#2375]
+* Fix quality parse error in Accept-Encoding header. [#2344]

 [#2364]: https://github.com/actix/actix-web/pull/2364
 [#2375]: https://github.com/actix/actix-web/pull/2375
+[#2344]: https://github.com/actix/actix-web/pull/2344
+[#2377]: https://github.com/actix/actix-web/pull/2377


 ## 3.0.0-beta.8 - 2021-08-09
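Because `ContentEncoding` is now `#[non_exhaustive]`, downstream matches need a wildcard arm. A minimal sketch (the import path is the one used by the compress middleware later in this diff; the extension mapping is a hypothetical example):

use actix_http::http::header::ContentEncoding;

// Hypothetical helper mapping an encoding to a pre-compressed file extension.
fn precompressed_extension(enc: ContentEncoding) -> &'static str {
    match enc {
        ContentEncoding::Br => ".br",
        ContentEncoding::Gzip => ".gz",
        ContentEncoding::Deflate => ".zz",
        ContentEncoding::Zstd => ".zst",
        // `#[non_exhaustive]` allows new variants to be added upstream,
        // so a catch-all arm is required for the match to compile.
        _ => "",
    }
}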
@@ -73,7 +73,7 @@ rand = "0.8"
 regex = "1.3"
 serde = "1.0"
 sha-1 = "0.9"
-smallvec = "1.6"
+smallvec = "1.6.1"
 time = { version = "0.2.23", default-features = false, features = ["std"] }
 tokio = { version = "1.2", features = ["sync"] }
@@ -80,7 +80,7 @@ where
         let encoding = headers
             .get(&CONTENT_ENCODING)
             .and_then(|val| val.to_str().ok())
-            .map(ContentEncoding::from)
+            .and_then(|x| x.parse().ok())
             .unwrap_or(ContentEncoding::Identity);

         Self::new(stream, encoding)
@@ -1,5 +1,6 @@
-use std::{convert::Infallible, str::FromStr};
+use std::{convert::TryFrom, str::FromStr};

+use derive_more::{Display, Error};
 use http::header::InvalidHeaderValue;

 use crate::{

@@ -8,8 +9,16 @@ use crate::{
     HttpMessage,
 };

+/// Error return when a content encoding is unknown.
+///
+/// Example: 'compress'
+#[derive(Debug, Display, Error)]
+#[display(fmt = "unsupported content encoding")]
+pub struct ContentEncodingParseError;
+
 /// Represents a supported content encoding.
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Debug, Clone, Copy, PartialEq)]
+#[non_exhaustive]
 pub enum ContentEncoding {
     /// Automatically select encoding based on encoding negotiation.
     Auto,

@@ -37,7 +46,7 @@ impl ContentEncoding
         matches!(self, ContentEncoding::Identity | ContentEncoding::Auto)
     }

-    /// Convert content encoding to string
+    /// Convert content encoding to string.
     #[inline]
     pub fn as_str(self) -> &'static str {
         match self {

@@ -48,18 +57,6 @@ impl ContentEncoding
             ContentEncoding::Identity | ContentEncoding::Auto => "identity",
         }
     }
-
-    /// Default Q-factor (quality) value.
-    #[inline]
-    pub fn quality(self) -> f64 {
-        match self {
-            ContentEncoding::Br => 1.1,
-            ContentEncoding::Gzip => 1.0,
-            ContentEncoding::Deflate => 0.9,
-            ContentEncoding::Identity | ContentEncoding::Auto => 0.1,
-            ContentEncoding::Zstd => 0.0,
-        }
-    }
 }

 impl Default for ContentEncoding {

@@ -69,31 +66,33 @@ impl Default for ContentEncoding
     }
 }

 impl FromStr for ContentEncoding {
-    type Err = Infallible;
+    type Err = ContentEncodingParseError;

     fn from_str(val: &str) -> Result<Self, Self::Err> {
-        Ok(Self::from(val))
-    }
-}
-
-impl From<&str> for ContentEncoding {
-    fn from(val: &str) -> ContentEncoding {
         let val = val.trim();

         if val.eq_ignore_ascii_case("br") {
-            ContentEncoding::Br
+            Ok(ContentEncoding::Br)
         } else if val.eq_ignore_ascii_case("gzip") {
-            ContentEncoding::Gzip
+            Ok(ContentEncoding::Gzip)
         } else if val.eq_ignore_ascii_case("deflate") {
-            ContentEncoding::Deflate
+            Ok(ContentEncoding::Deflate)
        } else if val.eq_ignore_ascii_case("zstd") {
-            ContentEncoding::Zstd
+            Ok(ContentEncoding::Zstd)
         } else {
-            ContentEncoding::default()
+            Err(ContentEncodingParseError)
         }
     }
 }

+impl TryFrom<&str> for ContentEncoding {
+    type Error = ContentEncodingParseError;
+
+    fn try_from(val: &str) -> Result<Self, Self::Error> {
+        val.parse()
+    }
+}
+
 impl IntoHeaderValue for ContentEncoding {
     type Error = InvalidHeaderValue;
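A short sketch of the new fallible conversions defined above (using the `actix_http::http::header` re-export seen in the middleware imports further down; `compress` is the doc comment's own example of an unknown encoding):

use std::convert::TryFrom;

use actix_http::http::header::ContentEncoding;

fn main() {
    // Known encodings parse case-insensitively, via either FromStr or TryFrom.
    assert_eq!("GZIP".parse::<ContentEncoding>().ok(), Some(ContentEncoding::Gzip));
    assert_eq!(ContentEncoding::try_from("br").ok(), Some(ContentEncoding::Br));

    // Unknown encodings now yield ContentEncodingParseError instead of
    // silently falling back to the default variant.
    assert!("compress".parse::<ContentEncoding>().is_err());
}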
@@ -1,11 +1,14 @@
 use std::{
     cmp,
     convert::{TryFrom, TryInto},
-    fmt, str,
+    fmt,
+    str::{self, FromStr},
 };

 use derive_more::{Display, Error};

+use crate::error::ParseError;
+
 const MAX_QUALITY: u16 = 1000;
 const MAX_FLOAT_QUALITY: f32 = 1.0;

@@ -113,12 +116,12 @@ impl<T: fmt::Display> fmt::Display for QualityItem<T>
     }
 }

-impl<T: str::FromStr> str::FromStr for QualityItem<T> {
-    type Err = crate::error::ParseError;
+impl<T: FromStr> FromStr for QualityItem<T> {
+    type Err = ParseError;

-    fn from_str(qitem_str: &str) -> Result<QualityItem<T>, crate::error::ParseError> {
+    fn from_str(qitem_str: &str) -> Result<Self, Self::Err> {
         if !qitem_str.is_ascii() {
-            return Err(crate::error::ParseError::Header);
+            return Err(ParseError::Header);
         }

         // Set defaults used if parsing fails.

@@ -139,7 +142,7 @@ impl<T: str::FromStr> str::FromStr for QualityItem<T>
             if parts[0].len() < 2 {
                 // Can't possibly be an attribute since an attribute needs at least a name followed
                 // by an equals sign. And bare identifiers are forbidden.
-                return Err(crate::error::ParseError::Header);
+                return Err(ParseError::Header);
             }

             let start = &parts[0][0..2];

@@ -148,25 +151,21 @@ impl<T: str::FromStr> str::FromStr for QualityItem<T>
                 let q_val = &parts[0][2..];
                 if q_val.len() > 5 {
                     // longer than 5 indicates an over-precise q-factor
-                    return Err(crate::error::ParseError::Header);
+                    return Err(ParseError::Header);
                 }

-                let q_value = q_val
-                    .parse::<f32>()
-                    .map_err(|_| crate::error::ParseError::Header)?;
+                let q_value = q_val.parse::<f32>().map_err(|_| ParseError::Header)?;

                 if (0f32..=1f32).contains(&q_value) {
                     quality = q_value;
                     raw_item = parts[1];
                 } else {
-                    return Err(crate::error::ParseError::Header);
+                    return Err(ParseError::Header);
                 }
             }
         }

-        let item = raw_item
-            .parse::<T>()
-            .map_err(|_| crate::error::ParseError::Header)?;
+        let item = raw_item.parse::<T>().map_err(|_| ParseError::Header)?;

         // we already checked above that the quality is within range
         Ok(QualityItem::new(item, Quality::from_f32(quality)))

@@ -224,7 +223,7 @@ mod tests
     }
 }

-impl str::FromStr for Encoding {
+impl FromStr for Encoding {
     type Err = crate::error::ParseError;
     fn from_str(s: &str) -> Result<Encoding, crate::error::ParseError> {
         use Encoding::*;
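To illustrate the q-factor rules enforced by the parser above, a small test-style sketch; it assumes `QualityItem` and its public `item` field are re-exported from the same `header` module as `ContentEncoding`:

use actix_http::http::header::{ContentEncoding, QualityItem};

fn main() {
    // A well-formed q-factor in the range [0, 1] is accepted.
    let gz: QualityItem<ContentEncoding> = "gzip; q=0.8".parse().unwrap();
    assert_eq!(gz.item, ContentEncoding::Gzip);

    // An over-precise q-factor (value longer than five characters) is rejected
    // with ParseError::Header.
    assert!("gzip; q=0.12345".parse::<QualityItem<ContentEncoding>>().is_err());
}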
@@ -79,7 +79,7 @@ where
             .into_iter()
             .for_each(|mut srv| srv.register(&mut config));

-        let mut rmap = ResourceMap::new(ResourceDef::new(""));
+        let mut rmap = ResourceMap::new(ResourceDef::prefix(""));

         let (config, services) = config.into_services();

@@ -104,7 +104,7 @@ where

         // complete ResourceMap tree creation
         let rmap = Rc::new(rmap);
-        rmap.finish(rmap.clone());
+        ResourceMap::finish(&rmap);

         // construct all async data factory futures
         let factory_futs = join_all(self.async_data_factories.iter().map(|f| f()));
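The switch from `ResourceDef::new("")` to `ResourceDef::prefix("")` matters because the rewritten `ResourceMap` (see `src/rmap.rs` below) matches each node's own pattern against the incoming path before descending, so the root node must be a prefix pattern that matches any path. A minimal sketch of the difference, assuming the `actix_web::dev` re-export of `ResourceDef`:

use actix_web::dev::ResourceDef;

fn main() {
    // A prefix definition matches any path that starts with the pattern,
    // so the empty prefix matches every path.
    assert!(ResourceDef::prefix("").is_match("/user/123"));

    // A plain definition must match the whole path, so the empty pattern
    // only matches the empty path.
    assert!(!ResourceDef::new("").is_match("/user/123"));
    assert!(ResourceDef::new("").is_match(""));
}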
@@ -2,10 +2,10 @@

 use std::{
     cmp,
+    convert::TryFrom,
     future::Future,
     marker::PhantomData,
     pin::Pin,
-    str::FromStr,
     task::{Context, Poll},
 };

@@ -13,16 +13,18 @@ use actix_http::{
     body::{MessageBody, ResponseBody},
     encoding::Encoder,
     http::header::{ContentEncoding, ACCEPT_ENCODING},
+    StatusCode,
 };
 use actix_service::{Service, Transform};
-use actix_utils::future::{ok, Ready};
+use actix_utils::future::{ok, Either, Ready};
 use futures_core::ready;
+use once_cell::sync::Lazy;
 use pin_project::pin_project;

 use crate::{
     dev::BodyEncoding,
     service::{ServiceRequest, ServiceResponse},
-    Error,
+    Error, HttpResponse,
 };

 /// Middleware for compressing response payloads.
@@ -78,34 +80,78 @@ pub struct CompressMiddleware<S> {
     encoding: ContentEncoding,
 }

+static SUPPORTED_ALGORITHM_NAMES: Lazy<String> = Lazy::new(|| {
+    let mut encoding = vec![];
+
+    #[cfg(feature = "compress-brotli")]
+    {
+        encoding.push("br");
+    }
+
+    #[cfg(feature = "compress-gzip")]
+    {
+        encoding.push("gzip");
+        encoding.push("deflate");
+    }
+
+    #[cfg(feature = "compress-zstd")]
+    encoding.push("zstd");
+
+    assert!(
+        !encoding.is_empty(),
+        "encoding can not be empty unless __compress feature has been explicitly enabled by itself"
+    );
+
+    encoding.join(", ")
+});
+
 impl<S, B> Service<ServiceRequest> for CompressMiddleware<S>
 where
-    B: MessageBody,
     S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
+    B: MessageBody,
 {
     type Response = ServiceResponse<ResponseBody<Encoder<B>>>;
     type Error = Error;
-    type Future = CompressResponse<S, B>;
+    type Future = Either<CompressResponse<S, B>, Ready<Result<Self::Response, Self::Error>>>;

     actix_service::forward_ready!(service);

     #[allow(clippy::borrow_interior_mutable_const)]
     fn call(&self, req: ServiceRequest) -> Self::Future {
         // negotiate content-encoding
-        let encoding = if let Some(val) = req.headers().get(&ACCEPT_ENCODING) {
-            if let Ok(enc) = val.to_str() {
-                AcceptEncoding::parse(enc, self.encoding)
-            } else {
-                ContentEncoding::Identity
-            }
-        } else {
-            ContentEncoding::Identity
-        };
+        let encoding_result = req
+            .headers()
+            .get(&ACCEPT_ENCODING)
+            .and_then(|val| val.to_str().ok())
+            .map(|enc| AcceptEncoding::try_parse(enc, self.encoding));

-        CompressResponse {
+        match encoding_result {
+            // Missing header => fallback to identity
+            None => Either::left(CompressResponse {
+                encoding: ContentEncoding::Identity,
+                fut: self.service.call(req),
+                _phantom: PhantomData,
+            }),
+
+            // Valid encoding
+            Some(Ok(encoding)) => Either::left(CompressResponse {
             encoding,
             fut: self.service.call(req),
             _phantom: PhantomData,
+            }),
+
+            // There is an HTTP header but we cannot match what client as asked for
+            Some(Err(_)) => {
+                let res = HttpResponse::with_body(
+                    StatusCode::NOT_ACCEPTABLE,
+                    SUPPORTED_ALGORITHM_NAMES.as_str(),
+                );
+                let enc = ContentEncoding::Identity;
+
+                Either::right(ok(req.into_response(res.map_body(move |head, body| {
+                    Encoder::response(enc, head, ResponseBody::Other(body.into()))
+                }))))
+            }
         }
     }
 }
@@ -114,7 +160,6 @@ where
 pub struct CompressResponse<S, B>
 where
     S: Service<ServiceRequest>,
-    B: MessageBody,
 {
     #[pin]
     fut: S::Future,

@@ -151,6 +196,7 @@ where

 struct AcceptEncoding {
     encoding: ContentEncoding,
+    // TODO: use Quality or QualityItem<ContentEncoding>
     quality: f64,
 }
@@ -177,26 +223,56 @@ impl PartialOrd for AcceptEncoding

 impl PartialEq for AcceptEncoding {
     fn eq(&self, other: &AcceptEncoding) -> bool {
-        self.quality == other.quality
+        self.encoding == other.encoding && self.quality == other.quality
     }
 }

+/// Parse q-factor from quality strings.
+///
+/// If parse fail, then fallback to default value which is 1.
+/// More details available here: <https://developer.mozilla.org/en-US/docs/Glossary/Quality_values>
+fn parse_quality(parts: &[&str]) -> f64 {
+    for part in parts {
+        if part.trim().starts_with("q=") {
+            return part[2..].parse().unwrap_or(1.0);
+        }
+    }
+
+    1.0
+}
+
+#[derive(Debug, PartialEq, Eq)]
+enum AcceptEncodingError {
+    /// This error occurs when client only support compressed response and server do not have any
+    /// algorithm that match client accepted algorithms.
+    CompressionAlgorithmMismatch,
+}
+
 impl AcceptEncoding {
     fn new(tag: &str) -> Option<AcceptEncoding> {
         let parts: Vec<&str> = tag.split(';').collect();
         let encoding = match parts.len() {
             0 => return None,
-            _ => ContentEncoding::from(parts[0]),
+            _ => match ContentEncoding::try_from(parts[0]) {
+                Err(_) => return None,
+                Ok(x) => x,
+            },
         };
-        let quality = match parts.len() {
-            1 => encoding.quality(),
-            _ => f64::from_str(parts[1]).unwrap_or(0.0),
-        };
+
+        let quality = parse_quality(&parts[1..]);
+        if quality <= 0.0 || quality > 1.0 {
+            return None;
+        }
+
         Some(AcceptEncoding { encoding, quality })
     }

-    /// Parse a raw Accept-Encoding header value into an ordered list.
-    pub fn parse(raw: &str, encoding: ContentEncoding) -> ContentEncoding {
+    /// Parse a raw Accept-Encoding header value into an ordered list then return the best match
+    /// based on middleware configuration.
+    pub fn try_parse(
+        raw: &str,
+        encoding: ContentEncoding,
+    ) -> Result<ContentEncoding, AcceptEncodingError> {
         let mut encodings = raw
             .replace(' ', "")
             .split(',')
@@ -206,13 +282,90 @@ impl AcceptEncoding
         encodings.sort();

         for enc in encodings {
-            if encoding == ContentEncoding::Auto {
-                return enc.encoding;
-            } else if encoding == enc.encoding {
-                return encoding;
+            if encoding == ContentEncoding::Auto || encoding == enc.encoding {
+                return Ok(enc.encoding);
             }
         }

-        ContentEncoding::Identity
+        // Special case if user cannot accept uncompressed data.
+        // See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding
+        // TODO: account for whitespace
+        if raw.contains("*;q=0") || raw.contains("identity;q=0") {
+            return Err(AcceptEncodingError::CompressionAlgorithmMismatch);
+        }
+
+        Ok(ContentEncoding::Identity)
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    macro_rules! assert_parse_eq {
+        ($raw:expr, $result:expr) => {
+            assert_eq!(
+                AcceptEncoding::try_parse($raw, ContentEncoding::Auto),
+                Ok($result)
+            );
+        };
+    }
+
+    macro_rules! assert_parse_fail {
+        ($raw:expr) => {
+            assert!(AcceptEncoding::try_parse($raw, ContentEncoding::Auto).is_err());
+        };
+    }
+
+    #[test]
+    fn test_parse_encoding() {
+        // Test simple case
+        assert_parse_eq!("br", ContentEncoding::Br);
+        assert_parse_eq!("gzip", ContentEncoding::Gzip);
+        assert_parse_eq!("deflate", ContentEncoding::Deflate);
+        assert_parse_eq!("zstd", ContentEncoding::Zstd);
+
+        // Test space, trim, missing values
+        assert_parse_eq!("br,,,,", ContentEncoding::Br);
+        assert_parse_eq!("gzip , br, zstd", ContentEncoding::Gzip);
+
+        // Test float number parsing
+        assert_parse_eq!("br;q=1 ,", ContentEncoding::Br);
+        assert_parse_eq!("br;q=1.0 , br", ContentEncoding::Br);
+
+        // Test wildcard
+        assert_parse_eq!("*", ContentEncoding::Identity);
+        assert_parse_eq!("*;q=1.0", ContentEncoding::Identity);
+    }
+
+    #[test]
+    fn test_parse_encoding_qfactor_ordering() {
+        assert_parse_eq!("gzip, br, zstd", ContentEncoding::Gzip);
+        assert_parse_eq!("zstd, br, gzip", ContentEncoding::Zstd);
+
+        assert_parse_eq!("gzip;q=0.4, br;q=0.6", ContentEncoding::Br);
+        assert_parse_eq!("gzip;q=0.8, br;q=0.4", ContentEncoding::Gzip);
+    }
+
+    #[test]
+    fn test_parse_encoding_qfactor_invalid() {
+        // Out of range
+        assert_parse_eq!("gzip;q=-5.0", ContentEncoding::Identity);
+        assert_parse_eq!("gzip;q=5.0", ContentEncoding::Identity);
+
+        // Disabled
+        assert_parse_eq!("gzip;q=0", ContentEncoding::Identity);
+    }
+
+    #[test]
+    fn test_parse_compression_required() {
+        // Check we fallback to identity if there is an unsupported compression algorithm
+        assert_parse_eq!("compress", ContentEncoding::Identity);
+
+        // User do not want any compression
+        assert_parse_fail!("compress, identity;q=0");
+        assert_parse_fail!("compress, identity;q=0.0");
+        assert_parse_fail!("compress, *;q=0");
+        assert_parse_fail!("compress, *;q=0.0");
+    }
 }
@@ -511,7 +511,7 @@ mod tests
         let mut res = ResourceDef::new("/user/{name}.{ext}");
         res.set_name("index");

-        let mut rmap = ResourceMap::new(ResourceDef::new(""));
+        let mut rmap = ResourceMap::new(ResourceDef::prefix(""));
         rmap.add(&mut res, None);
         assert!(rmap.has_resource("/user/test.html"));
         assert!(!rmap.has_resource("/test/unknown"));

@@ -541,7 +541,7 @@ mod tests
         let mut rdef = ResourceDef::new("/index.html");
         rdef.set_name("index");

-        let mut rmap = ResourceMap::new(ResourceDef::new(""));
+        let mut rmap = ResourceMap::new(ResourceDef::prefix(""));
         rmap.add(&mut rdef, None);

         assert!(rmap.has_resource("/index.html"));

@@ -562,7 +562,7 @@ mod tests
         let mut rdef = ResourceDef::new("/index.html");
         rdef.set_name("index");

-        let mut rmap = ResourceMap::new(ResourceDef::new(""));
+        let mut rmap = ResourceMap::new(ResourceDef::prefix(""));
         rmap.add(&mut rdef, None);

         assert!(rmap.has_resource("/index.html"));

@@ -581,9 +581,8 @@ mod tests

         rdef.set_name("youtube");

-        let mut rmap = ResourceMap::new(ResourceDef::new(""));
+        let mut rmap = ResourceMap::new(ResourceDef::prefix(""));
         rmap.add(&mut rdef, None);
-        assert!(rmap.has_resource("https://youtube.com/watch/unknown"));

         let req = TestRequest::default().rmap(rmap).to_http_request();
         let url = req.url_for("youtube", &["oHg5SJYRHA0"]);

 src/rmap.rs | 396
@@ -10,43 +10,75 @@ use crate::request::HttpRequest;

 #[derive(Clone, Debug)]
 pub struct ResourceMap {
-    root: ResourceDef,
+    pattern: ResourceDef,
+
+    /// Named resources within the tree or, for external resources,
+    /// it points to isolated nodes outside the tree.
+    named: AHashMap<String, Rc<ResourceMap>>,
+
     parent: RefCell<Weak<ResourceMap>>,
-    named: AHashMap<String, ResourceDef>,
-    patterns: Vec<(ResourceDef, Option<Rc<ResourceMap>>)>,
+
+    /// Must be `None` for "edge" nodes.
+    nodes: Option<Vec<Rc<ResourceMap>>>,
 }

 impl ResourceMap {
+    /// Creates a _container_ node in the `ResourceMap` tree.
     pub fn new(root: ResourceDef) -> Self {
         ResourceMap {
-            root,
-            parent: RefCell::new(Weak::new()),
+            pattern: root,
             named: AHashMap::default(),
-            patterns: Vec::new(),
+            parent: RefCell::new(Weak::new()),
+            nodes: Some(Vec::new()),
         }
     }

+    /// Adds a (possibly nested) resource.
+    ///
+    /// To add a non-prefix pattern, `nested` must be `None`.
+    /// To add external resource, supply a pattern without a leading `/`.
+    /// The root pattern of `nested`, if present, should match `pattern`.
     pub fn add(&mut self, pattern: &mut ResourceDef, nested: Option<Rc<ResourceMap>>) {
-        pattern.set_id(self.patterns.len() as u16);
-        self.patterns.push((pattern.clone(), nested));
-        if let Some(name) = pattern.name() {
-            self.named.insert(name.to_owned(), pattern.clone());
+        pattern.set_id(self.nodes.as_ref().unwrap().len() as u16);
+
+        if let Some(new_node) = nested {
+            assert_eq!(&new_node.pattern, pattern, "`patern` and `nested` mismatch");
+            self.named.extend(new_node.named.clone().into_iter());
+            self.nodes.as_mut().unwrap().push(new_node);
+        } else {
+            let new_node = Rc::new(ResourceMap {
+                pattern: pattern.clone(),
+                named: AHashMap::default(),
+                parent: RefCell::new(Weak::new()),
+                nodes: None,
+            });
+
+            if let Some(name) = pattern.name() {
+                self.named.insert(name.to_owned(), Rc::clone(&new_node));
+            }
+
+            let is_external = match pattern.pattern() {
+                Some(p) => !p.is_empty() && !p.starts_with('/'),
+                None => false,
+            };
+
+            // Don't add external resources to the tree
+            if !is_external {
+                self.nodes.as_mut().unwrap().push(new_node);
+            }
         }
     }

-    pub(crate) fn finish(&self, current: Rc<ResourceMap>) {
-        for (_, nested) in &self.patterns {
-            if let Some(ref nested) = nested {
-                *nested.parent.borrow_mut() = Rc::downgrade(&current);
-                nested.finish(nested.clone());
-            }
+    pub(crate) fn finish(self: &Rc<Self>) {
+        for node in self.nodes.iter().flatten() {
+            node.parent.replace(Rc::downgrade(self));
+            ResourceMap::finish(node);
         }
     }

     /// Generate url for named resource
     ///
-    /// Check [`HttpRequest::url_for()`](../struct.HttpRequest.html#method.
-    /// url_for) for detailed information.
+    /// Check [`HttpRequest::url_for`] for detailed information.
     pub fn url_for<U, I>(
         &self,
         req: &HttpRequest,
@@ -57,10 +89,19 @@ impl ResourceMap
         U: IntoIterator<Item = I>,
         I: AsRef<str>,
     {
-        let mut path = String::new();
         let mut elements = elements.into_iter();

-        if self.patterns_for(name, &mut path, &mut elements)?.is_some() {
+        let path = self
+            .named
+            .get(name)
+            .ok_or(UrlGenerationError::ResourceNotFound)?
+            .root_rmap_fn(String::with_capacity(24), |mut acc, node| {
+                node.pattern
+                    .resource_path_from_iter(&mut acc, &mut elements)
+                    .then(|| acc)
+            })
+            .ok_or(UrlGenerationError::NotEnoughElements)?;
+
         if path.starts_with('/') {
             let conn = req.connection_info();
             Ok(Url::parse(&format!(
@@ -72,182 +113,73 @@ impl ResourceMap
         } else {
             Ok(Url::parse(&path)?)
         }
-        } else {
-            Err(UrlGenerationError::ResourceNotFound)
-        }
     }

     pub fn has_resource(&self, path: &str) -> bool {
-        let path = if path.is_empty() { "/" } else { path };
-
-        for (pattern, rmap) in &self.patterns {
-            if let Some(ref rmap) = rmap {
-                if let Some(pat_len) = pattern.find_match(path) {
-                    return rmap.has_resource(&path[pat_len..]);
-                }
-            } else if pattern.is_match(path) || pattern.pattern() == Some("") && path == "/" {
-                return true;
-            }
-        }
-        false
+        self.find_matching_node(path).is_some()
     }

     /// Returns the name of the route that matches the given path or None if no full match
-    /// is possible.
+    /// is possible or the matching resource is not named.
     pub fn match_name(&self, path: &str) -> Option<&str> {
-        let path = if path.is_empty() { "/" } else { path };
-
-        for (pattern, rmap) in &self.patterns {
-            if let Some(ref rmap) = rmap {
-                if let Some(plen) = pattern.find_match(path) {
-                    return rmap.match_name(&path[plen..]);
-                }
-            } else if pattern.is_match(path) {
-                return pattern.name();
-            }
-        }
-
-        None
+        self.find_matching_node(path)?.pattern.name()
     }

     /// Returns the full resource pattern matched against a path or None if no full match
     /// is possible.
     pub fn match_pattern(&self, path: &str) -> Option<String> {
-        let path = if path.is_empty() { "/" } else { path };
-
-        // ensure a full match exists
-        if !self.has_resource(path) {
-            return None;
-        }
-
-        Some(self.traverse_resource_pattern(path))
+        self.find_matching_node(path)?.root_rmap_fn(
+            String::with_capacity(24),
+            |mut acc, node| {
+                acc.push_str(node.pattern.pattern()?);
+                Some(acc)
+            },
+        )
     }

-    /// Takes remaining path and tries to match it up against a resource definition within the
-    /// current resource map recursively, returning a concatenation of all resource prefixes and
-    /// patterns matched in the tree.
-    ///
-    /// Should only be used after checking the resource exists in the map so that partial match
-    /// patterns are not returned.
-    fn traverse_resource_pattern(&self, remaining: &str) -> String {
-        for (pattern, rmap) in &self.patterns {
-            if let Some(ref rmap) = rmap {
-                if let Some(prefix_len) = pattern.find_match(remaining) {
-                    // TODO: think about unwrap_or
-                    let prefix = pattern.pattern().unwrap_or("").to_owned();
-
-                    return [
-                        prefix,
-                        rmap.traverse_resource_pattern(&remaining[prefix_len..]),
-                    ]
-                    .concat();
-                }
-            } else if pattern.is_match(remaining) {
-                // TODO: think about unwrap_or
-                return pattern.pattern().unwrap_or("").to_owned();
-            }
-        }
-
-        String::new()
-    }
-
-    fn patterns_for<U, I>(
-        &self,
-        name: &str,
-        path: &mut String,
-        elements: &mut U,
-    ) -> Result<Option<()>, UrlGenerationError>
-    where
-        U: Iterator<Item = I>,
-        I: AsRef<str>,
-    {
-        if self.pattern_for(name, path, elements)?.is_some() {
-            Ok(Some(()))
-        } else {
-            self.parent_pattern_for(name, path, elements)
-        }
-    }
-
-    fn pattern_for<U, I>(
-        &self,
-        name: &str,
-        path: &mut String,
-        elements: &mut U,
-    ) -> Result<Option<()>, UrlGenerationError>
-    where
-        U: Iterator<Item = I>,
-        I: AsRef<str>,
-    {
-        if let Some(pattern) = self.named.get(name) {
-            if pattern
-                .pattern()
-                .map(|pat| pat.starts_with('/'))
-                .unwrap_or(false)
-            {
-                self.fill_root(path, elements)?;
-            }
-
-            if pattern.resource_path_from_iter(path, elements) {
-                Ok(Some(()))
-            } else {
-                Err(UrlGenerationError::NotEnoughElements)
-            }
-        } else {
-            for (_, rmap) in &self.patterns {
-                if let Some(ref rmap) = rmap {
-                    if rmap.pattern_for(name, path, elements)?.is_some() {
-                        return Ok(Some(()));
-                    }
-                }
-            }
-            Ok(None)
-        }
-    }
-
-    fn fill_root<U, I>(
-        &self,
-        path: &mut String,
-        elements: &mut U,
-    ) -> Result<(), UrlGenerationError>
-    where
-        U: Iterator<Item = I>,
-        I: AsRef<str>,
-    {
-        if let Some(ref parent) = self.parent.borrow().upgrade() {
-            parent.fill_root(path, elements)?;
-        }
-
-        if self.root.resource_path_from_iter(path, elements) {
-            Ok(())
-        } else {
-            Err(UrlGenerationError::NotEnoughElements)
-        }
-    }
-
-    fn parent_pattern_for<U, I>(
-        &self,
-        name: &str,
-        path: &mut String,
-        elements: &mut U,
-    ) -> Result<Option<()>, UrlGenerationError>
-    where
-        U: Iterator<Item = I>,
-        I: AsRef<str>,
-    {
-        if let Some(ref parent) = self.parent.borrow().upgrade() {
-            if let Some(pattern) = parent.named.get(name) {
-                self.fill_root(path, elements)?;
-                if pattern.resource_path_from_iter(path, elements) {
-                    Ok(Some(()))
-                } else {
-                    Err(UrlGenerationError::NotEnoughElements)
-                }
-            } else {
-                parent.parent_pattern_for(name, path, elements)
-            }
-        } else {
-            Ok(None)
-        }
+    fn find_matching_node(&self, path: &str) -> Option<&ResourceMap> {
+        self._find_matching_node(path).flatten()
+    }
+
+    /// Returns `None` if root pattern doesn't match;
+    /// `Some(None)` if root pattern matches but there is no matching child pattern.
+    /// Don't search sideways when `Some(none)` is returned.
+    fn _find_matching_node(&self, path: &str) -> Option<Option<&ResourceMap>> {
+        let matched_len = self.pattern.find_match(path)?;
+        let path = &path[matched_len..];
+
+        Some(match &self.nodes {
+            // find first sub-node to match remaining path
+            Some(nodes) => nodes
+                .iter()
+                .filter_map(|node| node._find_matching_node(path))
+                .next()
+                .flatten(),
+
+            // only terminate at edge nodes
+            None => Some(self),
+        })
+    }
+
+    /// Find `self`'s highest ancestor and then run `F`, providing `B`, in that rmap context.
+    fn root_rmap_fn<F, B>(&self, init: B, mut f: F) -> Option<B>
+    where
+        F: FnMut(B, &ResourceMap) -> Option<B>,
+    {
+        self._root_rmap_fn(init, &mut f)
+    }
+
+    /// Run `F`, providing `B`, if `self` is top-level resource map, else recurse to parent map.
+    fn _root_rmap_fn<F, B>(&self, init: B, f: &mut F) -> Option<B>
+    where
+        F: FnMut(B, &ResourceMap) -> Option<B>,
+    {
+        let data = match self.parent.borrow().upgrade() {
+            Some(ref parent) => parent._root_rmap_fn(init, f)?,
+            None => init,
+        };
+
+        f(data, self)
     }
 }
@@ -259,7 +191,7 @@ mod tests
     fn extract_matched_pattern() {
         let mut root = ResourceMap::new(ResourceDef::root_prefix(""));

-        let mut user_map = ResourceMap::new(ResourceDef::root_prefix(""));
+        let mut user_map = ResourceMap::new(ResourceDef::root_prefix("/user/{id}"));
         user_map.add(&mut ResourceDef::new("/"), None);
         user_map.add(&mut ResourceDef::new("/profile"), None);
         user_map.add(&mut ResourceDef::new("/article/{id}"), None);

@@ -275,9 +207,10 @@ mod tests
             &mut ResourceDef::root_prefix("/user/{id}"),
             Some(Rc::new(user_map)),
         );
+        root.add(&mut ResourceDef::new("/info"), None);

         let root = Rc::new(root);
-        root.finish(Rc::clone(&root));
+        ResourceMap::finish(&root);

         // sanity check resource map setup

@@ -288,7 +221,7 @@ mod tests
         assert!(root.has_resource("/v2"));
         assert!(!root.has_resource("/v33"));

-        assert!(root.has_resource("/user/22"));
+        assert!(!root.has_resource("/user/22"));
         assert!(root.has_resource("/user/22/"));
         assert!(root.has_resource("/user/22/profile"));

@@ -336,7 +269,7 @@ mod tests
         rdef.set_name("root_info");
         root.add(&mut rdef, None);

-        let mut user_map = ResourceMap::new(ResourceDef::root_prefix(""));
+        let mut user_map = ResourceMap::new(ResourceDef::root_prefix("/user/{id}"));
         let mut rdef = ResourceDef::new("/");
         user_map.add(&mut rdef, None);

@@ -350,14 +283,14 @@ mod tests
         );

         let root = Rc::new(root);
-        root.finish(Rc::clone(&root));
+        ResourceMap::finish(&root);

         // sanity check resource map setup

         assert!(root.has_resource("/info"));
         assert!(!root.has_resource("/bar"));

-        assert!(root.has_resource("/user/22"));
+        assert!(!root.has_resource("/user/22"));
         assert!(root.has_resource("/user/22/"));
         assert!(root.has_resource("/user/22/post/55"));

@@ -377,7 +310,7 @@ mod tests
         // ref: https://github.com/actix/actix-web/issues/1582
         let mut root = ResourceMap::new(ResourceDef::root_prefix(""));

-        let mut user_map = ResourceMap::new(ResourceDef::root_prefix(""));
+        let mut user_map = ResourceMap::new(ResourceDef::root_prefix("/user/{id}"));
         user_map.add(&mut ResourceDef::new("/"), None);
         user_map.add(&mut ResourceDef::new("/profile"), None);
         user_map.add(&mut ResourceDef::new("/article/{id}"), None);

@@ -393,20 +326,119 @@ mod tests
         );

         let root = Rc::new(root);
-        root.finish(Rc::clone(&root));
+        ResourceMap::finish(&root);

         // check root has no parent
         assert!(root.parent.borrow().upgrade().is_none());
         // check child has parent reference
-        assert!(root.patterns[0].1.is_some());
+        assert!(root.nodes.as_ref().unwrap()[0]
+            .parent
+            .borrow()
+            .upgrade()
+            .is_some());
         // check child's parent root id matches root's root id
-        assert_eq!(
-            root.patterns[0].1.as_ref().unwrap().root.id(),
-            root.root.id()
-        );
+        assert!(Rc::ptr_eq(
+            &root.nodes.as_ref().unwrap()[0]
+                .parent
+                .borrow()
+                .upgrade()
+                .unwrap(),
+            &root
+        ));

         let output = format!("{:?}", root);
         assert!(output.starts_with("ResourceMap {"));
         assert!(output.ends_with(" }"));
     }
+
+    #[test]
+    fn short_circuit() {
+        let mut root = ResourceMap::new(ResourceDef::prefix(""));
+
+        let mut user_root = ResourceDef::prefix("/user");
+        let mut user_map = ResourceMap::new(user_root.clone());
+        user_map.add(&mut ResourceDef::new("/u1"), None);
+        user_map.add(&mut ResourceDef::new("/u2"), None);
+
+        root.add(&mut ResourceDef::new("/user/u3"), None);
+        root.add(&mut user_root, Some(Rc::new(user_map)));
+        root.add(&mut ResourceDef::new("/user/u4"), None);
+
+        let rmap = Rc::new(root);
+        ResourceMap::finish(&rmap);
+
+        assert!(rmap.has_resource("/user/u1"));
+        assert!(rmap.has_resource("/user/u2"));
+        assert!(rmap.has_resource("/user/u3"));
+        assert!(!rmap.has_resource("/user/u4"));
+    }
+
+    #[test]
+    fn url_for() {
+        let mut root = ResourceMap::new(ResourceDef::prefix(""));
+
+        let mut user_scope_rdef = ResourceDef::prefix("/user");
+        let mut user_scope_map = ResourceMap::new(user_scope_rdef.clone());
+
+        let mut user_rdef = ResourceDef::new("/{user_id}");
+        let mut user_map = ResourceMap::new(user_rdef.clone());
+
+        let mut post_rdef = ResourceDef::new("/post/{sub_id}");
+        post_rdef.set_name("post");
+
+        user_map.add(&mut post_rdef, None);
+        user_scope_map.add(&mut user_rdef, Some(Rc::new(user_map)));
+        root.add(&mut user_scope_rdef, Some(Rc::new(user_scope_map)));
+
+        let rmap = Rc::new(root);
+        ResourceMap::finish(&rmap);
+
+        let mut req = crate::test::TestRequest::default();
+        req.set_server_hostname("localhost:8888");
+        let req = req.to_http_request();
+
+        let url = rmap
+            .url_for(&req, "post", &["u123", "foobar"])
+            .unwrap()
+            .to_string();
+        assert_eq!(url, "http://localhost:8888/user/u123/post/foobar");
+
+        assert!(rmap.url_for(&req, "missing", &["u123"]).is_err());
+    }
+
+    #[test]
+    fn external_resource_with_no_name() {
+        let mut root = ResourceMap::new(ResourceDef::prefix(""));
+
+        let mut rdef = ResourceDef::new("https://duck.com/{query}");
+        root.add(&mut rdef, None);
+
+        let rmap = Rc::new(root);
+        ResourceMap::finish(&rmap);
+
+        assert!(!rmap.has_resource("https://duck.com/abc"));
+    }
+
+    #[test]
+    fn external_resource_with_name() {
+        let mut root = ResourceMap::new(ResourceDef::prefix(""));
+
+        let mut rdef = ResourceDef::new("https://duck.com/{query}");
+        rdef.set_name("duck");
+        root.add(&mut rdef, None);
+
+        let rmap = Rc::new(root);
+        ResourceMap::finish(&rmap);
+
+        assert!(!rmap.has_resource("https://duck.com/abc"));
+
+        let mut req = crate::test::TestRequest::default();
+        req.set_server_hostname("localhost:8888");
+        let req = req.to_http_request();
+
+        assert_eq!(
+            rmap.url_for(&req, "duck", &["abcd"]).unwrap().to_string(),
+            "https://duck.com/abcd"
+        );
+    }
 }
@@ -41,9 +41,9 @@ type HttpNewService = BoxServiceFactory<(), ServiceRequest, ServiceResponse, Err
 /// fn main() {
 ///     let app = App::new().service(
 ///         web::scope("/{project_id}/")
-///             .service(web::resource("/path1").to(|| async { HttpResponse::Ok() }))
+///             .service(web::resource("/path1").to(|| async { "OK" }))
 ///             .service(web::resource("/path2").route(web::get().to(|| HttpResponse::Ok())))
-///             .service(web::resource("/path3").route(web::head().to(|| HttpResponse::MethodNotAllowed())))
+///             .service(web::resource("/path3").route(web::head().to(HttpResponse::MethodNotAllowed)))
 ///     );
 /// }
 /// ```

@@ -56,7 +56,7 @@ pub fn default_service(
 /// async fn test_init_service() {
 ///     let app = test::init_service(
 ///         App::new()
-///             .service(web::resource("/test").to(|| async { HttpResponse::Ok() }))
+///             .service(web::resource("/test").to(|| async { "OK" }))
 ///     ).await;
 ///
 ///     // Create request object
@@ -30,7 +30,7 @@ use crate::{
 ///
 /// # Extractor
 /// To extract typed data from a request body, the inner type `T` must implement the
-/// [`serde::Deserialize`] trait.
+/// [`DeserializeOwned`] trait.
 ///
 /// Use [`FormConfig`] to configure extraction process.
 ///

@@ -97,19 +97,13 @@ impl<T> ops::DerefMut for Json<T> {
     }
 }

-impl<T> fmt::Display for Json<T>
-where
-    T: fmt::Display,
-{
+impl<T: fmt::Display> fmt::Display for Json<T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(&self.0, f)
     }
 }

-impl<T> Serialize for Json<T>
-where
-    T: Serialize,
-{
+impl<T: Serialize> Serialize for Json<T> {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
         S: serde::Serializer,

@@ -133,10 +127,7 @@ impl<T: Serialize> Responder for Json<T> {
 }

 /// See [here](#extractor) for example of usage as an extractor.
-impl<T> FromRequest for Json<T>
-where
-    T: DeserializeOwned + 'static,
-{
+impl<T: DeserializeOwned + 'static> FromRequest for Json<T> {
     type Error = Error;
     type Future = JsonExtractFut<T>;
     type Config = JsonConfig;

@@ -166,10 +157,7 @@ pub struct JsonExtractFut<T> {
     err_handler: JsonErrorHandler,
 }

-impl<T> Future for JsonExtractFut<T>
-where
-    T: DeserializeOwned + 'static,
-{
+impl<T: DeserializeOwned + 'static> Future for JsonExtractFut<T> {
     type Output = Result<Json<T>, Error>;

     fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {

@@ -311,10 +299,7 @@ pub enum JsonBody<T> {

 impl<T> Unpin for JsonBody<T> {}

-impl<T> JsonBody<T>
-where
-    T: DeserializeOwned + 'static,
-{
+impl<T: DeserializeOwned> JsonBody<T> {
     /// Create a new future to decode a JSON request payload.
     #[allow(clippy::borrow_interior_mutable_const)]
     pub fn new(

@@ -395,10 +380,7 @@ where
     }
 }

-impl<T> Future for JsonBody<T>
-where
-    T: DeserializeOwned + 'static,
-{
+impl<T: DeserializeOwned + 'static> Future for JsonBody<T> {
     type Output = Result<T, JsonPayloadError>;

     fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
@@ -3,14 +3,14 @@
 use std::{fmt, ops, sync::Arc};

 use actix_utils::future::{err, ok, Ready};
-use serde::de;
+use serde::de::DeserializeOwned;

 use crate::{dev::Payload, error::QueryPayloadError, Error, FromRequest, HttpRequest};

 /// Extract typed information from the request's query.
 ///
 /// To extract typed data from the URL query string, the inner type `T` must implement the
-/// [`serde::Deserialize`] trait.
+/// [`DeserializeOwned`] trait.
 ///
 /// Use [`QueryConfig`] to configure extraction process.
 ///

@@ -46,18 +46,18 @@ use crate::{dev::Payload, error::QueryPayloadError, Error, FromRequest, HttpRequ
 /// // To access the entire underlying query struct, use `.into_inner()`.
 /// #[get("/debug1")]
 /// async fn debug1(info: web::Query<AuthRequest>) -> String {
-///     dbg!("Authorization object={:?}", info.into_inner());
+///     dbg!("Authorization object = {:?}", info.into_inner());
 ///     "OK".to_string()
 /// }
 ///
-/// // Or use `.0`, which is equivalent to `.into_inner()`.
+/// // Or use destructuring, which is equivalent to `.into_inner()`.
 /// #[get("/debug2")]
-/// async fn debug2(info: web::Query<AuthRequest>) -> String {
-///     dbg!("Authorization object={:?}", info.0);
+/// async fn debug2(web::Query(info): web::Query<AuthRequest>) -> String {
+///     dbg!("Authorization object = {:?}", info);
 ///     "OK".to_string()
 /// }
 /// ```
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
 pub struct Query<T>(pub T);

 impl<T> Query<T> {

@@ -65,8 +65,10 @@ impl<T> Query<T> {
     pub fn into_inner(self) -> T {
         self.0
     }
+}

-    /// Deserialize `T` from a URL encoded query parameter string.
+impl<T: DeserializeOwned> Query<T> {
+    /// Deserialize a `T` from the URL encoded query parameter string.
     ///
     /// ```
     /// # use std::collections::HashMap;

@@ -76,10 +78,7 @@ impl<T> Query<T> {
     /// assert_eq!(numbers.get("two"), Some(&2));
     /// assert!(numbers.get("three").is_none());
     /// ```
-    pub fn from_query(query_str: &str) -> Result<Self, QueryPayloadError>
-    where
-        T: de::DeserializeOwned,
-    {
+    pub fn from_query(query_str: &str) -> Result<Self, QueryPayloadError> {
         serde_urlencoded::from_str::<T>(query_str)
             .map(Self)
             .map_err(QueryPayloadError::Deserialize)

@@ -107,10 +106,7 @@ impl<T: fmt::Display> fmt::Display for Query<T> {
 }

 /// See [here](#usage) for example of usage as an extractor.
-impl<T> FromRequest for Query<T>
-where
-    T: de::DeserializeOwned,
-{
+impl<T: DeserializeOwned> FromRequest for Query<T> {
     type Error = Error;
     type Future = Ready<Result<Self, Error>>;
     type Config = QueryConfig;

@@ -165,7 +161,7 @@ where
 /// let query_cfg = web::QueryConfig::default()
 ///     // use custom error handler
 ///     .error_handler(|err, req| {
-///         error::InternalError::from_response(err, HttpResponse::Conflict().into()).into()
+///         error::InternalError::from_response(err, HttpResponse::Conflict().finish()).into()
 ///     });
 ///
 /// App::new()
@@ -1077,3 +1077,22 @@ async fn test_data_drop() {

     assert_eq!(num.load(Ordering::SeqCst), 0);
 }
+
+#[actix_rt::test]
+async fn test_accept_encoding_no_match() {
+    let srv = actix_test::start_with(actix_test::config().h1(), || {
+        App::new()
+            .wrap(Compress::default())
+            .service(web::resource("/").route(web::to(move || HttpResponse::Ok().finish())))
+    });
+
+    let response = srv
+        .get("/")
+        .append_header((ACCEPT_ENCODING, "compress, identity;q=0"))
+        .no_decompress()
+        .send()
+        .await
+        .unwrap();
+
+    assert_eq!(response.status().as_u16(), 406);
+}