mirror of https://github.com/fafhrd91/actix-web
Merge branch 'master' into refactor/web/rmap
Commit 3ecc063c56
|
@ -76,7 +76,7 @@ actix-utils = "3.0.0"
|
|||
actix-tls = { version = "3.0.0-beta.5", default-features = false, optional = true }
|
||||
|
||||
actix-web-codegen = "0.5.0-beta.2"
|
||||
actix-http = "3.0.0-beta.8"
|
||||
actix-http = "3.0.0-beta.9"
|
||||
|
||||
ahash = "0.7"
|
||||
bytes = "1"
|
||||
|
|
|
@ -21,6 +21,7 @@ impl ResponseError for FilesError {
|
|||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::enum_variant_names)]
|
||||
#[derive(Display, Debug, PartialEq)]
|
||||
pub enum UriSegmentError {
|
||||
/// The segment started with the wrapped invalid character.
|
||||
|
|
|
@ -106,7 +106,7 @@ impl Files {
|
|||
};
|
||||
|
||||
Files {
|
||||
path: mount_path.to_owned(),
|
||||
path: mount_path.trim_end_matches('/').to_owned(),
|
||||
directory: dir,
|
||||
index: None,
|
||||
show_index: false,
|
||||
|
|
|
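The `Files` hunk above trims trailing slashes from the mount path it stores, so mounting at `"/static"` and `"/static/"` registers the same prefix. A small illustration of what `trim_end_matches('/')` does here (the `/static` path is an example, not taken from the diff):

```rust
fn main() {
    // `trim_end_matches` strips every trailing '/', not just one,
    // so all of these normalize to the same mount path.
    for mount_path in ["/static", "/static/", "/static//"] {
        assert_eq!(mount_path.trim_end_matches('/'), "/static");
    }
}
```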
@ -2,6 +2,15 @@

## Unreleased - 2021-xx-xx

### Fixed
* Remove slice creation pointing to potential uninitialized data on h1 encoder. [#2364]

[#2364]: https://github.com/actix/actix-web/pull/2364

## 3.0.0-beta.9 - 2021-08-09
### Fixed
* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977)


## 3.0.0-beta.8 - 2021-06-26
### Changed

@ -210,6 +219,11 @@
[#1878]: https://github.com/actix/actix-web/pull/1878


## 2.2.1 - 2021-08-09
### Fixed
* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977)


## 2.2.0 - 2020-11-25
### Added
* HttpResponse builders for 1xx status codes. [#1768]
|
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "actix-http"
|
||||
version = "3.0.0-beta.8"
|
||||
version = "3.0.0-beta.9"
|
||||
authors = ["Nikolay Kim <fafhrd91@gmail.com>"]
|
||||
description = "HTTP primitives for the Actix ecosystem"
|
||||
keywords = ["actix", "http", "framework", "async", "futures"]
|
||||
|
@ -59,7 +59,7 @@ futures-core = { version = "0.3.7", default-features = false, features = ["alloc
|
|||
futures-util = { version = "0.3.7", default-features = false, features = ["alloc", "sink"] }
|
||||
h2 = "0.3.1"
|
||||
http = "0.2.2"
|
||||
httparse = "1.3"
|
||||
httparse = "1.5.1"
|
||||
itoa = "0.4"
|
||||
language-tags = "0.3"
|
||||
local-channel = "0.1"
|
||||
|
|
|
@ -3,11 +3,11 @@
|
|||
> HTTP primitives for the Actix ecosystem.
|
||||
|
||||
[](https://crates.io/crates/actix-http)
|
||||
[](https://docs.rs/actix-http/3.0.0-beta.8)
|
||||
[](https://docs.rs/actix-http/3.0.0-beta.9)
|
||||
[](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html)
|
||||

|
||||
<br />
|
||||
[](https://deps.rs/crate/actix-http/3.0.0-beta.8)
|
||||
[](https://deps.rs/crate/actix-http/3.0.0-beta.9)
|
||||
[](https://crates.io/crates/actix-http)
|
||||
[](https://discord.gg/NWpN5mmg3x)
|
||||
|
||||
|
|
|
@ -18,7 +18,8 @@ fn bench_write_camel_case(c: &mut Criterion) {
|
|||
group.bench_with_input(BenchmarkId::new("New", i), bts, |b, bts| {
|
||||
b.iter(|| {
|
||||
let mut buf = black_box([0; 24]);
|
||||
_new::write_camel_case(black_box(bts), &mut buf)
|
||||
let len = black_box(bts.len());
|
||||
_new::write_camel_case(black_box(bts), buf.as_mut_ptr(), len)
|
||||
});
|
||||
});
|
||||
}
|
||||
|
@ -30,9 +31,12 @@ criterion_group!(benches, bench_write_camel_case);
|
|||
criterion_main!(benches);
|
||||
|
||||
mod _new {
|
||||
pub fn write_camel_case(value: &[u8], buffer: &mut [u8]) {
|
||||
pub fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
|
||||
// first copy entire (potentially wrong) slice to output
|
||||
buffer[..value.len()].copy_from_slice(value);
|
||||
let buffer = unsafe {
|
||||
std::ptr::copy_nonoverlapping(value.as_ptr(), buf, len);
|
||||
std::slice::from_raw_parts_mut(buf, len)
|
||||
};
|
||||
|
||||
let mut iter = value.iter();
|
||||
|
||||
|
|
|
@ -196,7 +196,7 @@ pub enum ParseError {
|
|||
#[display(fmt = "IO error: {}", _0)]
|
||||
Io(io::Error),
|
||||
|
||||
/// Parsing a field as string failed
|
||||
/// Parsing a field as string failed.
|
||||
#[display(fmt = "UTF8 error: {}", _0)]
|
||||
Utf8(Utf8Error),
|
||||
}
|
||||
|
|
|
@ -0,0 +1,432 @@
|
|||
use std::{io, task::Poll};
|
||||
|
||||
use bytes::{Buf as _, Bytes, BytesMut};
|
||||
|
||||
macro_rules! byte (
|
||||
($rdr:ident) => ({
|
||||
if $rdr.len() > 0 {
|
||||
let b = $rdr[0];
|
||||
$rdr.advance(1);
|
||||
b
|
||||
} else {
|
||||
return Poll::Pending
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub(super) enum ChunkedState {
|
||||
Size,
|
||||
SizeLws,
|
||||
Extension,
|
||||
SizeLf,
|
||||
Body,
|
||||
BodyCr,
|
||||
BodyLf,
|
||||
EndCr,
|
||||
EndLf,
|
||||
End,
|
||||
}
|
||||
|
||||
impl ChunkedState {
|
||||
pub(super) fn step(
|
||||
&self,
|
||||
body: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
buf: &mut Option<Bytes>,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
use self::ChunkedState::*;
|
||||
match *self {
|
||||
Size => ChunkedState::read_size(body, size),
|
||||
SizeLws => ChunkedState::read_size_lws(body),
|
||||
Extension => ChunkedState::read_extension(body),
|
||||
SizeLf => ChunkedState::read_size_lf(body, size),
|
||||
Body => ChunkedState::read_body(body, size, buf),
|
||||
BodyCr => ChunkedState::read_body_cr(body),
|
||||
BodyLf => ChunkedState::read_body_lf(body),
|
||||
EndCr => ChunkedState::read_end_cr(body),
|
||||
EndLf => ChunkedState::read_end_lf(body),
|
||||
End => Poll::Ready(Ok(ChunkedState::End)),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_size(
|
||||
rdr: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
let radix = 16;
|
||||
|
||||
let rem = match byte!(rdr) {
|
||||
b @ b'0'..=b'9' => b - b'0',
|
||||
b @ b'a'..=b'f' => b + 10 - b'a',
|
||||
b @ b'A'..=b'F' => b + 10 - b'A',
|
||||
b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)),
|
||||
b';' => return Poll::Ready(Ok(ChunkedState::Extension)),
|
||||
b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => {
|
||||
return Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size line: Invalid Size",
|
||||
)));
|
||||
}
|
||||
};
|
||||
|
||||
match size.checked_mul(radix) {
|
||||
Some(n) => {
|
||||
*size = n as u64;
|
||||
*size += rem as u64;
|
||||
|
||||
Poll::Ready(Ok(ChunkedState::Size))
|
||||
}
|
||||
None => {
|
||||
log::debug!("chunk size would overflow u64");
|
||||
Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size line: Size is too big",
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_size_lws(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
// LWS can follow the chunk size, but no more digits can come
|
||||
b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)),
|
||||
b';' => Poll::Ready(Ok(ChunkedState::Extension)),
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size linear white space",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_extension(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
// strictly 0x20 (space) should be disallowed but we don't parse quoted strings here
|
||||
0x00..=0x08 | 0x0a..=0x1f | 0x7f => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid character in chunk extension",
|
||||
))),
|
||||
_ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions
|
||||
}
|
||||
}
|
||||
fn read_size_lf(
|
||||
rdr: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' if *size > 0 => Poll::Ready(Ok(ChunkedState::Body)),
|
||||
b'\n' if *size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_body(
|
||||
rdr: &mut BytesMut,
|
||||
rem: &mut u64,
|
||||
buf: &mut Option<Bytes>,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
log::trace!("Chunked read, remaining={:?}", rem);
|
||||
|
||||
let len = rdr.len() as u64;
|
||||
if len == 0 {
|
||||
Poll::Ready(Ok(ChunkedState::Body))
|
||||
} else {
|
||||
let slice;
|
||||
if *rem > len {
|
||||
slice = rdr.split().freeze();
|
||||
*rem -= len;
|
||||
} else {
|
||||
slice = rdr.split_to(*rem as usize).freeze();
|
||||
*rem = 0;
|
||||
}
|
||||
*buf = Some(slice);
|
||||
if *rem > 0 {
|
||||
Poll::Ready(Ok(ChunkedState::Body))
|
||||
} else {
|
||||
Poll::Ready(Ok(ChunkedState::BodyCr))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_body_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk body CR",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_body_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' => Poll::Ready(Ok(ChunkedState::Size)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk body LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_end_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk end CR",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_end_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' => Poll::Ready(Ok(ChunkedState::End)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk end LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use actix_codec::Decoder as _;
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use http::Method;
|
||||
|
||||
use crate::{
|
||||
error::ParseError,
|
||||
h1::decoder::{MessageDecoder, PayloadItem},
|
||||
HttpMessage as _, Request,
|
||||
};
|
||||
|
||||
macro_rules! parse_ready {
|
||||
($e:expr) => {{
|
||||
match MessageDecoder::<Request>::default().decode($e) {
|
||||
Ok(Some((msg, _))) => msg,
|
||||
Ok(_) => unreachable!("Eof during parsing http request"),
|
||||
Err(err) => unreachable!("Error during parsing http request: {:?}", err),
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
macro_rules! expect_parse_err {
|
||||
($e:expr) => {{
|
||||
match MessageDecoder::<Request>::default().decode($e) {
|
||||
Err(err) => match err {
|
||||
ParseError::Io(_) => unreachable!("Parse error expected"),
|
||||
_ => {}
|
||||
},
|
||||
_ => unreachable!("Error expected"),
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_chunked_payload_chunk_extension() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\
|
||||
\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(msg.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n")
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
|
||||
assert_eq!(chunk, Bytes::from_static(b"data"));
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
|
||||
assert_eq!(chunk, Bytes::from_static(b"line"));
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(msg.eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_request_chunked() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
if let Ok(val) = req.chunked() {
|
||||
assert!(val);
|
||||
} else {
|
||||
unreachable!("Error");
|
||||
}
|
||||
|
||||
// intentional typo in "chunked"
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chnked\r\n\r\n",
|
||||
);
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n");
|
||||
assert_eq!(
|
||||
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
|
||||
b"data"
|
||||
);
|
||||
assert_eq!(
|
||||
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
|
||||
b"line"
|
||||
);
|
||||
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload_and_next_message() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(
|
||||
b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\
|
||||
POST /test2 HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n"
|
||||
.iter(),
|
||||
);
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"data");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"line");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(msg.eof());
|
||||
|
||||
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
assert_eq!(*req.method(), Method::POST);
|
||||
assert!(req.chunked().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload_chunks() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4\r\n1111\r\n");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"1111");
|
||||
|
||||
buf.extend(b"4\r\ndata\r");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"data");
|
||||
|
||||
buf.extend(b"\n4");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"\r");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
buf.extend(b"\n");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"li");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"li");
|
||||
|
||||
//trailers
|
||||
//buf.feed_data("test: test\r\n");
|
||||
//not_ready!(reader.parse(&mut buf, &mut readbuf));
|
||||
|
||||
buf.extend(b"ne\r\n0\r\n");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"ne");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"\r\n");
|
||||
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn chunk_extension_quoted() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
Host: localhost:8080\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
2;hello=b;one=\"1 2 3\"\r\n\
|
||||
xx",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"xx")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_chunk_extension_invalid() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: localhost:8080\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
2;x\nx\r\n\
|
||||
4c\r\n\
|
||||
0\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
|
||||
let err = pl.decode(&mut buf).unwrap_err();
|
||||
assert!(err
|
||||
.to_string()
|
||||
.contains("Invalid character in chunk extension"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_chunk_size_overflow() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
f0000000000000003\r\n\
|
||||
abc\r\n\
|
||||
0\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
|
||||
let err = pl.decode(&mut buf).unwrap_err();
|
||||
assert!(err
|
||||
.to_string()
|
||||
.contains("Invalid chunk size line: Size is too big"));
|
||||
}
|
||||
}
|
|
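A key change in the new `chunked.rs` module above is `read_size`: the old decoder accumulated the hex chunk size with an unchecked `*size *= radix`, while the new code uses `checked_mul` and rejects oversized size lines. A standalone sketch of that accumulation step follows; the function name and the `main` harness are illustrative only, not actix-http API.

```rust
use std::io;

// Fold one hex digit into a running chunk size, mirroring the overflow check in
// `ChunkedState::read_size` above: `checked_mul` instead of an unchecked `*size *= 16`.
fn push_hex_digit(size: u64, digit: u8) -> io::Result<u64> {
    let rem = match digit {
        b @ b'0'..=b'9' => b - b'0',
        b @ b'a'..=b'f' => b + 10 - b'a',
        b @ b'A'..=b'F' => b + 10 - b'A',
        _ => {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "Invalid chunk size line: Invalid Size",
            ))
        }
    };

    match size.checked_mul(16) {
        // The addition cannot overflow: a successful multiply leaves room for 0..=15.
        Some(n) => Ok(n + u64::from(rem)),
        None => Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            "Invalid chunk size line: Size is too big",
        )),
    }
}

fn main() -> io::Result<()> {
    // "4c" parses to 0x4c == 76 bytes.
    let mut size = 0u64;
    for &b in b"4c" {
        size = push_hex_digit(size, b)?;
    }
    assert_eq!(size, 0x4c);

    // The 17-digit size line from the `hrs_chunk_size_overflow` test above no longer
    // wraps around silently; it errors out on the final digit instead.
    let overflowed = b"f0000000000000003"
        .iter()
        .try_fold(0u64, |acc, &b| push_hex_digit(acc, b));
    assert!(overflowed.is_err());

    Ok(())
}
```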
@ -1,18 +1,18 @@
|
|||
use std::convert::TryFrom;
|
||||
use std::io;
|
||||
use std::marker::PhantomData;
|
||||
use std::task::Poll;
|
||||
use std::{convert::TryFrom, io, marker::PhantomData, mem::MaybeUninit, task::Poll};
|
||||
|
||||
use actix_codec::Decoder;
|
||||
use bytes::{Buf, Bytes, BytesMut};
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use http::header::{HeaderName, HeaderValue};
|
||||
use http::{header, Method, StatusCode, Uri, Version};
|
||||
use log::{debug, error, trace};
|
||||
|
||||
use crate::error::ParseError;
|
||||
use crate::header::HeaderMap;
|
||||
use crate::message::{ConnectionType, ResponseHead};
|
||||
use crate::request::Request;
|
||||
use super::chunked::ChunkedState;
|
||||
use crate::{
|
||||
error::ParseError,
|
||||
header::HeaderMap,
|
||||
message::{ConnectionType, ResponseHead},
|
||||
request::Request,
|
||||
};
|
||||
|
||||
pub(crate) const MAX_BUFFER_SIZE: usize = 131_072;
|
||||
const MAX_HEADERS: usize = 96;
|
||||
|
@ -67,6 +67,7 @@ pub(crate) trait MessageType: Sized {
|
|||
let mut has_upgrade_websocket = false;
|
||||
let mut expect = false;
|
||||
let mut chunked = false;
|
||||
let mut seen_te = false;
|
||||
let mut content_length = None;
|
||||
|
||||
{
|
||||
|
@ -85,8 +86,17 @@ pub(crate) trait MessageType: Sized {
|
|||
};
|
||||
|
||||
match name {
|
||||
header::CONTENT_LENGTH => {
|
||||
if let Ok(s) = value.to_str() {
|
||||
header::CONTENT_LENGTH if content_length.is_some() => {
|
||||
debug!("multiple Content-Length");
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
header::CONTENT_LENGTH => match value.to_str() {
|
||||
Ok(s) if s.trim().starts_with('+') => {
|
||||
debug!("illegal Content-Length: {:?}", s);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
Ok(s) => {
|
||||
if let Ok(len) = s.parse::<u64>() {
|
||||
if len != 0 {
|
||||
content_length = Some(len);
|
||||
|
@ -95,15 +105,31 @@ pub(crate) trait MessageType: Sized {
|
|||
debug!("illegal Content-Length: {:?}", s);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
} else {
|
||||
}
|
||||
Err(_) => {
|
||||
debug!("illegal Content-Length: {:?}", value);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// transfer-encoding
|
||||
header::TRANSFER_ENCODING if seen_te => {
|
||||
debug!("multiple Transfer-Encoding not allowed");
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
||||
header::TRANSFER_ENCODING => {
|
||||
seen_te = true;
|
||||
|
||||
if let Ok(s) = value.to_str().map(str::trim) {
|
||||
chunked = s.eq_ignore_ascii_case("chunked");
|
||||
if s.eq_ignore_ascii_case("chunked") {
|
||||
chunked = true;
|
||||
} else if s.eq_ignore_ascii_case("identity") {
|
||||
// allow silently since multiple TE headers are already checked
|
||||
} else {
|
||||
debug!("illegal Transfer-Encoding: {:?}", s);
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
} else {
|
||||
return Err(ParseError::Header);
|
||||
}
|
||||
|
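The decoder hunks above tighten header parsing against the request-smuggling vectors called out in the changelog: a duplicate `Content-Length`, a `+`-prefixed length, repeated `Transfer-Encoding` headers, and unknown transfer codings are all rejected. Below is a condensed, hypothetical sketch of those rules over plain string pairs; the real checks operate on typed `HeaderName`/`HeaderValue`s inside the `MessageType` parsing code shown above.

```rust
// Returns (content_length, chunked) on success, or a static error message that
// roughly matches the debug logs in the hunks above.
fn validate(headers: &[(&str, &str)]) -> Result<(Option<u64>, bool), &'static str> {
    let mut content_length = None;
    let mut seen_te = false;
    let mut chunked = false;

    for (name, value) in headers {
        match name.to_ascii_lowercase().as_str() {
            "content-length" => {
                if content_length.is_some() {
                    return Err("multiple Content-Length");
                }
                let value = value.trim();
                if value.starts_with('+') {
                    return Err("illegal Content-Length");
                }
                let len: u64 = value.parse().map_err(|_| "illegal Content-Length")?;
                if len != 0 {
                    content_length = Some(len);
                }
            }
            "transfer-encoding" => {
                if seen_te {
                    return Err("multiple Transfer-Encoding not allowed");
                }
                seen_te = true;
                match value.trim() {
                    s if s.eq_ignore_ascii_case("chunked") => chunked = true,
                    s if s.eq_ignore_ascii_case("identity") => {} // allowed silently
                    _ => return Err("illegal Transfer-Encoding"),
                }
            }
            _ => {}
        }
    }

    Ok((content_length, chunked))
}

fn main() {
    // Mirrors `hrs_multiple_content_length`, `hrs_content_length_plus`
    // and `hrs_unknown_transfer_encoding` further down in the diff.
    assert!(validate(&[("Content-Length", "4"), ("Content-Length", "2")]).is_err());
    assert!(validate(&[("Content-Length", "+3")]).is_err());
    assert!(validate(&[("Transfer-Encoding", "JUNK")]).is_err());
    assert_eq!(validate(&[("Transfer-Encoding", "chunked")]), Ok((None, true)));
}
```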
@ -186,10 +212,17 @@ impl MessageType for Request {
|
|||
let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY;
|
||||
|
||||
let (len, method, uri, ver, h_len) = {
|
||||
let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY;
|
||||
// SAFETY:
|
||||
// Create an uninitialized array of `MaybeUninit`. The `assume_init` is
|
||||
// safe because the type we are claiming to have initialized here is a
|
||||
// bunch of `MaybeUninit`s, which do not require initialization.
|
||||
let mut parsed = unsafe {
|
||||
MaybeUninit::<[MaybeUninit<httparse::Header<'_>>; MAX_HEADERS]>::uninit()
|
||||
.assume_init()
|
||||
};
|
||||
|
||||
let mut req = httparse::Request::new(&mut parsed);
|
||||
match req.parse(src)? {
|
||||
let mut req = httparse::Request::new(&mut []);
|
||||
match req.parse_with_uninit_headers(src, &mut parsed)? {
|
||||
httparse::Status::Complete(len) => {
|
||||
let method = Method::from_bytes(req.method.unwrap().as_bytes())
|
||||
.map_err(|_| ParseError::Method)?;
|
||||
|
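The hunk above replaces a fully initialized `[httparse::Header<'_>; MAX_HEADERS]` array with uninitialized `MaybeUninit` slots handed to `parse_with_uninit_headers`, which httparse gained in the 1.5 line (hence the dependency bump earlier in this diff). The core idea is that an array of `MaybeUninit<T>` needs no initialization at all; here is a generic sketch with an illustrative `&str` payload instead of real header structs.

```rust
use std::mem::MaybeUninit;

// An uninitialized `[MaybeUninit<T>; N]` is already a valid value of its own type,
// so `assume_init` on the *outer* array is sound. The individual elements stay
// uninitialized until something writes to them (httparse does this for each header
// it actually parses).
fn uninit_array<T, const N: usize>() -> [MaybeUninit<T>; N] {
    unsafe { MaybeUninit::<[MaybeUninit<T>; N]>::uninit().assume_init() }
}

fn main() {
    let mut slots: [MaybeUninit<&str>; 4] = uninit_array();

    // Initialize only the slots that are actually used...
    slots[0] = MaybeUninit::new("host");
    slots[1] = MaybeUninit::new("content-length");

    // ...and only read back slots that were written.
    // SAFETY: indices 0 and 1 were initialized just above.
    let (first, second) = unsafe { (slots[0].assume_init(), slots[1].assume_init()) };
    assert_eq!((first, second), ("host", "content-length"));
}
```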
@ -408,20 +441,6 @@ enum Kind {
|
|||
Eof,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
enum ChunkedState {
|
||||
Size,
|
||||
SizeLws,
|
||||
Extension,
|
||||
SizeLf,
|
||||
Body,
|
||||
BodyCr,
|
||||
BodyLf,
|
||||
EndCr,
|
||||
EndLf,
|
||||
End,
|
||||
}
|
||||
|
||||
impl Decoder for PayloadDecoder {
|
||||
type Item = PayloadItem;
|
||||
type Error = io::Error;
|
||||
|
@ -451,19 +470,23 @@ impl Decoder for PayloadDecoder {
|
|||
Kind::Chunked(ref mut state, ref mut size) => {
|
||||
loop {
|
||||
let mut buf = None;
|
||||
|
||||
// advances the chunked state
|
||||
*state = match state.step(src, size, &mut buf) {
|
||||
Poll::Pending => return Ok(None),
|
||||
Poll::Ready(Ok(state)) => state,
|
||||
Poll::Ready(Err(e)) => return Err(e),
|
||||
};
|
||||
|
||||
if *state == ChunkedState::End {
|
||||
trace!("End of chunked stream");
|
||||
return Ok(Some(PayloadItem::Eof));
|
||||
}
|
||||
|
||||
if let Some(buf) = buf {
|
||||
return Ok(Some(PayloadItem::Chunk(buf)));
|
||||
}
|
||||
|
||||
if src.is_empty() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
@ -480,201 +503,40 @@ impl Decoder for PayloadDecoder {
|
|||
}
|
||||
}
|
||||
|
||||
macro_rules! byte (
|
||||
($rdr:ident) => ({
|
||||
if $rdr.len() > 0 {
|
||||
let b = $rdr[0];
|
||||
$rdr.advance(1);
|
||||
b
|
||||
} else {
|
||||
return Poll::Pending
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
impl ChunkedState {
|
||||
fn step(
|
||||
&self,
|
||||
body: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
buf: &mut Option<Bytes>,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
use self::ChunkedState::*;
|
||||
match *self {
|
||||
Size => ChunkedState::read_size(body, size),
|
||||
SizeLws => ChunkedState::read_size_lws(body),
|
||||
Extension => ChunkedState::read_extension(body),
|
||||
SizeLf => ChunkedState::read_size_lf(body, size),
|
||||
Body => ChunkedState::read_body(body, size, buf),
|
||||
BodyCr => ChunkedState::read_body_cr(body),
|
||||
BodyLf => ChunkedState::read_body_lf(body),
|
||||
EndCr => ChunkedState::read_end_cr(body),
|
||||
EndLf => ChunkedState::read_end_lf(body),
|
||||
End => Poll::Ready(Ok(ChunkedState::End)),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_size(
|
||||
rdr: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
let radix = 16;
|
||||
match byte!(rdr) {
|
||||
b @ b'0'..=b'9' => {
|
||||
*size *= radix;
|
||||
*size += u64::from(b - b'0');
|
||||
}
|
||||
b @ b'a'..=b'f' => {
|
||||
*size *= radix;
|
||||
*size += u64::from(b + 10 - b'a');
|
||||
}
|
||||
b @ b'A'..=b'F' => {
|
||||
*size *= radix;
|
||||
*size += u64::from(b + 10 - b'A');
|
||||
}
|
||||
b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)),
|
||||
b';' => return Poll::Ready(Ok(ChunkedState::Extension)),
|
||||
b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => {
|
||||
return Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size line: Invalid Size",
|
||||
)));
|
||||
}
|
||||
}
|
||||
Poll::Ready(Ok(ChunkedState::Size))
|
||||
}
|
||||
|
||||
fn read_size_lws(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
trace!("read_size_lws");
|
||||
match byte!(rdr) {
|
||||
// LWS can follow the chunk size, but no more digits can come
|
||||
b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)),
|
||||
b';' => Poll::Ready(Ok(ChunkedState::Extension)),
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size linear white space",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_extension(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)),
|
||||
_ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions
|
||||
}
|
||||
}
|
||||
fn read_size_lf(
|
||||
rdr: &mut BytesMut,
|
||||
size: &mut u64,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' if *size > 0 => Poll::Ready(Ok(ChunkedState::Body)),
|
||||
b'\n' if *size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk size LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_body(
|
||||
rdr: &mut BytesMut,
|
||||
rem: &mut u64,
|
||||
buf: &mut Option<Bytes>,
|
||||
) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
trace!("Chunked read, remaining={:?}", rem);
|
||||
|
||||
let len = rdr.len() as u64;
|
||||
if len == 0 {
|
||||
Poll::Ready(Ok(ChunkedState::Body))
|
||||
} else {
|
||||
let slice;
|
||||
if *rem > len {
|
||||
slice = rdr.split().freeze();
|
||||
*rem -= len;
|
||||
} else {
|
||||
slice = rdr.split_to(*rem as usize).freeze();
|
||||
*rem = 0;
|
||||
}
|
||||
*buf = Some(slice);
|
||||
if *rem > 0 {
|
||||
Poll::Ready(Ok(ChunkedState::Body))
|
||||
} else {
|
||||
Poll::Ready(Ok(ChunkedState::BodyCr))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn read_body_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk body CR",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_body_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' => Poll::Ready(Ok(ChunkedState::Size)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk body LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_end_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk end CR",
|
||||
))),
|
||||
}
|
||||
}
|
||||
fn read_end_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> {
|
||||
match byte!(rdr) {
|
||||
b'\n' => Poll::Ready(Ok(ChunkedState::End)),
|
||||
_ => Poll::Ready(Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"Invalid chunk end LF",
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use http::{Method, Version};
|
||||
|
||||
use super::*;
|
||||
use crate::error::ParseError;
|
||||
use crate::http::header::{HeaderName, SET_COOKIE};
|
||||
use crate::HttpMessage;
|
||||
use crate::{
|
||||
error::ParseError,
|
||||
http::header::{HeaderName, SET_COOKIE},
|
||||
HttpMessage as _,
|
||||
};
|
||||
|
||||
impl PayloadType {
|
||||
fn unwrap(self) -> PayloadDecoder {
|
||||
pub(crate) fn unwrap(self) -> PayloadDecoder {
|
||||
match self {
|
||||
PayloadType::Payload(pl) => pl,
|
||||
_ => panic!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_unhandled(&self) -> bool {
|
||||
pub(crate) fn is_unhandled(&self) -> bool {
|
||||
matches!(self, PayloadType::Stream(_))
|
||||
}
|
||||
}
|
||||
|
||||
impl PayloadItem {
|
||||
fn chunk(self) -> Bytes {
|
||||
pub(crate) fn chunk(self) -> Bytes {
|
||||
match self {
|
||||
PayloadItem::Chunk(chunk) => chunk,
|
||||
_ => panic!("error"),
|
||||
}
|
||||
}
|
||||
fn eof(&self) -> bool {
|
||||
|
||||
pub(crate) fn eof(&self) -> bool {
|
||||
matches!(*self, PayloadItem::Eof)
|
||||
}
|
||||
}
|
||||
|
@ -967,34 +829,6 @@ mod tests {
|
|||
assert!(req.upgrade());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_request_chunked() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
if let Ok(val) = req.chunked() {
|
||||
assert!(val);
|
||||
} else {
|
||||
unreachable!("Error");
|
||||
}
|
||||
|
||||
// intentional typo in "chunked"
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chnked\r\n\r\n",
|
||||
);
|
||||
let req = parse_ready!(&mut buf);
|
||||
|
||||
if let Ok(val) = req.chunked() {
|
||||
assert!(!val);
|
||||
} else {
|
||||
unreachable!("Error");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_headers_content_length_err_1() {
|
||||
let mut buf = BytesMut::from(
|
||||
|
@ -1112,126 +946,6 @@ mod tests {
|
|||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n");
|
||||
assert_eq!(
|
||||
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
|
||||
b"data"
|
||||
);
|
||||
assert_eq!(
|
||||
pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(),
|
||||
b"line"
|
||||
);
|
||||
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload_and_next_message() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(
|
||||
b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\
|
||||
POST /test2 HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n"
|
||||
.iter(),
|
||||
);
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"data");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"line");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(msg.eof());
|
||||
|
||||
let (req, _) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
assert_eq!(*req.method(), Method::POST);
|
||||
assert!(req.chunked().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_http_request_chunked_payload_chunks() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (req, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(req.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4\r\n1111\r\n");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"1111");
|
||||
|
||||
buf.extend(b"4\r\ndata\r");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"data");
|
||||
|
||||
buf.extend(b"\n4");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"\r");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
buf.extend(b"\n");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"li");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"li");
|
||||
|
||||
//trailers
|
||||
//buf.feed_data("test: test\r\n");
|
||||
//not_ready!(reader.parse(&mut buf, &mut readbuf));
|
||||
|
||||
buf.extend(b"ne\r\n0\r\n");
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(msg.chunk().as_ref(), b"ne");
|
||||
assert!(pl.decode(&mut buf).unwrap().is_none());
|
||||
|
||||
buf.extend(b"\r\n");
|
||||
assert!(pl.decode(&mut buf).unwrap().unwrap().eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_chunked_payload_chunk_extension() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
transfer-encoding: chunked\r\n\
|
||||
\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
assert!(msg.chunked().unwrap());
|
||||
|
||||
buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n")
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
|
||||
assert_eq!(chunk, Bytes::from_static(b"data"));
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk();
|
||||
assert_eq!(chunk, Bytes::from_static(b"line"));
|
||||
let msg = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert!(msg.eof());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_response_http10_read_until_eof() {
|
||||
let mut buf = BytesMut::from("HTTP/1.0 200 Ok\r\n\r\ntest data");
|
||||
|
@ -1243,4 +957,84 @@ mod tests {
|
|||
let chunk = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"test data")));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_multiple_content_length() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: 4\r\n\
|
||||
Content-Length: 2\r\n\
|
||||
\r\n\
|
||||
abcd",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_content_length_plus() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: +3\r\n\
|
||||
\r\n\
|
||||
000",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_unknown_transfer_encoding() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Transfer-Encoding: JUNK\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
5\r\n\
|
||||
hello\r\n\
|
||||
0",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hrs_multiple_transfer_encoding() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET / HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: 51\r\n\
|
||||
Transfer-Encoding: identity\r\n\
|
||||
Transfer-Encoding: chunked\r\n\
|
||||
\r\n\
|
||||
0\r\n\
|
||||
\r\n\
|
||||
GET /forbidden HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\r\n",
|
||||
);
|
||||
|
||||
expect_parse_err!(&mut buf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn transfer_encoding_agrees() {
|
||||
let mut buf = BytesMut::from(
|
||||
"GET /test HTTP/1.1\r\n\
|
||||
Host: example.com\r\n\
|
||||
Content-Length: 3\r\n\
|
||||
Transfer-Encoding: identity\r\n\
|
||||
\r\n\
|
||||
0\r\n",
|
||||
);
|
||||
|
||||
let mut reader = MessageDecoder::<Request>::default();
|
||||
let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap();
|
||||
let mut pl = pl.unwrap();
|
||||
|
||||
let chunk = pl.decode(&mut buf).unwrap().unwrap();
|
||||
assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"0\r\n")));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -81,6 +81,7 @@ pub(crate) trait MessageType: Sized {
|
|||
match length {
|
||||
BodySize::Stream => {
|
||||
if chunked {
|
||||
skip_len = true;
|
||||
if camel_case {
|
||||
dst.put_slice(b"\r\nTransfer-Encoding: chunked\r\n")
|
||||
} else {
|
||||
|
@ -174,7 +175,7 @@ pub(crate) trait MessageType: Sized {
|
|||
unsafe {
|
||||
if camel_case {
|
||||
// use Camel-Case headers
|
||||
write_camel_case(k, from_raw_parts_mut(buf, k_len));
|
||||
write_camel_case(k, buf, k_len);
|
||||
} else {
|
||||
write_data(k, buf, k_len);
|
||||
}
|
||||
|
@ -472,15 +473,22 @@ impl TransferEncoding {
|
|||
}
|
||||
|
||||
/// # Safety
|
||||
/// Callers must ensure that the given length matches given value length.
|
||||
/// Callers must ensure that the given `len` matches the given `value` length and that `buf` is
|
||||
/// valid for writes of at least `len` bytes.
|
||||
unsafe fn write_data(value: &[u8], buf: *mut u8, len: usize) {
|
||||
debug_assert_eq!(value.len(), len);
|
||||
copy_nonoverlapping(value.as_ptr(), buf, len);
|
||||
}
|
||||
|
||||
fn write_camel_case(value: &[u8], buffer: &mut [u8]) {
|
||||
/// # Safety
|
||||
/// Callers must ensure that the given `len` matches the given `value` length and that `buf` is
|
||||
/// valid for writes of at least `len` bytes.
|
||||
unsafe fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) {
|
||||
// first copy entire (potentially wrong) slice to output
|
||||
buffer[..value.len()].copy_from_slice(value);
|
||||
write_data(value, buf, len);
|
||||
|
||||
// SAFETY: We just initialized the buffer with `value`
|
||||
let buffer = from_raw_parts_mut(buf, len);
|
||||
|
||||
let mut iter = value.iter();
|
||||
|
||||
|
|
|
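The encoder hunk above gives `write_camel_case` the same raw-pointer signature and safety contract as `write_data`: the caller guarantees `buf` is valid for `len` bytes and that `len == value.len()`, so the function can bulk-copy the bytes and then fix the capitalization in place. A safe, slice-based analogue of that copy-then-fix-up idea is sketched below; it is deliberately simplified and is not the actix-http implementation.

```rust
// Copy `value` into `buf` and upper-case the first byte of each hyphen-separated
// word, e.g. "content-length" -> "Content-Length". Using a slice instead of a raw
// pointer lets the bounds check stand in for the safety contract described above.
fn write_camel_case_safe(value: &[u8], buf: &mut [u8]) {
    let len = value.len();
    // First copy the entire (potentially wrongly cased) input to the output...
    buf[..len].copy_from_slice(value);

    // ...then fix capitalization in place.
    let mut upper_next = true;
    for b in &mut buf[..len] {
        if upper_next {
            b.make_ascii_uppercase();
        }
        upper_next = *b == b'-';
    }
}

fn main() {
    let mut out = [0u8; 24];
    write_camel_case_safe(b"content-length", &mut out);
    assert_eq!(&out[..b"content-length".len()], b"Content-Length");
}
```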
@ -1,6 +1,8 @@
|
|||
//! HTTP/1 protocol implementation.
|
||||
|
||||
use bytes::{Bytes, BytesMut};
|
||||
|
||||
mod chunked;
|
||||
mod client;
|
||||
mod codec;
|
||||
mod decoder;
|
||||
|
|
|
@ -684,7 +684,7 @@ impl<'a> Iterator for Iter<'a> {
|
|||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
// handle in-progress multi value lists first
|
||||
if let Some((ref name, ref mut vals)) = self.multi_inner {
|
||||
if let Some((name, ref mut vals)) = self.multi_inner {
|
||||
match vals.get(self.multi_idx) {
|
||||
Some(val) => {
|
||||
self.multi_idx += 1;
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
//! [rustls]: https://crates.io/crates/rustls
|
||||
//! [trust-dns]: https://crates.io/crates/trust-dns
|
||||
|
||||
#![deny(rust_2018_idioms, nonstandard_style)]
|
||||
#![deny(rust_2018_idioms, nonstandard_style, clippy::uninit_assumed_init)]
|
||||
#![allow(
|
||||
clippy::type_complexity,
|
||||
clippy::too_many_arguments,
|
||||
|
|
|
@ -209,7 +209,7 @@ impl RequestHeadType {
|
|||
impl AsRef<RequestHead> for RequestHeadType {
|
||||
fn as_ref(&self) -> &RequestHead {
|
||||
match self {
|
||||
RequestHeadType::Owned(head) => &head,
|
||||
RequestHeadType::Owned(head) => head,
|
||||
RequestHeadType::Rc(head, _) => head.as_ref(),
|
||||
}
|
||||
}
|
||||
|
@ -363,7 +363,7 @@ impl<T: Head> std::ops::Deref for Message<T> {
|
|||
type Target = T;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.head.as_ref()
|
||||
self.head.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -5,11 +5,14 @@
* Disallow prefix routes with tail segments. [#379]
* Enforce path separators on dynamic prefixes. [#378]
* Improve malformed path error message. [#384]
* Prefix segments now always end with a segment delimiter or end-of-input. [#2355]
* Prefix segments with trailing slashes define a trailing empty segment. [#2355]

[#378]: https://github.com/actix/actix-net/pull/378
[#379]: https://github.com/actix/actix-net/pull/379
[#380]: https://github.com/actix/actix-net/pull/380
[#384]: https://github.com/actix/actix-net/pull/384
[#2355]: https://github.com/actix/actix-web/pull/2355


## 0.5.0-beta.1 - 2021-07-20
|
|
|
@ -125,7 +125,7 @@ impl<T: ResourcePath> Path<T> {
|
|||
for (seg_name, val) in self.segments.iter() {
|
||||
if name == seg_name {
|
||||
return match val {
|
||||
PathItem::Static(ref s) => Some(&s),
|
||||
PathItem::Static(ref s) => Some(s),
|
||||
PathItem::Segment(s, e) => {
|
||||
Some(&self.path.path()[(*s as usize)..(*e as usize)])
|
||||
}
|
||||
|
@ -183,7 +183,7 @@ impl<'a, T: ResourcePath> Iterator for PathIter<'a, T> {
|
|||
if self.idx < self.params.segment_count() {
|
||||
let idx = self.idx;
|
||||
let res = match self.params.segments[idx].1 {
|
||||
PathItem::Static(ref s) => &s,
|
||||
PathItem::Static(ref s) => s,
|
||||
PathItem::Segment(s, e) => &self.params.path.path()[(s as usize)..(e as usize)],
|
||||
};
|
||||
self.idx += 1;
|
||||
|
@ -207,7 +207,7 @@ impl<T: ResourcePath> Index<usize> for Path<T> {
|
|||
|
||||
fn index(&self, idx: usize) -> &str {
|
||||
match self.segments[idx].1 {
|
||||
PathItem::Static(ref s) => &s,
|
||||
PathItem::Static(ref s) => s,
|
||||
PathItem::Segment(s, e) => &self.path.path()[(s as usize)..(e as usize)],
|
||||
}
|
||||
}
|
||||
|
|
|
@ -28,9 +28,27 @@ const REGEX_FLAGS: &str = "(?s-m)";
|
|||
/// regex engine.
|
||||
///
|
||||
///
|
||||
/// # Pattern Format and Matching Behavior
|
||||
///
|
||||
/// Resource pattern is defined as a string of zero or more _segments_ where each segment is
|
||||
/// preceded by a slash `/`.
|
||||
///
|
||||
/// This means that pattern string __must__ either be empty or begin with a slash (`/`).
|
||||
/// This also implies that a trailing slash in pattern defines an empty segment.
|
||||
/// For example, the pattern `"/user/"` has two segments: `["user", ""]`
|
||||
///
|
||||
/// A key point to understand is that `ResourceDef` matches segments, not strings.
|
||||
/// It matches segments individually.
|
||||
/// For example, the pattern `/user/` is not considered a prefix for the path `/user/123/456`,
|
||||
/// because the second segment doesn't match: `["user", ""]` vs `["user", "123", "456"]`.
|
||||
///
|
||||
/// This definition is consistent with the definition of absolute URL path in
|
||||
/// [RFC 3986 (section 3.3)](https://datatracker.ietf.org/doc/html/rfc3986#section-3.3)
|
||||
///
|
||||
///
|
||||
/// # Static Resources
|
||||
/// A static resource is the most basic type of definition. Pass a regular string to
|
||||
/// [new][Self::new]. Conforming paths must match the string exactly.
|
||||
/// A static resource is the most basic type of definition. Pass a pattern to
|
||||
/// [new][Self::new]. Conforming paths must match the pattern exactly.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```
|
||||
|
@ -39,6 +57,7 @@ const REGEX_FLAGS: &str = "(?s-m)";
|
|||
///
|
||||
/// assert!(resource.is_match("/home"));
|
||||
///
|
||||
/// assert!(!resource.is_match("/home/"));
|
||||
/// assert!(!resource.is_match("/home/new"));
|
||||
/// assert!(!resource.is_match("/homes"));
|
||||
/// assert!(!resource.is_match("/search"));
|
||||
|
@ -85,12 +104,13 @@ const REGEX_FLAGS: &str = "(?s-m)";
|
|||
///
|
||||
///
|
||||
/// # Prefix Resources
|
||||
/// A prefix resource is defined as pattern that can match just the start of a path.
|
||||
/// A prefix resource is defined as a pattern that can match just the start of a path, up to a
|
||||
/// segment boundary.
|
||||
///
|
||||
/// This library chooses to restrict that definition slightly. In particular, when matching, the
|
||||
/// prefix must be separated from the remaining part of the path by a `/` character, either at the
|
||||
/// end of the prefix pattern or at the start of the remaining slice. In practice, this is not
|
||||
/// much of a limitation.
|
||||
/// Prefix patterns with a trailing slash may have an unexpected, though correct, behavior.
|
||||
/// They define and therefore require an empty segment in order to match. Examples are given below.
|
||||
///
|
||||
/// Empty pattern matches any path as a prefix.
|
||||
///
|
||||
/// Prefix resources can contain dynamic segments.
|
||||
///
|
||||
|
@ -102,9 +122,12 @@ const REGEX_FLAGS: &str = "(?s-m)";
|
|||
/// assert!(resource.is_match("/home/new"));
|
||||
/// assert!(!resource.is_match("/homes"));
|
||||
///
|
||||
/// // prefix pattern with a trailing slash
|
||||
/// let resource = ResourceDef::prefix("/user/{id}/");
|
||||
/// assert!(resource.is_match("/user/123/"));
|
||||
/// assert!(resource.is_match("/user/123/stars"));
|
||||
/// assert!(resource.is_match("/user/123//stars"));
|
||||
/// assert!(!resource.is_match("/user/123/stars"));
|
||||
/// assert!(!resource.is_match("/user/123"));
|
||||
/// ```
|
||||
///
|
||||
///
|
||||
|
@ -117,6 +140,10 @@ const REGEX_FLAGS: &str = "(?s-m)";
|
|||
/// `{name:regex}`. For example, `/user/{id:\d+}` will only match paths where the user ID
|
||||
/// is numeric.
|
||||
///
|
||||
/// The regex could potentially match multiple segments. If this is not wanted, then care must be
|
||||
/// taken to avoid matching a slash `/`. It is guaranteed, however, that the match ends at a
|
||||
/// segment boundary; the pattern `r"(/|$)"` is always appended to the regex.
|
||||
///
|
||||
/// By default, dynamic segments use this regex: `[^/]+`. This is why, as shown in the earlier
/// section, a segment captures a slice of the path up to the next `/` character.
|
||||
///
|
||||
|
@ -276,7 +303,7 @@ impl ResourceDef {
|
|||
let mut pattern_data = Vec::new();
|
||||
|
||||
for pattern in &patterns {
|
||||
match ResourceDef::parse(&pattern, false, true) {
|
||||
match ResourceDef::parse(pattern, false, true) {
|
||||
(PatternType::Dynamic(re, names), _) => {
|
||||
re_set.push(re.as_str().to_owned());
|
||||
pattern_data.push((re, names));
|
||||
|
@ -298,7 +325,7 @@ impl ResourceDef {
|
|||
}
|
||||
}
|
||||
|
||||
/// Constructs a new resource definition using a string pattern that performs prefix matching.
|
||||
/// Constructs a new resource definition using a pattern that performs prefix matching.
|
||||
///
|
||||
/// More specifically, the regular expressions generated for matching are different when using
|
||||
/// this method vs using `new`; they will not be appended with the `$` meta-character that
|
||||
|
@ -320,13 +347,6 @@ impl ResourceDef {
|
|||
/// assert!(!resource.is_match("user/123"));
|
||||
/// assert!(!resource.is_match("user/123/stars"));
|
||||
/// assert!(!resource.is_match("/foo"));
|
||||
///
|
||||
/// let resource = ResourceDef::prefix("user/{id}");
|
||||
/// assert!(resource.is_match("user/123"));
|
||||
/// assert!(resource.is_match("user/123/stars"));
|
||||
/// assert!(!resource.is_match("/user/123"));
|
||||
/// assert!(!resource.is_match("/user/123/stars"));
|
||||
/// assert!(!resource.is_match("foo"));
|
||||
/// ```
|
||||
pub fn prefix(path: &str) -> Self {
|
||||
profile_method!(prefix);
|
||||
|
@ -591,24 +611,7 @@ impl ResourceDef {
|
|||
|
||||
match self.pat_type {
|
||||
PatternType::Static(ref s) => s == path,
|
||||
|
||||
PatternType::Prefix(ref prefix) if prefix == path => true,
|
||||
PatternType::Prefix(ref prefix) => is_strict_prefix(prefix, path),
|
||||
|
||||
// dynamic prefix
|
||||
PatternType::Dynamic(ref re, _) if !re.as_str().ends_with('$') => {
|
||||
match re.find(path) {
|
||||
// prefix matches exactly
|
||||
Some(m) if m.end() == path.len() => true,
|
||||
|
||||
// prefix matches part
|
||||
Some(m) => is_strict_prefix(m.as_str(), path),
|
||||
|
||||
// prefix does not match
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
PatternType::Prefix(ref prefix) => is_prefix(prefix, path),
|
||||
PatternType::Dynamic(ref re, _) => re.is_match(path),
|
||||
PatternType::DynamicSet(ref re, _) => re.is_match(path),
|
||||
}
|
||||
|
@ -656,30 +659,15 @@ impl ResourceDef {
|
|||
PatternType::Static(segment) if path == segment => Some(segment.len()),
|
||||
PatternType::Static(_) => None,
|
||||
|
||||
PatternType::Prefix(prefix) if path == prefix => Some(prefix.len()),
|
||||
PatternType::Prefix(prefix) if is_strict_prefix(prefix, path) => Some(prefix.len()),
|
||||
PatternType::Prefix(prefix) if is_prefix(prefix, path) => Some(prefix.len()),
|
||||
PatternType::Prefix(_) => None,
|
||||
|
||||
// dynamic prefix
|
||||
PatternType::Dynamic(ref re, _) if !re.as_str().ends_with('$') => {
|
||||
match re.find(path) {
|
||||
// prefix matches exactly
|
||||
Some(m) if m.end() == path.len() => Some(m.end()),
|
||||
|
||||
// prefix matches part
|
||||
Some(m) if is_strict_prefix(m.as_str(), path) => Some(m.end()),
|
||||
|
||||
// prefix does not match
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
PatternType::Dynamic(re, _) => re.find(path).map(|m| m.end()),
|
||||
PatternType::Dynamic(re, _) => Some(re.captures(path)?[1].len()),
|
||||
|
||||
PatternType::DynamicSet(re, params) => {
|
||||
let idx = re.matches(path).into_iter().next()?;
|
||||
let (ref pattern, _) = params[idx];
|
||||
pattern.find(path).map(|m| m.end())
|
||||
Some(pattern.captures(path)?[1].len())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -790,7 +778,7 @@ impl ResourceDef {
|
|||
profile_section!(pattern_dynamic_extract_captures);
|
||||
|
||||
for (no, name) in names.iter().enumerate() {
|
||||
if let Some(m) = captures.name(&name) {
|
||||
if let Some(m) = captures.name(name) {
|
||||
segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16);
|
||||
} else {
|
||||
log::error!(
|
||||
|
@ -802,7 +790,7 @@ impl ResourceDef {
|
|||
}
|
||||
};
|
||||
|
||||
(captures[0].len(), Some(names))
|
||||
(captures[1].len(), Some(names))
|
||||
}
|
||||
|
||||
PatternType::DynamicSet(re, params) => {
|
||||
|
@ -820,7 +808,7 @@ impl ResourceDef {
|
|||
};
|
||||
|
||||
for (no, name) in names.iter().enumerate() {
|
||||
if let Some(m) = captures.name(&name) {
|
||||
if let Some(m) = captures.name(name) {
|
||||
segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16);
|
||||
} else {
|
||||
log::error!("Dynamic path match but not all segments found: {}", name);
|
||||
|
@ -828,7 +816,7 @@ impl ResourceDef {
|
|||
}
|
||||
}
|
||||
|
||||
(captures[0].len(), Some(names))
|
||||
(captures[1].len(), Some(names))
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -1112,8 +1100,16 @@ impl ResourceDef {
|
|||
);
|
||||
}
|
||||
|
||||
if !is_prefix && !has_tail_segment {
|
||||
re.push('$');
|
||||
// Store the pattern in capture group #1 to have context info outside it
|
||||
let mut re = format!("({})", re);
|
||||
|
||||
// Ensure the match ends at a segment boundary
|
||||
if !has_tail_segment {
|
||||
if is_prefix {
|
||||
re.push_str(r"(/|$)");
|
||||
} else {
|
||||
re.push('$');
|
||||
}
|
||||
}
|
||||
|
||||
let re = match Regex::new(&re) {
|
||||
|
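The hunk above wraps the generated pattern in capture group #1 and, for prefix resources, appends `(/|$)` so the regex can only stop at a segment boundary; the matched-length helpers then read `captures[1].len()` instead of the full match length. The same trick is shown below with the `regex` crate (which actix-router uses); the pattern string is a hand-written stand-in for roughly what `/user/{id}` compiles to, not actual actix-router output.

```rust
use regex::Regex;

fn main() {
    // Group 1 holds the user pattern, group 2 enforces the segment boundary.
    let user_pattern = r"/user/[^/]+";
    let re = Regex::new(&format!("({})(/|$)", user_pattern)).unwrap();

    let caps = re.captures("/user/123/stars").unwrap();
    // The full match drags in the boundary '/', but group 1 is exactly the prefix,
    // which is why the code above switched to `captures[1].len()`.
    assert_eq!(&caps[0], "/user/123/");
    assert_eq!(&caps[1], "/user/123");
    assert_eq!(caps[1].len(), 9);
}
```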
@ -1185,10 +1181,12 @@ pub(crate) fn insert_slash(path: &str) -> Cow<'_, str> {
|
|||
}
|
||||
|
||||
/// Returns true if `prefix` acts as a proper prefix (i.e., separated by a slash) in `path`.
|
||||
///
|
||||
/// The `strict` refers to the fact that this will return `false` if `prefix == path`.
|
||||
fn is_strict_prefix(prefix: &str, path: &str) -> bool {
|
||||
path.starts_with(prefix) && (prefix.ends_with('/') || path[prefix.len()..].starts_with('/'))
|
||||
fn is_prefix(prefix: &str, path: &str) -> bool {
|
||||
match path.strip_prefix(prefix) {
|
||||
// Ensure the match ends at a segment boundary
|
||||
Some(rem) if rem.is_empty() || rem.starts_with('/') => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
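`is_prefix` above replaces `is_strict_prefix`; unlike the old helper it also accepts an exact match, which previously had to be special-cased (`prefix == path`) at each call site. Its segment-boundary rule is easiest to see against a few inputs, so here is the same function body lifted out with a small usage harness (the paths are examples only):

```rust
/// A candidate `prefix` only matches `path` if what is left after stripping it
/// is empty or begins at a segment boundary (`/`).
fn is_prefix(prefix: &str, path: &str) -> bool {
    match path.strip_prefix(prefix) {
        // Ensure the match ends at a segment boundary
        Some(rem) if rem.is_empty() || rem.starts_with('/') => true,
        _ => false,
    }
}

fn main() {
    // "/user" matches because the remainder "/123" starts at a boundary.
    assert!(is_prefix("/user", "/user/123"));
    // Exact matches now count too (no more special case at the call site).
    assert!(is_prefix("/user", "/user"));
    // "/user/" demands an empty trailing segment, so "/user/123" is rejected...
    assert!(!is_prefix("/user/", "/user/123"));
    // ...while "/user//123" is accepted: the remainder is "/123".
    assert!(is_prefix("/user/", "/user//123"));
    // "/use" fails: the remainder "r/123" does not start at a boundary.
    assert!(!is_prefix("/use", "/user/123"));
}
```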
@ -1501,54 +1499,70 @@ mod tests {
|
|||
|
||||
let re = ResourceDef::prefix("/name/");
|
||||
assert!(re.is_match("/name/"));
|
||||
assert!(re.is_match("/name/gs"));
|
||||
assert!(re.is_match("/name//gs"));
|
||||
assert!(!re.is_match("/name/gs"));
|
||||
assert!(!re.is_match("/name"));
|
||||
|
||||
let mut path = Path::new("/name/gs");
|
||||
assert!(!re.capture_match_info(&mut path));
|
||||
|
||||
let mut path = Path::new("/name//gs");
|
||||
assert!(re.capture_match_info(&mut path));
|
||||
assert_eq!(path.unprocessed(), "gs");
|
||||
assert_eq!(path.unprocessed(), "/gs");
|
||||
|
||||
let re = ResourceDef::root_prefix("name/");
|
||||
assert!(re.is_match("/name/"));
|
||||
assert!(re.is_match("/name/gs"));
|
||||
assert!(re.is_match("/name//gs"));
|
||||
assert!(!re.is_match("/name/gs"));
|
||||
assert!(!re.is_match("/name"));
|
||||
|
||||
let mut path = Path::new("/name/gs");
|
||||
assert!(re.capture_match_info(&mut path));
|
||||
assert_eq!(path.unprocessed(), "gs");
|
||||
assert!(!re.capture_match_info(&mut path));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prefix_dynamic() {
|
||||
let re = ResourceDef::prefix("/{name}/");
|
||||
let re = ResourceDef::prefix("/{name}");
|
||||
|
||||
assert!(re.is_prefix());
|
||||
|
||||
assert!(re.is_match("/name/"));
|
||||
assert!(re.is_match("/name/gs"));
|
||||
assert!(!re.is_match("/name"));
|
||||
assert!(re.is_match("/name"));
|
||||
|
||||
assert_eq!(re.find_match("/name/"), Some(6));
|
||||
assert_eq!(re.find_match("/name/gs"), Some(6));
|
||||
assert_eq!(re.find_match("/name"), None);
|
||||
assert_eq!(re.find_match("/name/"), Some(5));
|
||||
assert_eq!(re.find_match("/name/gs"), Some(5));
|
||||
assert_eq!(re.find_match("/name"), Some(5));
|
||||
assert_eq!(re.find_match(""), None);
|
||||
|
||||
let mut path = Path::new("/test2/");
|
||||
assert!(re.capture_match_info(&mut path));
|
||||
assert_eq!(&path["name"], "test2");
|
||||
assert_eq!(&path[0], "test2");
|
||||
assert_eq!(path.unprocessed(), "");
|
||||
assert_eq!(path.unprocessed(), "/");
|
||||
|
||||
let mut path = Path::new("/test2/subpath1/subpath2/index.html");
|
||||
assert!(re.capture_match_info(&mut path));
|
||||
assert_eq!(&path["name"], "test2");
|
||||
assert_eq!(&path[0], "test2");
|
||||
assert_eq!(path.unprocessed(), "subpath1/subpath2/index.html");
|
||||
assert_eq!(path.unprocessed(), "/subpath1/subpath2/index.html");
|
||||
|
||||
let resource = ResourceDef::prefix("/user");
|
||||
// input string shorter than prefix
|
||||
assert!(resource.find_match("/foo").is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prefix_empty() {
|
||||
let re = ResourceDef::prefix("");
|
||||
|
||||
assert!(re.is_prefix());
|
||||
|
||||
assert!(re.is_match(""));
|
||||
assert!(re.is_match("/"));
|
||||
assert!(re.is_match("/name/test/test"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_path_list() {
|
||||
let mut s = String::new();
|
||||
|
@ -1667,14 +1681,17 @@ mod tests {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn consistent_match_length() {
|
||||
let result = Some(5);
|
||||
fn prefix_trailing_slash() {
|
||||
// The prefix "/abc/" matches two segments: ["user", ""]
|
||||
|
||||
// These are not prefixes
|
||||
let re = ResourceDef::prefix("/abc/");
|
||||
assert_eq!(re.find_match("/abc/def"), result);
|
||||
assert_eq!(re.find_match("/abc/def"), None);
|
||||
assert_eq!(re.find_match("/abc//def"), Some(5));
|
||||
|
||||
let re = ResourceDef::prefix("/{id}/");
|
||||
assert_eq!(re.find_match("/abc/def"), result);
|
||||
assert_eq!(re.find_match("/abc/def"), None);
|
||||
assert_eq!(re.find_match("/abc//def"), Some(5));
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -4,7 +4,7 @@ digraph {
|
|||
subgraph cluster_net {
|
||||
label="actix-net"
|
||||
"actix-codec" "actix-macros" "actix-rt" "actix-server" "actix-service"
|
||||
"actix-tls" "actix-tracing" "actix-utils" "actix-router"
|
||||
"actix-tls" "actix-tracing" "actix-utils"
|
||||
}
|
||||
|
||||
subgraph cluster_other {
|
||||
|
@ -25,7 +25,6 @@ digraph {
|
|||
"actix-tls" -> { "tokio-util" }[color="#009900"]
|
||||
"actix-server" -> { "actix-service" "actix-rt" "actix-utils" "tokio" }
|
||||
"actix-rt" -> { "actix-macros" "tokio" }
|
||||
"actix-router" -> { "bytestring" }
|
||||
|
||||
"local-channel" -> { "local-waker" }
|
||||
|
||||
|
|
|
@ -10,6 +10,7 @@ digraph {
|
|||
"web-actors"
|
||||
"web-codegen"
|
||||
"http-test"
|
||||
"router"
|
||||
|
||||
{ rank=same; "multipart" "web-actors" "http-test" };
|
||||
{ rank=same; "files" "awc" "web" };
|
||||
|
@ -36,7 +37,7 @@ digraph {
|
|||
"rt" -> { "macros" }
|
||||
|
||||
{ rank=same; "utils" "codec" };
|
||||
{ rank=same; "rt" "macros" "service" "router" };
|
||||
{ rank=same; "rt" "macros" "service" };
|
||||
|
||||
// actix
|
||||
|
||||
|
|
|
@ -10,9 +10,10 @@ digraph {
|
|||
"actix-web-codegen"
|
||||
"actix-http-test"
|
||||
"actix-test"
|
||||
"actix-router"
|
||||
}
|
||||
|
||||
"actix-web" -> { "actix-web-codegen" "actix-http" }
|
||||
"actix-web" -> { "actix-web-codegen" "actix-http" "actix-router" }
|
||||
"awc" -> { "actix-http" }
|
||||
"actix-web-actors" -> { "actix" "actix-web" "actix-http" }
|
||||
"actix-multipart" -> { "actix-web" }
|
||||
|
|
|
@ -457,7 +457,7 @@ impl Header for ContentDisposition {
|
|||
|
||||
fn parse<T: crate::HttpMessage>(msg: &T) -> Result<Self, crate::error::ParseError> {
|
||||
if let Some(h) = msg.headers().get(&Self::name()) {
|
||||
Self::from_raw(&h)
|
||||
Self::from_raw(h)
|
||||
} else {
|
||||
Err(crate::error::ParseError::Header)
|
||||
}
|
||||
|
|
|
@ -553,7 +553,7 @@ impl FormatText {
|
|||
*self = FormatText::Str(s.to_string());
|
||||
}
|
||||
FormatText::RemoteAddr => {
|
||||
let s = if let Some(ref peer) = req.connection_info().remote_addr() {
|
||||
let s = if let Some(peer) = req.connection_info().remote_addr() {
|
||||
FormatText::Str((*peer).to_string())
|
||||
} else {
|
||||
FormatText::Str("-".to_string())
|
||||
|
|
|
@ -184,7 +184,7 @@ impl HttpRequest {
|
|||
U: IntoIterator<Item = I>,
|
||||
I: AsRef<str>,
|
||||
{
|
||||
self.resource_map().url_for(&self, name, elements)
|
||||
self.resource_map().url_for(self, name, elements)
|
||||
}
|
||||
|
||||
/// Generate url for named resource
|
||||
|
@ -199,7 +199,7 @@ impl HttpRequest {
|
|||
#[inline]
|
||||
/// Get a reference to a `ResourceMap` of current application.
|
||||
pub fn resource_map(&self) -> &ResourceMap {
|
||||
&self.app_state().rmap()
|
||||
self.app_state().rmap()
|
||||
}
|
||||
|
||||
/// Peer socket address.
|
||||
|
|
src/scope.rs
|
@ -1153,4 +1153,70 @@ mod tests {
|
|||
Bytes::from_static(b"http://localhost:8080/a/b/c/12345")
|
||||
);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn dynamic_scopes() {
|
||||
let srv = init_service(
|
||||
App::new().service(
|
||||
web::scope("/{a}/").service(
|
||||
web::scope("/{b}/")
|
||||
.route("", web::get().to(|_: HttpRequest| HttpResponse::Created()))
|
||||
.route(
|
||||
"/",
|
||||
web::get().to(|_: HttpRequest| HttpResponse::Accepted()),
|
||||
)
|
||||
.route("/{c}", web::get().to(|_: HttpRequest| HttpResponse::Ok())),
|
||||
),
|
||||
),
|
||||
)
|
||||
.await;
|
||||
|
||||
// note the unintuitive behavior with trailing slashes on scopes with dynamic segments
|
||||
let req = TestRequest::with_uri("/a//b//c").to_request();
|
||||
let resp = call_service(&srv, req).await;
|
||||
assert_eq!(resp.status(), StatusCode::OK);
|
||||
|
||||
let req = TestRequest::with_uri("/a//b/").to_request();
|
||||
let resp = call_service(&srv, req).await;
|
||||
assert_eq!(resp.status(), StatusCode::CREATED);
|
||||
|
||||
let req = TestRequest::with_uri("/a//b//").to_request();
|
||||
let resp = call_service(&srv, req).await;
|
||||
assert_eq!(resp.status(), StatusCode::ACCEPTED);
|
||||
|
||||
let req = TestRequest::with_uri("/a//b//c/d").to_request();
|
||||
let resp = call_service(&srv, req).await;
|
||||
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
|
||||
|
||||
let srv = init_service(
|
||||
App::new().service(
|
||||
web::scope("/{a}").service(
|
||||
web::scope("/{b}")
|
||||
.route("", web::get().to(|_: HttpRequest| HttpResponse::Created()))
|
||||
.route(
|
||||
"/",
|
||||
web::get().to(|_: HttpRequest| HttpResponse::Accepted()),
|
||||
)
|
||||
.route("/{c}", web::get().to(|_: HttpRequest| HttpResponse::Ok())),
|
||||
),
|
||||
),
|
||||
)
|
||||
.await;
|
||||
|
||||
let req = TestRequest::with_uri("/a/b/c").to_request();
|
||||
let resp = call_service(&srv, req).await;
|
||||
assert_eq!(resp.status(), StatusCode::OK);
|
||||
|
||||
let req = TestRequest::with_uri("/a/b").to_request();
|
||||
let resp = call_service(&srv, req).await;
|
||||
assert_eq!(resp.status(), StatusCode::CREATED);
|
||||
|
||||
let req = TestRequest::with_uri("/a/b/").to_request();
|
||||
let resp = call_service(&srv, req).await;
|
||||
assert_eq!(resp.status(), StatusCode::ACCEPTED);
|
||||
|
||||
let req = TestRequest::with_uri("/a/b/c/d").to_request();
|
||||
let resp = call_service(&srv, req).await;
|
||||
assert_eq!(resp.status(), StatusCode::NOT_FOUND);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -117,7 +117,7 @@ impl ServiceRequest {
|
|||
/// This method returns reference to the request head
|
||||
#[inline]
|
||||
pub fn head(&self) -> &RequestHead {
|
||||
&self.req.head()
|
||||
self.req.head()
|
||||
}
|
||||
|
||||
/// This method returns reference to the request head
|
||||
|
|
|
@ -253,7 +253,7 @@ where
|
|||
Ok(bytes) => {
|
||||
let fallback = bytes.clone();
|
||||
let left =
|
||||
L::from_request(&this.req, &mut payload_from_bytes(bytes));
|
||||
L::from_request(this.req, &mut payload_from_bytes(bytes));
|
||||
EitherExtractState::Left { left, fallback }
|
||||
}
|
||||
Err(err) => break Err(EitherExtractError::Bytes(err)),
|
||||
|
@ -265,7 +265,7 @@ where
|
|||
Ok(extracted) => break Ok(Either::Left(extracted)),
|
||||
Err(left_err) => {
|
||||
let right = R::from_request(
|
||||
&this.req,
|
||||
this.req,
|
||||
&mut payload_from_bytes(mem::take(fallback)),
|
||||
);
|
||||
EitherExtractState::Right {
|
||||
|
|
|
@ -425,7 +425,7 @@ where
|
|||
}
|
||||
}
|
||||
None => {
|
||||
let json = serde_json::from_slice::<T>(&buf)
|
||||
let json = serde_json::from_slice::<T>(buf)
|
||||
.map_err(JsonPayloadError::Deserialize)?;
|
||||
return Poll::Ready(Ok(json));
|
||||
}
|
||||
|
|