From e965d8298f421e9c89fe98b1300b8361e948c324 Mon Sep 17 00:00:00 2001 From: Rob Ede Date: Thu, 12 Aug 2021 20:18:09 +0100 Subject: [PATCH 01/23] HRS security fixes (#2363) --- actix-http/CHANGES.md | 10 + actix-http/Cargo.toml | 2 +- actix-http/README.md | 4 +- actix-http/src/error.rs | 2 +- actix-http/src/h1/chunked.rs | 432 +++++++++++++++++++++++++++++++ actix-http/src/h1/decoder.rs | 481 ++++++++++------------------------- actix-http/src/h1/encoder.rs | 1 + actix-http/src/h1/mod.rs | 2 + 8 files changed, 583 insertions(+), 351 deletions(-) create mode 100644 actix-http/src/h1/chunked.rs diff --git a/actix-http/CHANGES.md b/actix-http/CHANGES.md index 8ead43718..f52f5ba68 100644 --- a/actix-http/CHANGES.md +++ b/actix-http/CHANGES.md @@ -3,6 +3,11 @@ ## Unreleased - 2021-xx-xx +## 3.0.0-beta.8 - 2021-08-09 +### Fixed +* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977) + + ## 3.0.0-beta.8 - 2021-06-26 ### Changed * Change compression algorithm features flags. [#2250] @@ -210,6 +215,11 @@ [#1878]: https://github.com/actix/actix-web/pull/1878 +## 2.2.1 - 2021-08-09 +### Fixed +* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977) + + ## 2.2.0 - 2020-11-25 ### Added * HttpResponse builders for 1xx status codes. [#1768] diff --git a/actix-http/Cargo.toml b/actix-http/Cargo.toml index a12fed4b9..4ce55dca1 100644 --- a/actix-http/Cargo.toml +++ b/actix-http/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "actix-http" -version = "3.0.0-beta.8" +version = "3.0.0-beta.9" authors = ["Nikolay Kim "] description = "HTTP primitives for the Actix ecosystem" keywords = ["actix", "http", "framework", "async", "futures"] diff --git a/actix-http/README.md b/actix-http/README.md index de1ef0a9b..5b06583bc 100644 --- a/actix-http/README.md +++ b/actix-http/README.md @@ -3,11 +3,11 @@ > HTTP primitives for the Actix ecosystem. [![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http) -[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.0.0-beta.8)](https://docs.rs/actix-http/3.0.0-beta.8) +[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.0.0-beta.9)](https://docs.rs/actix-http/3.0.0-beta.9) [![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
-[![dependency status](https://deps.rs/crate/actix-http/3.0.0-beta.8/status.svg)](https://deps.rs/crate/actix-http/3.0.0-beta.8) +[![dependency status](https://deps.rs/crate/actix-http/3.0.0-beta.9/status.svg)](https://deps.rs/crate/actix-http/3.0.0-beta.9) [![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) diff --git a/actix-http/src/error.rs b/actix-http/src/error.rs index 54666e072..f7d7f696a 100644 --- a/actix-http/src/error.rs +++ b/actix-http/src/error.rs @@ -196,7 +196,7 @@ pub enum ParseError { #[display(fmt = "IO error: {}", _0)] Io(io::Error), - /// Parsing a field as string failed + /// Parsing a field as string failed. #[display(fmt = "UTF8 error: {}", _0)] Utf8(Utf8Error), } diff --git a/actix-http/src/h1/chunked.rs b/actix-http/src/h1/chunked.rs new file mode 100644 index 000000000..1224ce08c --- /dev/null +++ b/actix-http/src/h1/chunked.rs @@ -0,0 +1,432 @@ +use std::{io, task::Poll}; + +use bytes::{Buf as _, Bytes, BytesMut}; + +macro_rules! byte ( + ($rdr:ident) => ({ + if $rdr.len() > 0 { + let b = $rdr[0]; + $rdr.advance(1); + b + } else { + return Poll::Pending + } + }) +); + +#[derive(Debug, PartialEq, Clone)] +pub(super) enum ChunkedState { + Size, + SizeLws, + Extension, + SizeLf, + Body, + BodyCr, + BodyLf, + EndCr, + EndLf, + End, +} + +impl ChunkedState { + pub(super) fn step( + &self, + body: &mut BytesMut, + size: &mut u64, + buf: &mut Option, + ) -> Poll> { + use self::ChunkedState::*; + match *self { + Size => ChunkedState::read_size(body, size), + SizeLws => ChunkedState::read_size_lws(body), + Extension => ChunkedState::read_extension(body), + SizeLf => ChunkedState::read_size_lf(body, size), + Body => ChunkedState::read_body(body, size, buf), + BodyCr => ChunkedState::read_body_cr(body), + BodyLf => ChunkedState::read_body_lf(body), + EndCr => ChunkedState::read_end_cr(body), + EndLf => ChunkedState::read_end_lf(body), + End => Poll::Ready(Ok(ChunkedState::End)), + } + } + + fn read_size( + rdr: &mut BytesMut, + size: &mut u64, + ) -> Poll> { + let radix = 16; + + let rem = match byte!(rdr) { + b @ b'0'..=b'9' => b - b'0', + b @ b'a'..=b'f' => b + 10 - b'a', + b @ b'A'..=b'F' => b + 10 - b'A', + b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)), + b';' => return Poll::Ready(Ok(ChunkedState::Extension)), + b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)), + _ => { + return Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk size line: Invalid Size", + ))); + } + }; + + match size.checked_mul(radix) { + Some(n) => { + *size = n as u64; + *size += rem as u64; + + Poll::Ready(Ok(ChunkedState::Size)) + } + None => { + log::debug!("chunk size would overflow u64"); + Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk size line: Size is too big", + ))) + } + } + } + + fn read_size_lws(rdr: &mut BytesMut) -> Poll> { + match byte!(rdr) { + // LWS can follow the chunk size, but no more digits can come + b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)), + b';' => Poll::Ready(Ok(ChunkedState::Extension)), + b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)), + _ => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk size linear white space", + ))), + } + } + fn read_extension(rdr: &mut BytesMut) -> Poll> { + match byte!(rdr) { + b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)), + // strictly 0x20 
(space) should be disallowed but we don't parse quoted strings here + 0x00..=0x08 | 0x0a..=0x1f | 0x7f => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid character in chunk extension", + ))), + _ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions + } + } + fn read_size_lf( + rdr: &mut BytesMut, + size: &mut u64, + ) -> Poll> { + match byte!(rdr) { + b'\n' if *size > 0 => Poll::Ready(Ok(ChunkedState::Body)), + b'\n' if *size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)), + _ => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk size LF", + ))), + } + } + + fn read_body( + rdr: &mut BytesMut, + rem: &mut u64, + buf: &mut Option, + ) -> Poll> { + log::trace!("Chunked read, remaining={:?}", rem); + + let len = rdr.len() as u64; + if len == 0 { + Poll::Ready(Ok(ChunkedState::Body)) + } else { + let slice; + if *rem > len { + slice = rdr.split().freeze(); + *rem -= len; + } else { + slice = rdr.split_to(*rem as usize).freeze(); + *rem = 0; + } + *buf = Some(slice); + if *rem > 0 { + Poll::Ready(Ok(ChunkedState::Body)) + } else { + Poll::Ready(Ok(ChunkedState::BodyCr)) + } + } + } + + fn read_body_cr(rdr: &mut BytesMut) -> Poll> { + match byte!(rdr) { + b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)), + _ => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk body CR", + ))), + } + } + fn read_body_lf(rdr: &mut BytesMut) -> Poll> { + match byte!(rdr) { + b'\n' => Poll::Ready(Ok(ChunkedState::Size)), + _ => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk body LF", + ))), + } + } + fn read_end_cr(rdr: &mut BytesMut) -> Poll> { + match byte!(rdr) { + b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)), + _ => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk end CR", + ))), + } + } + fn read_end_lf(rdr: &mut BytesMut) -> Poll> { + match byte!(rdr) { + b'\n' => Poll::Ready(Ok(ChunkedState::End)), + _ => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk end LF", + ))), + } + } +} + +#[cfg(test)] +mod tests { + use actix_codec::Decoder as _; + use bytes::{Bytes, BytesMut}; + use http::Method; + + use crate::{ + error::ParseError, + h1::decoder::{MessageDecoder, PayloadItem}, + HttpMessage as _, Request, + }; + + macro_rules! parse_ready { + ($e:expr) => {{ + match MessageDecoder::::default().decode($e) { + Ok(Some((msg, _))) => msg, + Ok(_) => unreachable!("Eof during parsing http request"), + Err(err) => unreachable!("Error during parsing http request: {:?}", err), + } + }}; + } + + macro_rules! 
expect_parse_err { + ($e:expr) => {{ + match MessageDecoder::::default().decode($e) { + Err(err) => match err { + ParseError::Io(_) => unreachable!("Parse error expected"), + _ => {} + }, + _ => unreachable!("Error expected"), + } + }}; + } + + #[test] + fn test_parse_chunked_payload_chunk_extension() { + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + transfer-encoding: chunked\r\n\ + \r\n", + ); + + let mut reader = MessageDecoder::::default(); + let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + assert!(msg.chunked().unwrap()); + + buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n") + let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk(); + assert_eq!(chunk, Bytes::from_static(b"data")); + let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk(); + assert_eq!(chunk, Bytes::from_static(b"line")); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert!(msg.eof()); + } + + #[test] + fn test_request_chunked() { + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + transfer-encoding: chunked\r\n\r\n", + ); + let req = parse_ready!(&mut buf); + + if let Ok(val) = req.chunked() { + assert!(val); + } else { + unreachable!("Error"); + } + + // intentional typo in "chunked" + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + transfer-encoding: chnked\r\n\r\n", + ); + expect_parse_err!(&mut buf); + } + + #[test] + fn test_http_request_chunked_payload() { + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + transfer-encoding: chunked\r\n\r\n", + ); + let mut reader = MessageDecoder::::default(); + let (req, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + assert!(req.chunked().unwrap()); + + buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); + assert_eq!( + pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(), + b"data" + ); + assert_eq!( + pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(), + b"line" + ); + assert!(pl.decode(&mut buf).unwrap().unwrap().eof()); + } + + #[test] + fn test_http_request_chunked_payload_and_next_message() { + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + transfer-encoding: chunked\r\n\r\n", + ); + let mut reader = MessageDecoder::::default(); + let (req, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + assert!(req.chunked().unwrap()); + + buf.extend( + b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\ + POST /test2 HTTP/1.1\r\n\ + transfer-encoding: chunked\r\n\r\n" + .iter(), + ); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(msg.chunk().as_ref(), b"data"); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(msg.chunk().as_ref(), b"line"); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert!(msg.eof()); + + let (req, _) = reader.decode(&mut buf).unwrap().unwrap(); + assert!(req.chunked().unwrap()); + assert_eq!(*req.method(), Method::POST); + assert!(req.chunked().unwrap()); + } + + #[test] + fn test_http_request_chunked_payload_chunks() { + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + transfer-encoding: chunked\r\n\r\n", + ); + + let mut reader = MessageDecoder::::default(); + let (req, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + assert!(req.chunked().unwrap()); + + buf.extend(b"4\r\n1111\r\n"); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(msg.chunk().as_ref(), b"1111"); + + buf.extend(b"4\r\ndata\r"); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(msg.chunk().as_ref(), 
b"data"); + + buf.extend(b"\n4"); + assert!(pl.decode(&mut buf).unwrap().is_none()); + + buf.extend(b"\r"); + assert!(pl.decode(&mut buf).unwrap().is_none()); + buf.extend(b"\n"); + assert!(pl.decode(&mut buf).unwrap().is_none()); + + buf.extend(b"li"); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(msg.chunk().as_ref(), b"li"); + + //trailers + //buf.feed_data("test: test\r\n"); + //not_ready!(reader.parse(&mut buf, &mut readbuf)); + + buf.extend(b"ne\r\n0\r\n"); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(msg.chunk().as_ref(), b"ne"); + assert!(pl.decode(&mut buf).unwrap().is_none()); + + buf.extend(b"\r\n"); + assert!(pl.decode(&mut buf).unwrap().unwrap().eof()); + } + + #[test] + fn chunk_extension_quoted() { + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + Host: localhost:8080\r\n\ + Transfer-Encoding: chunked\r\n\ + \r\n\ + 2;hello=b;one=\"1 2 3\"\r\n\ + xx", + ); + + let mut reader = MessageDecoder::::default(); + let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + + let chunk = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"xx"))); + } + + #[test] + fn hrs_chunk_extension_invalid() { + let mut buf = BytesMut::from( + "GET / HTTP/1.1\r\n\ + Host: localhost:8080\r\n\ + Transfer-Encoding: chunked\r\n\ + \r\n\ + 2;x\nx\r\n\ + 4c\r\n\ + 0\r\n", + ); + + let mut reader = MessageDecoder::::default(); + let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + + let err = pl.decode(&mut buf).unwrap_err(); + assert!(err + .to_string() + .contains("Invalid character in chunk extension")); + } + + #[test] + fn hrs_chunk_size_overflow() { + let mut buf = BytesMut::from( + "GET / HTTP/1.1\r\n\ + Host: example.com\r\n\ + Transfer-Encoding: chunked\r\n\ + \r\n\ + f0000000000000003\r\n\ + abc\r\n\ + 0\r\n", + ); + + let mut reader = MessageDecoder::::default(); + let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + + let err = pl.decode(&mut buf).unwrap_err(); + assert!(err + .to_string() + .contains("Invalid chunk size line: Size is too big")); + } +} diff --git a/actix-http/src/h1/decoder.rs b/actix-http/src/h1/decoder.rs index f240710c2..313ffd5e0 100644 --- a/actix-http/src/h1/decoder.rs +++ b/actix-http/src/h1/decoder.rs @@ -1,18 +1,18 @@ -use std::convert::TryFrom; -use std::io; -use std::marker::PhantomData; -use std::task::Poll; +use std::{convert::TryFrom, io, marker::PhantomData, task::Poll}; use actix_codec::Decoder; -use bytes::{Buf, Bytes, BytesMut}; +use bytes::{Bytes, BytesMut}; use http::header::{HeaderName, HeaderValue}; use http::{header, Method, StatusCode, Uri, Version}; use log::{debug, error, trace}; -use crate::error::ParseError; -use crate::header::HeaderMap; -use crate::message::{ConnectionType, ResponseHead}; -use crate::request::Request; +use super::chunked::ChunkedState; +use crate::{ + error::ParseError, + header::HeaderMap, + message::{ConnectionType, ResponseHead}, + request::Request, +}; pub(crate) const MAX_BUFFER_SIZE: usize = 131_072; const MAX_HEADERS: usize = 96; @@ -67,6 +67,7 @@ pub(crate) trait MessageType: Sized { let mut has_upgrade_websocket = false; let mut expect = false; let mut chunked = false; + let mut seen_te = false; let mut content_length = None; { @@ -85,8 +86,17 @@ pub(crate) trait MessageType: Sized { }; match name { - header::CONTENT_LENGTH => { - if let Ok(s) = value.to_str() { + header::CONTENT_LENGTH if content_length.is_some() => { + 
debug!("multiple Content-Length"); + return Err(ParseError::Header); + } + + header::CONTENT_LENGTH => match value.to_str() { + Ok(s) if s.trim().starts_with('+') => { + debug!("illegal Content-Length: {:?}", s); + return Err(ParseError::Header); + } + Ok(s) => { if let Ok(len) = s.parse::() { if len != 0 { content_length = Some(len); @@ -95,15 +105,31 @@ pub(crate) trait MessageType: Sized { debug!("illegal Content-Length: {:?}", s); return Err(ParseError::Header); } - } else { + } + Err(_) => { debug!("illegal Content-Length: {:?}", value); return Err(ParseError::Header); } - } + }, + // transfer-encoding + header::TRANSFER_ENCODING if seen_te => { + debug!("multiple Transfer-Encoding not allowed"); + return Err(ParseError::Header); + } + header::TRANSFER_ENCODING => { + seen_te = true; + if let Ok(s) = value.to_str().map(str::trim) { - chunked = s.eq_ignore_ascii_case("chunked"); + if s.eq_ignore_ascii_case("chunked") { + chunked = true; + } else if s.eq_ignore_ascii_case("identity") { + // allow silently since multiple TE headers are already checked + } else { + debug!("illegal Transfer-Encoding: {:?}", s); + return Err(ParseError::Header); + } } else { return Err(ParseError::Header); } @@ -408,20 +434,6 @@ enum Kind { Eof, } -#[derive(Debug, PartialEq, Clone)] -enum ChunkedState { - Size, - SizeLws, - Extension, - SizeLf, - Body, - BodyCr, - BodyLf, - EndCr, - EndLf, - End, -} - impl Decoder for PayloadDecoder { type Item = PayloadItem; type Error = io::Error; @@ -451,19 +463,23 @@ impl Decoder for PayloadDecoder { Kind::Chunked(ref mut state, ref mut size) => { loop { let mut buf = None; + // advances the chunked state *state = match state.step(src, size, &mut buf) { Poll::Pending => return Ok(None), Poll::Ready(Ok(state)) => state, Poll::Ready(Err(e)) => return Err(e), }; + if *state == ChunkedState::End { trace!("End of chunked stream"); return Ok(Some(PayloadItem::Eof)); } + if let Some(buf) = buf { return Ok(Some(PayloadItem::Chunk(buf))); } + if src.is_empty() { return Ok(None); } @@ -480,201 +496,40 @@ impl Decoder for PayloadDecoder { } } -macro_rules! 
byte ( - ($rdr:ident) => ({ - if $rdr.len() > 0 { - let b = $rdr[0]; - $rdr.advance(1); - b - } else { - return Poll::Pending - } - }) -); - -impl ChunkedState { - fn step( - &self, - body: &mut BytesMut, - size: &mut u64, - buf: &mut Option, - ) -> Poll> { - use self::ChunkedState::*; - match *self { - Size => ChunkedState::read_size(body, size), - SizeLws => ChunkedState::read_size_lws(body), - Extension => ChunkedState::read_extension(body), - SizeLf => ChunkedState::read_size_lf(body, size), - Body => ChunkedState::read_body(body, size, buf), - BodyCr => ChunkedState::read_body_cr(body), - BodyLf => ChunkedState::read_body_lf(body), - EndCr => ChunkedState::read_end_cr(body), - EndLf => ChunkedState::read_end_lf(body), - End => Poll::Ready(Ok(ChunkedState::End)), - } - } - - fn read_size( - rdr: &mut BytesMut, - size: &mut u64, - ) -> Poll> { - let radix = 16; - match byte!(rdr) { - b @ b'0'..=b'9' => { - *size *= radix; - *size += u64::from(b - b'0'); - } - b @ b'a'..=b'f' => { - *size *= radix; - *size += u64::from(b + 10 - b'a'); - } - b @ b'A'..=b'F' => { - *size *= radix; - *size += u64::from(b + 10 - b'A'); - } - b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)), - b';' => return Poll::Ready(Ok(ChunkedState::Extension)), - b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)), - _ => { - return Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk size line: Invalid Size", - ))); - } - } - Poll::Ready(Ok(ChunkedState::Size)) - } - - fn read_size_lws(rdr: &mut BytesMut) -> Poll> { - trace!("read_size_lws"); - match byte!(rdr) { - // LWS can follow the chunk size, but no more digits can come - b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)), - b';' => Poll::Ready(Ok(ChunkedState::Extension)), - b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)), - _ => Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk size linear white space", - ))), - } - } - fn read_extension(rdr: &mut BytesMut) -> Poll> { - match byte!(rdr) { - b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)), - _ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions - } - } - fn read_size_lf( - rdr: &mut BytesMut, - size: &mut u64, - ) -> Poll> { - match byte!(rdr) { - b'\n' if *size > 0 => Poll::Ready(Ok(ChunkedState::Body)), - b'\n' if *size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)), - _ => Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk size LF", - ))), - } - } - - fn read_body( - rdr: &mut BytesMut, - rem: &mut u64, - buf: &mut Option, - ) -> Poll> { - trace!("Chunked read, remaining={:?}", rem); - - let len = rdr.len() as u64; - if len == 0 { - Poll::Ready(Ok(ChunkedState::Body)) - } else { - let slice; - if *rem > len { - slice = rdr.split().freeze(); - *rem -= len; - } else { - slice = rdr.split_to(*rem as usize).freeze(); - *rem = 0; - } - *buf = Some(slice); - if *rem > 0 { - Poll::Ready(Ok(ChunkedState::Body)) - } else { - Poll::Ready(Ok(ChunkedState::BodyCr)) - } - } - } - - fn read_body_cr(rdr: &mut BytesMut) -> Poll> { - match byte!(rdr) { - b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)), - _ => Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk body CR", - ))), - } - } - fn read_body_lf(rdr: &mut BytesMut) -> Poll> { - match byte!(rdr) { - b'\n' => Poll::Ready(Ok(ChunkedState::Size)), - _ => Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk body LF", - ))), - } - } - fn read_end_cr(rdr: &mut BytesMut) -> Poll> { - match byte!(rdr) { - 
b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)), - _ => Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk end CR", - ))), - } - } - fn read_end_lf(rdr: &mut BytesMut) -> Poll> { - match byte!(rdr) { - b'\n' => Poll::Ready(Ok(ChunkedState::End)), - _ => Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk end LF", - ))), - } - } -} - #[cfg(test)] mod tests { use bytes::{Bytes, BytesMut}; use http::{Method, Version}; use super::*; - use crate::error::ParseError; - use crate::http::header::{HeaderName, SET_COOKIE}; - use crate::HttpMessage; + use crate::{ + error::ParseError, + http::header::{HeaderName, SET_COOKIE}, + HttpMessage as _, + }; impl PayloadType { - fn unwrap(self) -> PayloadDecoder { + pub(crate) fn unwrap(self) -> PayloadDecoder { match self { PayloadType::Payload(pl) => pl, _ => panic!(), } } - fn is_unhandled(&self) -> bool { + pub(crate) fn is_unhandled(&self) -> bool { matches!(self, PayloadType::Stream(_)) } } impl PayloadItem { - fn chunk(self) -> Bytes { + pub(crate) fn chunk(self) -> Bytes { match self { PayloadItem::Chunk(chunk) => chunk, _ => panic!("error"), } } - fn eof(&self) -> bool { + + pub(crate) fn eof(&self) -> bool { matches!(*self, PayloadItem::Eof) } } @@ -967,34 +822,6 @@ mod tests { assert!(req.upgrade()); } - #[test] - fn test_request_chunked() { - let mut buf = BytesMut::from( - "GET /test HTTP/1.1\r\n\ - transfer-encoding: chunked\r\n\r\n", - ); - let req = parse_ready!(&mut buf); - - if let Ok(val) = req.chunked() { - assert!(val); - } else { - unreachable!("Error"); - } - - // intentional typo in "chunked" - let mut buf = BytesMut::from( - "GET /test HTTP/1.1\r\n\ - transfer-encoding: chnked\r\n\r\n", - ); - let req = parse_ready!(&mut buf); - - if let Ok(val) = req.chunked() { - assert!(!val); - } else { - unreachable!("Error"); - } - } - #[test] fn test_headers_content_length_err_1() { let mut buf = BytesMut::from( @@ -1112,126 +939,6 @@ mod tests { expect_parse_err!(&mut buf); } - #[test] - fn test_http_request_chunked_payload() { - let mut buf = BytesMut::from( - "GET /test HTTP/1.1\r\n\ - transfer-encoding: chunked\r\n\r\n", - ); - let mut reader = MessageDecoder::::default(); - let (req, pl) = reader.decode(&mut buf).unwrap().unwrap(); - let mut pl = pl.unwrap(); - assert!(req.chunked().unwrap()); - - buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); - assert_eq!( - pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(), - b"data" - ); - assert_eq!( - pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(), - b"line" - ); - assert!(pl.decode(&mut buf).unwrap().unwrap().eof()); - } - - #[test] - fn test_http_request_chunked_payload_and_next_message() { - let mut buf = BytesMut::from( - "GET /test HTTP/1.1\r\n\ - transfer-encoding: chunked\r\n\r\n", - ); - let mut reader = MessageDecoder::::default(); - let (req, pl) = reader.decode(&mut buf).unwrap().unwrap(); - let mut pl = pl.unwrap(); - assert!(req.chunked().unwrap()); - - buf.extend( - b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\ - POST /test2 HTTP/1.1\r\n\ - transfer-encoding: chunked\r\n\r\n" - .iter(), - ); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert_eq!(msg.chunk().as_ref(), b"data"); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert_eq!(msg.chunk().as_ref(), b"line"); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert!(msg.eof()); - - let (req, _) = reader.decode(&mut buf).unwrap().unwrap(); - assert!(req.chunked().unwrap()); - assert_eq!(*req.method(), Method::POST); - assert!(req.chunked().unwrap()); 
- } - - #[test] - fn test_http_request_chunked_payload_chunks() { - let mut buf = BytesMut::from( - "GET /test HTTP/1.1\r\n\ - transfer-encoding: chunked\r\n\r\n", - ); - - let mut reader = MessageDecoder::::default(); - let (req, pl) = reader.decode(&mut buf).unwrap().unwrap(); - let mut pl = pl.unwrap(); - assert!(req.chunked().unwrap()); - - buf.extend(b"4\r\n1111\r\n"); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert_eq!(msg.chunk().as_ref(), b"1111"); - - buf.extend(b"4\r\ndata\r"); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert_eq!(msg.chunk().as_ref(), b"data"); - - buf.extend(b"\n4"); - assert!(pl.decode(&mut buf).unwrap().is_none()); - - buf.extend(b"\r"); - assert!(pl.decode(&mut buf).unwrap().is_none()); - buf.extend(b"\n"); - assert!(pl.decode(&mut buf).unwrap().is_none()); - - buf.extend(b"li"); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert_eq!(msg.chunk().as_ref(), b"li"); - - //trailers - //buf.feed_data("test: test\r\n"); - //not_ready!(reader.parse(&mut buf, &mut readbuf)); - - buf.extend(b"ne\r\n0\r\n"); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert_eq!(msg.chunk().as_ref(), b"ne"); - assert!(pl.decode(&mut buf).unwrap().is_none()); - - buf.extend(b"\r\n"); - assert!(pl.decode(&mut buf).unwrap().unwrap().eof()); - } - - #[test] - fn test_parse_chunked_payload_chunk_extension() { - let mut buf = BytesMut::from( - "GET /test HTTP/1.1\r\n\ - transfer-encoding: chunked\r\n\ - \r\n", - ); - - let mut reader = MessageDecoder::::default(); - let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap(); - let mut pl = pl.unwrap(); - assert!(msg.chunked().unwrap()); - - buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n") - let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk(); - assert_eq!(chunk, Bytes::from_static(b"data")); - let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk(); - assert_eq!(chunk, Bytes::from_static(b"line")); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert!(msg.eof()); - } - #[test] fn test_response_http10_read_until_eof() { let mut buf = BytesMut::from("HTTP/1.0 200 Ok\r\n\r\ntest data"); @@ -1243,4 +950,84 @@ mod tests { let chunk = pl.decode(&mut buf).unwrap().unwrap(); assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"test data"))); } + + #[test] + fn hrs_multiple_content_length() { + let mut buf = BytesMut::from( + "GET / HTTP/1.1\r\n\ + Host: example.com\r\n\ + Content-Length: 4\r\n\ + Content-Length: 2\r\n\ + \r\n\ + abcd", + ); + + expect_parse_err!(&mut buf); + } + + #[test] + fn hrs_content_length_plus() { + let mut buf = BytesMut::from( + "GET / HTTP/1.1\r\n\ + Host: example.com\r\n\ + Content-Length: +3\r\n\ + \r\n\ + 000", + ); + + expect_parse_err!(&mut buf); + } + + #[test] + fn hrs_unknown_transfer_encoding() { + let mut buf = BytesMut::from( + "GET / HTTP/1.1\r\n\ + Host: example.com\r\n\ + Transfer-Encoding: JUNK\r\n\ + Transfer-Encoding: chunked\r\n\ + \r\n\ + 5\r\n\ + hello\r\n\ + 0", + ); + + expect_parse_err!(&mut buf); + } + + #[test] + fn hrs_multiple_transfer_encoding() { + let mut buf = BytesMut::from( + "GET / HTTP/1.1\r\n\ + Host: example.com\r\n\ + Content-Length: 51\r\n\ + Transfer-Encoding: identity\r\n\ + Transfer-Encoding: chunked\r\n\ + \r\n\ + 0\r\n\ + \r\n\ + GET /forbidden HTTP/1.1\r\n\ + Host: example.com\r\n\r\n", + ); + + expect_parse_err!(&mut buf); + } + + #[test] + fn transfer_encoding_agrees() { + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + Host: example.com\r\n\ + Content-Length: 3\r\n\ + 
Transfer-Encoding: identity\r\n\ + \r\n\ + 0\r\n", + ); + + let mut reader = MessageDecoder::::default(); + let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + + let chunk = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"0\r\n"))); + } } diff --git a/actix-http/src/h1/encoder.rs b/actix-http/src/h1/encoder.rs index 254981123..4e5c9d238 100644 --- a/actix-http/src/h1/encoder.rs +++ b/actix-http/src/h1/encoder.rs @@ -81,6 +81,7 @@ pub(crate) trait MessageType: Sized { match length { BodySize::Stream => { if chunked { + skip_len = true; if camel_case { dst.put_slice(b"\r\nTransfer-Encoding: chunked\r\n") } else { diff --git a/actix-http/src/h1/mod.rs b/actix-http/src/h1/mod.rs index 7e6df6ceb..17cbfb90f 100644 --- a/actix-http/src/h1/mod.rs +++ b/actix-http/src/h1/mod.rs @@ -1,6 +1,8 @@ //! HTTP/1 protocol implementation. + use bytes::{Bytes, BytesMut}; +mod chunked; mod client; mod codec; mod decoder; From 384164cc148e4bf31a8ff3ddffd1139e64a1c15f Mon Sep 17 00:00:00 2001 From: Rob Ede Date: Fri, 6 Aug 2021 20:10:58 +0100 Subject: [PATCH 02/23] update graphs --- docs/graphs/net-only.dot | 3 +-- docs/graphs/web-focus.dot | 3 ++- docs/graphs/web-only.dot | 3 ++- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/graphs/net-only.dot b/docs/graphs/net-only.dot index bee0185ab..8a58ec2b8 100644 --- a/docs/graphs/net-only.dot +++ b/docs/graphs/net-only.dot @@ -4,7 +4,7 @@ digraph { subgraph cluster_net { label="actix-net" "actix-codec" "actix-macros" "actix-rt" "actix-server" "actix-service" - "actix-tls" "actix-tracing" "actix-utils" "actix-router" + "actix-tls" "actix-tracing" "actix-utils" } subgraph cluster_other { @@ -25,7 +25,6 @@ digraph { "actix-tls" -> { "tokio-util" }[color="#009900"] "actix-server" -> { "actix-service" "actix-rt" "actix-utils" "tokio" } "actix-rt" -> { "actix-macros" "tokio" } - "actix-router" -> { "bytestring" } "local-channel" -> { "local-waker" } diff --git a/docs/graphs/web-focus.dot b/docs/graphs/web-focus.dot index 2c6e2779b..63b3eaa82 100644 --- a/docs/graphs/web-focus.dot +++ b/docs/graphs/web-focus.dot @@ -10,6 +10,7 @@ digraph { "web-actors" "web-codegen" "http-test" + "router" { rank=same; "multipart" "web-actors" "http-test" }; { rank=same; "files" "awc" "web" }; @@ -36,7 +37,7 @@ digraph { "rt" -> { "macros" } { rank=same; "utils" "codec" }; - { rank=same; "rt" "macros" "service" "router" }; + { rank=same; "rt" "macros" "service" }; // actix diff --git a/docs/graphs/web-only.dot b/docs/graphs/web-only.dot index b0decd818..ee74c292b 100644 --- a/docs/graphs/web-only.dot +++ b/docs/graphs/web-only.dot @@ -10,9 +10,10 @@ digraph { "actix-web-codegen" "actix-http-test" "actix-test" + "actix-router" } - "actix-web" -> { "actix-web-codegen" "actix-http" } + "actix-web" -> { "actix-web-codegen" "actix-http" "actix-router" } "awc" -> { "actix-http" } "actix-web-actors" -> { "actix" "actix-web" "actix-http" } "actix-multipart" -> { "actix-web" } From a0c0bff944febe1d984aedc4866acee1bed95bdd Mon Sep 17 00:00:00 2001 From: Thales <46510852+thalesfragoso@users.noreply.github.com> Date: Fri, 13 Aug 2021 14:41:19 -0300 Subject: [PATCH 03/23] Don't create a slice to potential uninit data on h1 encoder (#2364) Co-authored-by: Rob Ede --- actix-http/CHANGES.md | 4 ++++ actix-http/benches/write-camel-case.rs | 10 +++++++--- actix-http/src/h1/encoder.rs | 15 +++++++++++---- 3 files changed, 22 insertions(+), 7 deletions(-) diff --git a/actix-http/CHANGES.md 
b/actix-http/CHANGES.md index f52f5ba68..9ed28105f 100644 --- a/actix-http/CHANGES.md +++ b/actix-http/CHANGES.md @@ -2,6 +2,10 @@ ## Unreleased - 2021-xx-xx +### Fixed +* Remove slice creation pointing to potential uninitialized data on h1 encoder. [#2364] + +[#2364]: https://github.com/actix/actix-web/pull/2364 ## 3.0.0-beta.8 - 2021-08-09 ### Fixed diff --git a/actix-http/benches/write-camel-case.rs b/actix-http/benches/write-camel-case.rs index fa4930eb9..ccf09b37e 100644 --- a/actix-http/benches/write-camel-case.rs +++ b/actix-http/benches/write-camel-case.rs @@ -18,7 +18,8 @@ fn bench_write_camel_case(c: &mut Criterion) { group.bench_with_input(BenchmarkId::new("New", i), bts, |b, bts| { b.iter(|| { let mut buf = black_box([0; 24]); - _new::write_camel_case(black_box(bts), &mut buf) + let len = black_box(bts.len()); + _new::write_camel_case(black_box(bts), buf.as_mut_ptr(), len) }); }); } @@ -30,9 +31,12 @@ criterion_group!(benches, bench_write_camel_case); criterion_main!(benches); mod _new { - pub fn write_camel_case(value: &[u8], buffer: &mut [u8]) { + pub fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) { // first copy entire (potentially wrong) slice to output - buffer[..value.len()].copy_from_slice(value); + let buffer = unsafe { + std::ptr::copy_nonoverlapping(value.as_ptr(), buf, len); + std::slice::from_raw_parts_mut(buf, len) + }; let mut iter = value.iter(); diff --git a/actix-http/src/h1/encoder.rs b/actix-http/src/h1/encoder.rs index 4e5c9d238..5e1d47785 100644 --- a/actix-http/src/h1/encoder.rs +++ b/actix-http/src/h1/encoder.rs @@ -175,7 +175,7 @@ pub(crate) trait MessageType: Sized { unsafe { if camel_case { // use Camel-Case headers - write_camel_case(k, from_raw_parts_mut(buf, k_len)); + write_camel_case(k, buf, k_len); } else { write_data(k, buf, k_len); } @@ -473,15 +473,22 @@ impl TransferEncoding { } /// # Safety -/// Callers must ensure that the given length matches given value length. +/// Callers must ensure that the given `len` matches the given `value` length and that `buf` is +/// valid for writes of at least `len` bytes. unsafe fn write_data(value: &[u8], buf: *mut u8, len: usize) { debug_assert_eq!(value.len(), len); copy_nonoverlapping(value.as_ptr(), buf, len); } -fn write_camel_case(value: &[u8], buffer: &mut [u8]) { +/// # Safety +/// Callers must ensure that the given `len` matches the given `value` length and that `buf` is +/// valid for writes of at least `len` bytes. 
+unsafe fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) { // first copy entire (potentially wrong) slice to output - buffer[..value.len()].copy_from_slice(value); + write_data(value, buf, len); + + // SAFETY: We just initialized the buffer with `value` + let buffer = from_raw_parts_mut(buf, len); let mut iter = value.iter(); From 5f412c67db4c65dba51942bd098b58acc8fae035 Mon Sep 17 00:00:00 2001 From: Rob Ede Date: Fri, 13 Aug 2021 18:49:58 +0100 Subject: [PATCH 04/23] clippy --- actix-files/src/error.rs | 1 + actix-http/src/header/map.rs | 2 +- actix-http/src/lib.rs | 2 +- actix-http/src/message.rs | 4 ++-- actix-router/src/path.rs | 6 +++--- actix-router/src/resource.rs | 6 +++--- src/http/header/content_disposition.rs | 2 +- src/middleware/logger.rs | 2 +- src/request.rs | 4 ++-- src/service.rs | 2 +- src/types/either.rs | 4 ++-- src/types/json.rs | 2 +- 12 files changed, 19 insertions(+), 18 deletions(-) diff --git a/actix-files/src/error.rs b/actix-files/src/error.rs index e5f2d4779..f8e32eef7 100644 --- a/actix-files/src/error.rs +++ b/actix-files/src/error.rs @@ -21,6 +21,7 @@ impl ResponseError for FilesError { } } +#[allow(clippy::enum_variant_names)] #[derive(Display, Debug, PartialEq)] pub enum UriSegmentError { /// The segment started with the wrapped invalid character. diff --git a/actix-http/src/header/map.rs b/actix-http/src/header/map.rs index 634d9282f..a8fd9715b 100644 --- a/actix-http/src/header/map.rs +++ b/actix-http/src/header/map.rs @@ -684,7 +684,7 @@ impl<'a> Iterator for Iter<'a> { fn next(&mut self) -> Option { // handle in-progress multi value lists first - if let Some((ref name, ref mut vals)) = self.multi_inner { + if let Some((name, ref mut vals)) = self.multi_inner { match vals.get(self.multi_idx) { Some(val) => { self.multi_idx += 1; diff --git a/actix-http/src/lib.rs b/actix-http/src/lib.rs index d22e1ee44..17ee3ff29 100644 --- a/actix-http/src/lib.rs +++ b/actix-http/src/lib.rs @@ -14,7 +14,7 @@ //! [rustls]: https://crates.io/crates/rustls //! 
[trust-dns]: https://crates.io/crates/trust-dns -#![deny(rust_2018_idioms, nonstandard_style)] +#![deny(rust_2018_idioms, nonstandard_style, clippy::uninit_assumed_init)] #![allow( clippy::type_complexity, clippy::too_many_arguments, diff --git a/actix-http/src/message.rs b/actix-http/src/message.rs index e85d686b7..84125fb3a 100644 --- a/actix-http/src/message.rs +++ b/actix-http/src/message.rs @@ -209,7 +209,7 @@ impl RequestHeadType { impl AsRef for RequestHeadType { fn as_ref(&self) -> &RequestHead { match self { - RequestHeadType::Owned(head) => &head, + RequestHeadType::Owned(head) => head, RequestHeadType::Rc(head, _) => head.as_ref(), } } @@ -363,7 +363,7 @@ impl std::ops::Deref for Message { type Target = T; fn deref(&self) -> &Self::Target { - &self.head.as_ref() + self.head.as_ref() } } diff --git a/actix-router/src/path.rs b/actix-router/src/path.rs index e29591f96..9af7b0b8b 100644 --- a/actix-router/src/path.rs +++ b/actix-router/src/path.rs @@ -125,7 +125,7 @@ impl Path { for (seg_name, val) in self.segments.iter() { if name == seg_name { return match val { - PathItem::Static(ref s) => Some(&s), + PathItem::Static(ref s) => Some(s), PathItem::Segment(s, e) => { Some(&self.path.path()[(*s as usize)..(*e as usize)]) } @@ -183,7 +183,7 @@ impl<'a, T: ResourcePath> Iterator for PathIter<'a, T> { if self.idx < self.params.segment_count() { let idx = self.idx; let res = match self.params.segments[idx].1 { - PathItem::Static(ref s) => &s, + PathItem::Static(ref s) => s, PathItem::Segment(s, e) => &self.params.path.path()[(s as usize)..(e as usize)], }; self.idx += 1; @@ -207,7 +207,7 @@ impl Index for Path { fn index(&self, idx: usize) -> &str { match self.segments[idx].1 { - PathItem::Static(ref s) => &s, + PathItem::Static(ref s) => s, PathItem::Segment(s, e) => &self.path.path()[(s as usize)..(e as usize)], } } diff --git a/actix-router/src/resource.rs b/actix-router/src/resource.rs index 61ff587a5..69e10b2bd 100644 --- a/actix-router/src/resource.rs +++ b/actix-router/src/resource.rs @@ -276,7 +276,7 @@ impl ResourceDef { let mut pattern_data = Vec::new(); for pattern in &patterns { - match ResourceDef::parse(&pattern, false, true) { + match ResourceDef::parse(pattern, false, true) { (PatternType::Dynamic(re, names), _) => { re_set.push(re.as_str().to_owned()); pattern_data.push((re, names)); @@ -790,7 +790,7 @@ impl ResourceDef { profile_section!(pattern_dynamic_extract_captures); for (no, name) in names.iter().enumerate() { - if let Some(m) = captures.name(&name) { + if let Some(m) = captures.name(name) { segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16); } else { log::error!( @@ -820,7 +820,7 @@ impl ResourceDef { }; for (no, name) in names.iter().enumerate() { - if let Some(m) = captures.name(&name) { + if let Some(m) = captures.name(name) { segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16); } else { log::error!("Dynamic path match but not all segments found: {}", name); diff --git a/src/http/header/content_disposition.rs b/src/http/header/content_disposition.rs index 9f67baffb..6e75fde92 100644 --- a/src/http/header/content_disposition.rs +++ b/src/http/header/content_disposition.rs @@ -457,7 +457,7 @@ impl Header for ContentDisposition { fn parse(msg: &T) -> Result { if let Some(h) = msg.headers().get(&Self::name()) { - Self::from_raw(&h) + Self::from_raw(h) } else { Err(crate::error::ParseError::Header) } diff --git a/src/middleware/logger.rs b/src/middleware/logger.rs index bbb0e3dc4..0f09b6ad6 100644 --- a/src/middleware/logger.rs +++ 
b/src/middleware/logger.rs @@ -553,7 +553,7 @@ impl FormatText { *self = FormatText::Str(s.to_string()); } FormatText::RemoteAddr => { - let s = if let Some(ref peer) = req.connection_info().remote_addr() { + let s = if let Some(peer) = req.connection_info().remote_addr() { FormatText::Str((*peer).to_string()) } else { FormatText::Str("-".to_string()) diff --git a/src/request.rs b/src/request.rs index 41c8252a8..59850b4ca 100644 --- a/src/request.rs +++ b/src/request.rs @@ -184,7 +184,7 @@ impl HttpRequest { U: IntoIterator, I: AsRef, { - self.resource_map().url_for(&self, name, elements) + self.resource_map().url_for(self, name, elements) } /// Generate url for named resource @@ -199,7 +199,7 @@ impl HttpRequest { #[inline] /// Get a reference to a `ResourceMap` of current application. pub fn resource_map(&self) -> &ResourceMap { - &self.app_state().rmap() + self.app_state().rmap() } /// Peer socket address. diff --git a/src/service.rs b/src/service.rs index 148199407..48167e5b3 100644 --- a/src/service.rs +++ b/src/service.rs @@ -117,7 +117,7 @@ impl ServiceRequest { /// This method returns reference to the request head #[inline] pub fn head(&self) -> &RequestHead { - &self.req.head() + self.req.head() } /// This method returns reference to the request head diff --git a/src/types/either.rs b/src/types/either.rs index d3b003587..35e63cec9 100644 --- a/src/types/either.rs +++ b/src/types/either.rs @@ -253,7 +253,7 @@ where Ok(bytes) => { let fallback = bytes.clone(); let left = - L::from_request(&this.req, &mut payload_from_bytes(bytes)); + L::from_request(this.req, &mut payload_from_bytes(bytes)); EitherExtractState::Left { left, fallback } } Err(err) => break Err(EitherExtractError::Bytes(err)), @@ -265,7 +265,7 @@ where Ok(extracted) => break Ok(Either::Left(extracted)), Err(left_err) => { let right = R::from_request( - &this.req, + this.req, &mut payload_from_bytes(mem::take(fallback)), ); EitherExtractState::Right { diff --git a/src/types/json.rs b/src/types/json.rs index fc02c8854..ab9708c53 100644 --- a/src/types/json.rs +++ b/src/types/json.rs @@ -425,7 +425,7 @@ where } } None => { - let json = serde_json::from_slice::(&buf) + let json = serde_json::from_slice::(buf) .map_err(JsonPayloadError::Deserialize)?; return Poll::Ready(Ok(json)); } From ff07816b650997b0050811c1fd300c0da1104b59 Mon Sep 17 00:00:00 2001 From: fakeshadow <24548779@qq.com> Date: Sun, 29 Aug 2021 08:42:22 +0800 Subject: [PATCH 05/23] update httparse for uninit header parsing (#2374) --- actix-http/Cargo.toml | 2 +- actix-http/src/h1/decoder.rs | 15 +++++++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/actix-http/Cargo.toml b/actix-http/Cargo.toml index 4ce55dca1..68f980982 100644 --- a/actix-http/Cargo.toml +++ b/actix-http/Cargo.toml @@ -59,7 +59,7 @@ futures-core = { version = "0.3.7", default-features = false, features = ["alloc futures-util = { version = "0.3.7", default-features = false, features = ["alloc", "sink"] } h2 = "0.3.1" http = "0.2.2" -httparse = "1.3" +httparse = "1.5.1" itoa = "0.4" language-tags = "0.3" local-channel = "0.1" diff --git a/actix-http/src/h1/decoder.rs b/actix-http/src/h1/decoder.rs index 313ffd5e0..91a3af44f 100644 --- a/actix-http/src/h1/decoder.rs +++ b/actix-http/src/h1/decoder.rs @@ -1,4 +1,4 @@ -use std::{convert::TryFrom, io, marker::PhantomData, task::Poll}; +use std::{convert::TryFrom, io, marker::PhantomData, mem::MaybeUninit, task::Poll}; use actix_codec::Decoder; use bytes::{Bytes, BytesMut}; @@ -212,10 +212,17 @@ impl MessageType for Request 
{ let mut headers: [HeaderIndex; MAX_HEADERS] = EMPTY_HEADER_INDEX_ARRAY; let (len, method, uri, ver, h_len) = { - let mut parsed: [httparse::Header<'_>; MAX_HEADERS] = EMPTY_HEADER_ARRAY; + // SAFETY: + // Create an uninitialized array of `MaybeUninit`. The `assume_init` is + // safe because the type we are claiming to have initialized here is a + // bunch of `MaybeUninit`s, which do not require initialization. + let mut parsed = unsafe { + MaybeUninit::<[MaybeUninit>; MAX_HEADERS]>::uninit() + .assume_init() + }; - let mut req = httparse::Request::new(&mut parsed); - match req.parse(src)? { + let mut req = httparse::Request::new(&mut []); + match req.parse_with_uninit_headers(src, &mut parsed)? { httparse::Status::Complete(len) => { let method = Method::from_bytes(req.method.unwrap().as_bytes()) .map_err(|_| ParseError::Method)?; From f9da6e48e0aef496001528daa68298ff9107a895 Mon Sep 17 00:00:00 2001 From: Ali MJ Al-Nasrawy Date: Mon, 30 Aug 2021 22:05:49 +0300 Subject: [PATCH 06/23] ResourceDef: define behavior for prefix with trailing slash (#2355) * ResourceDef: define behavior * fix tests * add scope test * revert firestorm bump * update changelog * fmt Co-authored-by: Rob Ede --- actix-files/src/files.rs | 2 +- actix-router/CHANGES.md | 3 + actix-router/src/resource.rs | 169 +++++++++++++++++++---------------- src/scope.rs | 66 ++++++++++++++ 4 files changed, 163 insertions(+), 77 deletions(-) diff --git a/actix-files/src/files.rs b/actix-files/src/files.rs index 49d81eb03..68879822a 100644 --- a/actix-files/src/files.rs +++ b/actix-files/src/files.rs @@ -106,7 +106,7 @@ impl Files { }; Files { - path: mount_path.to_owned(), + path: mount_path.trim_end_matches('/').to_owned(), directory: dir, index: None, show_index: false, diff --git a/actix-router/CHANGES.md b/actix-router/CHANGES.md index dea7cb76f..140d108e2 100644 --- a/actix-router/CHANGES.md +++ b/actix-router/CHANGES.md @@ -5,11 +5,14 @@ * Disallow prefix routes with tail segments. [#379] * Enforce path separators on dynamic prefixes. [#378] * Improve malformed path error message. [#384] +* Prefix segments now always end with with a segment delimiter or end-of-input. [#2355] +* Prefix segments with trailing slashes define a trailing empty segment. [#2355] [#378]: https://github.com/actix/actix-net/pull/378 [#379]: https://github.com/actix/actix-net/pull/379 [#380]: https://github.com/actix/actix-net/pull/380 [#384]: https://github.com/actix/actix-net/pull/384 +[#2355]: https://github.com/actix/actix-web/pull/2355 ## 0.5.0-beta.1 - 2021-07-20 diff --git a/actix-router/src/resource.rs b/actix-router/src/resource.rs index 69e10b2bd..fbf29cc7a 100644 --- a/actix-router/src/resource.rs +++ b/actix-router/src/resource.rs @@ -28,9 +28,27 @@ const REGEX_FLAGS: &str = "(?s-m)"; /// regex engine. /// /// +/// # Pattern Format and Matching Behavior +/// +/// Resource pattern is defined as a string of zero or more _segments_ where each segment is +/// preceeded by a slash `/`. +/// +/// This means that pattern string __must__ either be empty or begin with a slash (`/`). +/// This also implies that a trailing slash in pattern defines an empty segment. +/// For example, the pattern `"/user/"` has two segments: `["user", ""]` +/// +/// A key point to undertand is that `ResourceDef` matches segments, not strings. +/// It matches segments individually. +/// For example, the pattern `/user/` is not considered a prefix for the path `/user/123/456`, +/// because the second segment doesn't match: `["user", ""]` vs `["user", "123", "456"]`. 
+/// +/// This definition is consistent with the definition of absolute URL path in +/// [RFC 3986 (section 3.3)](https://datatracker.ietf.org/doc/html/rfc3986#section-3.3) +/// +/// /// # Static Resources -/// A static resource is the most basic type of definition. Pass a regular string to -/// [new][Self::new]. Conforming paths must match the string exactly. +/// A static resource is the most basic type of definition. Pass a pattern to +/// [new][Self::new]. Conforming paths must match the pattern exactly. /// /// ## Examples /// ``` @@ -39,6 +57,7 @@ const REGEX_FLAGS: &str = "(?s-m)"; /// /// assert!(resource.is_match("/home")); /// +/// assert!(!resource.is_match("/home/")); /// assert!(!resource.is_match("/home/new")); /// assert!(!resource.is_match("/homes")); /// assert!(!resource.is_match("/search")); @@ -85,12 +104,13 @@ const REGEX_FLAGS: &str = "(?s-m)"; /// /// /// # Prefix Resources -/// A prefix resource is defined as pattern that can match just the start of a path. +/// A prefix resource is defined as pattern that can match just the start of a path, up to a +/// segment boundary. /// -/// This library chooses to restrict that definition slightly. In particular, when matching, the -/// prefix must be separated from the remaining part of the path by a `/` character, either at the -/// end of the prefix pattern or at the start of the the remaining slice. In practice, this is not -/// much of a limitation. +/// Prefix patterns with a trailing slash may have an unexpected, though correct, behavior. +/// They define and therefore require an empty segment in order to match. Examples are given below. +/// +/// Empty pattern matches any path as a prefix. /// /// Prefix resources can contain dynamic segments. /// @@ -102,9 +122,12 @@ const REGEX_FLAGS: &str = "(?s-m)"; /// assert!(resource.is_match("/home/new")); /// assert!(!resource.is_match("/homes")); /// +/// // prefix pattern with a trailing slash /// let resource = ResourceDef::prefix("/user/{id}/"); /// assert!(resource.is_match("/user/123/")); -/// assert!(resource.is_match("/user/123/stars")); +/// assert!(resource.is_match("/user/123//stars")); +/// assert!(!resource.is_match("/user/123/stars")); +/// assert!(!resource.is_match("/user/123")); /// ``` /// /// @@ -117,6 +140,10 @@ const REGEX_FLAGS: &str = "(?s-m)"; /// `{name:regex}`. For example, `/user/{id:\d+}` will only match paths where the user ID /// is numeric. /// +/// The regex could potentially match multiple segments. If this is not wanted, then care must be +/// taken to avoid matching a slash `/`. It is guaranteed, however, that the match ends at a +/// segment boundary; the pattern `r"(/|$)` is always appended to the regex. +/// /// By default, dynamic segments use this regex: `[^/]+`. This shows why it is the case, as shown in /// the earlier section, that segments capture a slice of the path up to the next `/` character. /// @@ -298,7 +325,7 @@ impl ResourceDef { } } - /// Constructs a new resource definition using a string pattern that performs prefix matching. + /// Constructs a new resource definition using a pattern that performs prefix matching. 
/// /// More specifically, the regular expressions generated for matching are different when using /// this method vs using `new`; they will not be appended with the `$` meta-character that @@ -320,13 +347,6 @@ impl ResourceDef { /// assert!(!resource.is_match("user/123")); /// assert!(!resource.is_match("user/123/stars")); /// assert!(!resource.is_match("/foo")); - /// - /// let resource = ResourceDef::prefix("user/{id}"); - /// assert!(resource.is_match("user/123")); - /// assert!(resource.is_match("user/123/stars")); - /// assert!(!resource.is_match("/user/123")); - /// assert!(!resource.is_match("/user/123/stars")); - /// assert!(!resource.is_match("foo")); /// ``` pub fn prefix(path: &str) -> Self { profile_method!(prefix); @@ -591,24 +611,7 @@ impl ResourceDef { match self.pat_type { PatternType::Static(ref s) => s == path, - - PatternType::Prefix(ref prefix) if prefix == path => true, - PatternType::Prefix(ref prefix) => is_strict_prefix(prefix, path), - - // dynamic prefix - PatternType::Dynamic(ref re, _) if !re.as_str().ends_with('$') => { - match re.find(path) { - // prefix matches exactly - Some(m) if m.end() == path.len() => true, - - // prefix matches part - Some(m) => is_strict_prefix(m.as_str(), path), - - // prefix does not match - None => false, - } - } - + PatternType::Prefix(ref prefix) => is_prefix(prefix, path), PatternType::Dynamic(ref re, _) => re.is_match(path), PatternType::DynamicSet(ref re, _) => re.is_match(path), } @@ -656,30 +659,15 @@ impl ResourceDef { PatternType::Static(segment) if path == segment => Some(segment.len()), PatternType::Static(_) => None, - PatternType::Prefix(prefix) if path == prefix => Some(prefix.len()), - PatternType::Prefix(prefix) if is_strict_prefix(prefix, path) => Some(prefix.len()), + PatternType::Prefix(prefix) if is_prefix(prefix, path) => Some(prefix.len()), PatternType::Prefix(_) => None, - // dynamic prefix - PatternType::Dynamic(ref re, _) if !re.as_str().ends_with('$') => { - match re.find(path) { - // prefix matches exactly - Some(m) if m.end() == path.len() => Some(m.end()), - - // prefix matches part - Some(m) if is_strict_prefix(m.as_str(), path) => Some(m.end()), - - // prefix does not match - _ => None, - } - } - - PatternType::Dynamic(re, _) => re.find(path).map(|m| m.end()), + PatternType::Dynamic(re, _) => Some(re.captures(path)?[1].len()), PatternType::DynamicSet(re, params) => { let idx = re.matches(path).into_iter().next()?; let (ref pattern, _) = params[idx]; - pattern.find(path).map(|m| m.end()) + Some(pattern.captures(path)?[1].len()) } } } @@ -802,7 +790,7 @@ impl ResourceDef { } }; - (captures[0].len(), Some(names)) + (captures[1].len(), Some(names)) } PatternType::DynamicSet(re, params) => { @@ -828,7 +816,7 @@ impl ResourceDef { } } - (captures[0].len(), Some(names)) + (captures[1].len(), Some(names)) } }; @@ -1112,8 +1100,16 @@ impl ResourceDef { ); } - if !is_prefix && !has_tail_segment { - re.push('$'); + // Store the pattern in capture group #1 to have context info outside it + let mut re = format!("({})", re); + + // Ensure the match ends at a segment boundary + if !has_tail_segment { + if is_prefix { + re.push_str(r"(/|$)"); + } else { + re.push('$'); + } } let re = match Regex::new(&re) { @@ -1185,10 +1181,12 @@ pub(crate) fn insert_slash(path: &str) -> Cow<'_, str> { } /// Returns true if `prefix` acts as a proper prefix (i.e., separated by a slash) in `path`. -/// -/// The `strict` refers to the fact that this will return `false` if `prefix == path`. 
-fn is_strict_prefix(prefix: &str, path: &str) -> bool { - path.starts_with(prefix) && (prefix.ends_with('/') || path[prefix.len()..].starts_with('/')) +fn is_prefix(prefix: &str, path: &str) -> bool { + match path.strip_prefix(prefix) { + // Ensure the match ends at segment boundary + Some(rem) if rem.is_empty() || rem.starts_with('/') => true, + _ => false, + } } #[cfg(test)] @@ -1501,54 +1499,70 @@ mod tests { let re = ResourceDef::prefix("/name/"); assert!(re.is_match("/name/")); - assert!(re.is_match("/name/gs")); + assert!(re.is_match("/name//gs")); + assert!(!re.is_match("/name/gs")); assert!(!re.is_match("/name")); let mut path = Path::new("/name/gs"); + assert!(!re.capture_match_info(&mut path)); + + let mut path = Path::new("/name//gs"); assert!(re.capture_match_info(&mut path)); - assert_eq!(path.unprocessed(), "gs"); + assert_eq!(path.unprocessed(), "/gs"); let re = ResourceDef::root_prefix("name/"); assert!(re.is_match("/name/")); - assert!(re.is_match("/name/gs")); + assert!(re.is_match("/name//gs")); + assert!(!re.is_match("/name/gs")); assert!(!re.is_match("/name")); let mut path = Path::new("/name/gs"); - assert!(re.capture_match_info(&mut path)); - assert_eq!(path.unprocessed(), "gs"); + assert!(!re.capture_match_info(&mut path)); } #[test] fn prefix_dynamic() { - let re = ResourceDef::prefix("/{name}/"); + let re = ResourceDef::prefix("/{name}"); assert!(re.is_prefix()); assert!(re.is_match("/name/")); assert!(re.is_match("/name/gs")); - assert!(!re.is_match("/name")); + assert!(re.is_match("/name")); - assert_eq!(re.find_match("/name/"), Some(6)); - assert_eq!(re.find_match("/name/gs"), Some(6)); - assert_eq!(re.find_match("/name"), None); + assert_eq!(re.find_match("/name/"), Some(5)); + assert_eq!(re.find_match("/name/gs"), Some(5)); + assert_eq!(re.find_match("/name"), Some(5)); + assert_eq!(re.find_match(""), None); let mut path = Path::new("/test2/"); assert!(re.capture_match_info(&mut path)); assert_eq!(&path["name"], "test2"); assert_eq!(&path[0], "test2"); - assert_eq!(path.unprocessed(), ""); + assert_eq!(path.unprocessed(), "/"); let mut path = Path::new("/test2/subpath1/subpath2/index.html"); assert!(re.capture_match_info(&mut path)); assert_eq!(&path["name"], "test2"); assert_eq!(&path[0], "test2"); - assert_eq!(path.unprocessed(), "subpath1/subpath2/index.html"); + assert_eq!(path.unprocessed(), "/subpath1/subpath2/index.html"); let resource = ResourceDef::prefix("/user"); // input string shorter than prefix assert!(resource.find_match("/foo").is_none()); } + #[test] + fn prefix_empty() { + let re = ResourceDef::prefix(""); + + assert!(re.is_prefix()); + + assert!(re.is_match("")); + assert!(re.is_match("/")); + assert!(re.is_match("/name/test/test")); + } + #[test] fn build_path_list() { let mut s = String::new(); @@ -1667,14 +1681,17 @@ mod tests { } #[test] - fn consistent_match_length() { - let result = Some(5); + fn prefix_trailing_slash() { + // The prefix "/abc/" matches two segments: ["user", ""] + // These are not prefixes let re = ResourceDef::prefix("/abc/"); - assert_eq!(re.find_match("/abc/def"), result); + assert_eq!(re.find_match("/abc/def"), None); + assert_eq!(re.find_match("/abc//def"), Some(5)); let re = ResourceDef::prefix("/{id}/"); - assert_eq!(re.find_match("/abc/def"), result); + assert_eq!(re.find_match("/abc/def"), None); + assert_eq!(re.find_match("/abc//def"), Some(5)); } #[test] diff --git a/src/scope.rs b/src/scope.rs index 97db53eeb..b2edaedab 100644 --- a/src/scope.rs +++ b/src/scope.rs @@ -1153,4 +1153,70 @@ mod tests { 
Bytes::from_static(b"http://localhost:8080/a/b/c/12345") ); } + + #[actix_rt::test] + async fn dynamic_scopes() { + let srv = init_service( + App::new().service( + web::scope("/{a}/").service( + web::scope("/{b}/") + .route("", web::get().to(|_: HttpRequest| HttpResponse::Created())) + .route( + "/", + web::get().to(|_: HttpRequest| HttpResponse::Accepted()), + ) + .route("/{c}", web::get().to(|_: HttpRequest| HttpResponse::Ok())), + ), + ), + ) + .await; + + // note the unintuitive behavior with trailing slashes on scopes with dynamic segments + let req = TestRequest::with_uri("/a//b//c").to_request(); + let resp = call_service(&srv, req).await; + assert_eq!(resp.status(), StatusCode::OK); + + let req = TestRequest::with_uri("/a//b/").to_request(); + let resp = call_service(&srv, req).await; + assert_eq!(resp.status(), StatusCode::CREATED); + + let req = TestRequest::with_uri("/a//b//").to_request(); + let resp = call_service(&srv, req).await; + assert_eq!(resp.status(), StatusCode::ACCEPTED); + + let req = TestRequest::with_uri("/a//b//c/d").to_request(); + let resp = call_service(&srv, req).await; + assert_eq!(resp.status(), StatusCode::NOT_FOUND); + + let srv = init_service( + App::new().service( + web::scope("/{a}").service( + web::scope("/{b}") + .route("", web::get().to(|_: HttpRequest| HttpResponse::Created())) + .route( + "/", + web::get().to(|_: HttpRequest| HttpResponse::Accepted()), + ) + .route("/{c}", web::get().to(|_: HttpRequest| HttpResponse::Ok())), + ), + ), + ) + .await; + + let req = TestRequest::with_uri("/a/b/c").to_request(); + let resp = call_service(&srv, req).await; + assert_eq!(resp.status(), StatusCode::OK); + + let req = TestRequest::with_uri("/a/b").to_request(); + let resp = call_service(&srv, req).await; + assert_eq!(resp.status(), StatusCode::CREATED); + + let req = TestRequest::with_uri("/a/b/").to_request(); + let resp = call_service(&srv, req).await; + assert_eq!(resp.status(), StatusCode::ACCEPTED); + + let req = TestRequest::with_uri("/a/b/c/d").to_request(); + let resp = call_service(&srv, req).await; + assert_eq!(resp.status(), StatusCode::NOT_FOUND); + } } From 4bb32fb19b5fb4105e7ca2b7371557d0d21b0346 Mon Sep 17 00:00:00 2001 From: Sam De Roeck <31270289+sadroeck@users.noreply.github.com> Date: Mon, 30 Aug 2021 21:07:12 +0200 Subject: [PATCH 07/23] [fix] Bump actix-http dependency to 3.0.0-beta.9, up from 3.0.0-beta.8 (#2360) Fixes https://rustsec.org/advisories/RUSTSEC-2021-0081 --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index ff3321f47..f2ce46ee1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -76,7 +76,7 @@ actix-utils = "3.0.0" actix-tls = { version = "3.0.0-beta.5", default-features = false, optional = true } actix-web-codegen = "0.5.0-beta.2" -actix-http = "3.0.0-beta.8" +actix-http = "3.0.0-beta.9" ahash = "0.7" bytes = "1" From 168b2f227d1959252dc47518641da7d214a81ed3 Mon Sep 17 00:00:00 2001 From: Aravinth Manivannan Date: Tue, 31 Aug 2021 02:20:40 +0530 Subject: [PATCH 08/23] compile time validation of path (#2350) * compile time validation of path * added trybuild err message * Update Cargo.toml * add changelog entry * test more cases of path validation * fmt Co-authored-by: Rob Ede --- actix-router/src/resource.rs | 5 ++- actix-web-codegen/CHANGES.md | 3 ++ actix-web-codegen/Cargo.toml | 1 + actix-web-codegen/src/route.rs | 2 + actix-web-codegen/tests/trybuild.rs | 1 + .../trybuild/route-malformed-path-fail.rs | 33 +++++++++++++++ .../trybuild/route-malformed-path-fail.stderr | 
42 +++++++++++++++++++ 7 files changed, 86 insertions(+), 1 deletion(-) create mode 100644 actix-web-codegen/tests/trybuild/route-malformed-path-fail.rs create mode 100644 actix-web-codegen/tests/trybuild/route-malformed-path-fail.stderr diff --git a/actix-router/src/resource.rs b/actix-router/src/resource.rs index fbf29cc7a..57ce36804 100644 --- a/actix-router/src/resource.rs +++ b/actix-router/src/resource.rs @@ -967,7 +967,10 @@ impl ResourceDef { _ => false, }) .unwrap_or_else(|| { - panic!(r#"path "{}" contains malformed dynamic segment"#, pattern) + panic!( + r#"pattern "{}" contains malformed dynamic segment"#, + pattern + ) }); let (mut param, mut unprocessed) = pattern.split_at(close_idx + 1); diff --git a/actix-web-codegen/CHANGES.md b/actix-web-codegen/CHANGES.md index a8a901f72..4fd393b4d 100644 --- a/actix-web-codegen/CHANGES.md +++ b/actix-web-codegen/CHANGES.md @@ -1,6 +1,9 @@ # Changes ## Unreleased - 2021-xx-xx +* In routing macros, paths are now validated at compile time. [#2350] + +[#2350]: https://github.com/actix/actix-web/pull/2350 ## 0.5.0-beta.3 - 2021-06-17 diff --git a/actix-web-codegen/Cargo.toml b/actix-web-codegen/Cargo.toml index 4d0fd5e26..66f7acf6d 100644 --- a/actix-web-codegen/Cargo.toml +++ b/actix-web-codegen/Cargo.toml @@ -17,6 +17,7 @@ proc-macro = true quote = "1" syn = { version = "1", features = ["full", "parsing"] } proc-macro2 = "1" +actix-router = "0.5.0-beta.1" [dev-dependencies] actix-rt = "2.2" diff --git a/actix-web-codegen/src/route.rs b/actix-web-codegen/src/route.rs index 747042527..c2f851a0e 100644 --- a/actix-web-codegen/src/route.rs +++ b/actix-web-codegen/src/route.rs @@ -3,6 +3,7 @@ extern crate proc_macro; use std::collections::HashSet; use std::convert::TryFrom; +use actix_router::ResourceDef; use proc_macro::TokenStream; use proc_macro2::{Span, TokenStream as TokenStream2}; use quote::{format_ident, quote, ToTokens, TokenStreamExt}; @@ -101,6 +102,7 @@ impl Args { match arg { NestedMeta::Lit(syn::Lit::Str(lit)) => match path { None => { + let _ = ResourceDef::new(lit.value()); path = Some(lit); } _ => { diff --git a/actix-web-codegen/tests/trybuild.rs b/actix-web-codegen/tests/trybuild.rs index 12e848cf3..c97211e9f 100644 --- a/actix-web-codegen/tests/trybuild.rs +++ b/actix-web-codegen/tests/trybuild.rs @@ -10,6 +10,7 @@ fn compile_macros() { t.compile_fail("tests/trybuild/route-missing-method-fail.rs"); t.compile_fail("tests/trybuild/route-duplicate-method-fail.rs"); t.compile_fail("tests/trybuild/route-unexpected-method-fail.rs"); + t.compile_fail("tests/trybuild/route-malformed-path-fail.rs"); t.pass("tests/trybuild/docstring-ok.rs"); } diff --git a/actix-web-codegen/tests/trybuild/route-malformed-path-fail.rs b/actix-web-codegen/tests/trybuild/route-malformed-path-fail.rs new file mode 100644 index 000000000..1258a6f2f --- /dev/null +++ b/actix-web-codegen/tests/trybuild/route-malformed-path-fail.rs @@ -0,0 +1,33 @@ +use actix_web_codegen::get; + +#[get("/{")] +async fn zero() -> &'static str { + "malformed resource def" +} + +#[get("/{foo")] +async fn one() -> &'static str { + "malformed resource def" +} + +#[get("/{}")] +async fn two() -> &'static str { + "malformed resource def" +} + +#[get("/*")] +async fn three() -> &'static str { + "malformed resource def" +} + +#[get("/{tail:\\d+}*")] +async fn four() -> &'static str { + "malformed resource def" +} + +#[get("/{a}/{b}/{c}/{d}/{e}/{f}/{g}/{h}/{i}/{j}/{k}/{l}/{m}/{n}/{o}/{p}/{q}")] +async fn five() -> &'static str { + "malformed resource def" +} + +fn main() {} diff --git 
a/actix-web-codegen/tests/trybuild/route-malformed-path-fail.stderr b/actix-web-codegen/tests/trybuild/route-malformed-path-fail.stderr new file mode 100644 index 000000000..93c510109 --- /dev/null +++ b/actix-web-codegen/tests/trybuild/route-malformed-path-fail.stderr @@ -0,0 +1,42 @@ +error: custom attribute panicked + --> $DIR/route-malformed-path-fail.rs:3:1 + | +3 | #[get("/{")] + | ^^^^^^^^^^^^ + | + = help: message: pattern "{" contains malformed dynamic segment + +error: custom attribute panicked + --> $DIR/route-malformed-path-fail.rs:8:1 + | +8 | #[get("/{foo")] + | ^^^^^^^^^^^^^^^ + | + = help: message: pattern "{foo" contains malformed dynamic segment + +error: custom attribute panicked + --> $DIR/route-malformed-path-fail.rs:13:1 + | +13 | #[get("/{}")] + | ^^^^^^^^^^^^^ + | + = help: message: Wrong path pattern: "/{}" regex parse error: + ((?s-m)^/(?P<>[^/]+))$ + ^ + error: empty capture group name + +error: custom attribute panicked + --> $DIR/route-malformed-path-fail.rs:23:1 + | +23 | #[get("/{tail:\\d+}*")] + | ^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: message: custom regex is not supported for tail match + +error: custom attribute panicked + --> $DIR/route-malformed-path-fail.rs:28:1 + | +28 | #[get("/{a}/{b}/{c}/{d}/{e}/{f}/{g}/{h}/{i}/{j}/{k}/{l}/{m}/{n}/{o}/{p}/{q}")] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: message: Only 16 dynamic segments are allowed, provided: 17 From 5128b1bdfc0c47fc744f2bc1f417ef5fd0e7f3c1 Mon Sep 17 00:00:00 2001 From: Rob Ede Date: Mon, 30 Aug 2021 23:19:03 +0100 Subject: [PATCH 09/23] bump msrv to 1.51 --- .github/workflows/ci.yml | 2 +- CHANGES.md | 3 +++ README.md | 4 ++-- actix-files/CHANGES.md | 1 + actix-files/README.md | 4 ++-- actix-http-test/CHANGES.md | 1 + actix-http-test/README.md | 4 ++-- actix-http/CHANGES.md | 3 +++ actix-http/README.md | 4 ++-- actix-http/src/body/mod.rs | 2 +- actix-http/src/h1/chunked.rs | 8 ++++---- actix-http/src/h1/dispatcher.rs | 2 +- actix-multipart/CHANGES.md | 1 + actix-multipart/README.md | 4 ++-- actix-router/CHANGES.md | 1 + actix-test/CHANGES.md | 1 + actix-web-actors/CHANGES.md | 1 + actix-web-actors/README.md | 4 ++-- actix-web-codegen/CHANGES.md | 1 + actix-web-codegen/README.md | 4 ++-- actix-web-codegen/tests/trybuild.rs | 2 +- awc/README.md | 2 +- clippy.toml | 2 +- src/lib.rs | 2 +- src/responder.rs | 6 +++--- src/types/query.rs | 4 ++-- 26 files changed, 43 insertions(+), 30 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 22b92759a..221d2fb40 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,7 +16,7 @@ jobs: - { name: macOS, os: macos-latest, triple: x86_64-apple-darwin } - { name: Windows, os: windows-latest, triple: x86_64-pc-windows-msvc } version: - - 1.46.0 # MSRV + - 1.51.0 # MSRV - stable - nightly diff --git a/CHANGES.md b/CHANGES.md index 88295ec12..5325caf48 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -4,6 +4,9 @@ ### Added * Re-export actix-service `ServiceFactory` in `dev` module. [#2325] +### Changes +* Minimum supported Rust version (MSRV) is now 1.51. 
+ [#2325]: https://github.com/actix/actix-web/pull/2325 diff --git a/README.md b/README.md index 309a18466..33784d66a 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ [![crates.io](https://img.shields.io/crates/v/actix-web?label=latest)](https://crates.io/crates/actix-web) [![Documentation](https://docs.rs/actix-web/badge.svg?version=4.0.0-beta.8)](https://docs.rs/actix-web/4.0.0-beta.8) -[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) +[![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-web.svg) [![Dependency Status](https://deps.rs/crate/actix-web/4.0.0-beta.8/status.svg)](https://deps.rs/crate/actix-web/4.0.0-beta.8)
@@ -32,7 +32,7 @@ * SSL support using OpenSSL or Rustls * Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/)) * Includes an async [HTTP client](https://docs.rs/awc/) -* Runs on stable Rust 1.46+ +* Runs on stable Rust 1.51+ ## Documentation diff --git a/actix-files/CHANGES.md b/actix-files/CHANGES.md index db047c44c..533f72291 100644 --- a/actix-files/CHANGES.md +++ b/actix-files/CHANGES.md @@ -1,6 +1,7 @@ # Changes ## Unreleased - 2021-xx-xx +* Minimum supported Rust version (MSRV) is now 1.51. ## 0.6.0-beta.6 - 2021-06-26 diff --git a/actix-files/README.md b/actix-files/README.md index 13c301c56..5815ef563 100644 --- a/actix-files/README.md +++ b/actix-files/README.md @@ -4,7 +4,7 @@ [![crates.io](https://img.shields.io/crates/v/actix-files?label=latest)](https://crates.io/crates/actix-files) [![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.0-beta.6)](https://docs.rs/actix-files/0.6.0-beta.6) -[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) +[![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![License](https://img.shields.io/crates/l/actix-files.svg)
[![dependency status](https://deps.rs/crate/actix-files/0.6.0-beta.6/status.svg)](https://deps.rs/crate/actix-files/0.6.0-beta.6) @@ -15,4 +15,4 @@ - [API Documentation](https://docs.rs/actix-files/) - [Example Project](https://github.com/actix/examples/tree/master/basics/static_index) -- Minimum supported Rust version: 1.46 or later +- Minimum supported Rust version: 1.51 or later diff --git a/actix-http-test/CHANGES.md b/actix-http-test/CHANGES.md index 1dbd9a15b..39b6a3a66 100644 --- a/actix-http-test/CHANGES.md +++ b/actix-http-test/CHANGES.md @@ -1,6 +1,7 @@ # Changes ## Unreleased - 2021-xx-xx +* Minimum supported Rust version (MSRV) is now 1.51. ## 3.0.0-beta.4 - 2021-04-02 diff --git a/actix-http-test/README.md b/actix-http-test/README.md index 74260a352..099fb385d 100644 --- a/actix-http-test/README.md +++ b/actix-http-test/README.md @@ -4,7 +4,7 @@ [![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test) [![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.0.0-beta.4)](https://docs.rs/actix-http-test/3.0.0-beta.4) -[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) +[![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http-test)
[![Dependency Status](https://deps.rs/crate/actix-http-test/3.0.0-beta.4/status.svg)](https://deps.rs/crate/actix-http-test/3.0.0-beta.4) @@ -14,4 +14,4 @@ ## Documentation & Resources - [API Documentation](https://docs.rs/actix-http-test) -- Minimum Supported Rust Version (MSRV): 1.46.0 +- Minimum Supported Rust Version (MSRV): 1.51.0 diff --git a/actix-http/CHANGES.md b/actix-http/CHANGES.md index 9ed28105f..57c09d2d8 100644 --- a/actix-http/CHANGES.md +++ b/actix-http/CHANGES.md @@ -1,12 +1,15 @@ # Changes ## Unreleased - 2021-xx-xx +### Changes +* Minimum supported Rust version (MSRV) is now 1.51. ### Fixed * Remove slice creation pointing to potential uninitialized data on h1 encoder. [#2364] [#2364]: https://github.com/actix/actix-web/pull/2364 + ## 3.0.0-beta.8 - 2021-08-09 ### Fixed * Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977) diff --git a/actix-http/README.md b/actix-http/README.md index 5b06583bc..c509eaff8 100644 --- a/actix-http/README.md +++ b/actix-http/README.md @@ -4,7 +4,7 @@ [![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http) [![Documentation](https://docs.rs/actix-http/badge.svg?version=3.0.0-beta.9)](https://docs.rs/actix-http/3.0.0-beta.9) -[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) +[![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
[![dependency status](https://deps.rs/crate/actix-http/3.0.0-beta.9/status.svg)](https://deps.rs/crate/actix-http/3.0.0-beta.9) @@ -14,7 +14,7 @@ ## Documentation & Resources - [API Documentation](https://docs.rs/actix-http) -- Minimum Supported Rust Version (MSRV): 1.46.0 +- Minimum Supported Rust Version (MSRV): 1.51.0 ## Example diff --git a/actix-http/src/body/mod.rs b/actix-http/src/body/mod.rs index 8a08dbd2b..a60a8895c 100644 --- a/actix-http/src/body/mod.rs +++ b/actix-http/src/body/mod.rs @@ -80,7 +80,7 @@ mod tests { impl Body { pub(crate) fn get_ref(&self) -> &[u8] { match *self { - Body::Bytes(ref bin) => &bin, + Body::Bytes(ref bin) => bin, _ => panic!(), } } diff --git a/actix-http/src/h1/chunked.rs b/actix-http/src/h1/chunked.rs index 1224ce08c..e5b734fff 100644 --- a/actix-http/src/h1/chunked.rs +++ b/actix-http/src/h1/chunked.rs @@ -40,7 +40,7 @@ impl ChunkedState { Size => ChunkedState::read_size(body, size), SizeLws => ChunkedState::read_size_lws(body), Extension => ChunkedState::read_extension(body), - SizeLf => ChunkedState::read_size_lf(body, size), + SizeLf => ChunkedState::read_size_lf(body, *size), Body => ChunkedState::read_body(body, size, buf), BodyCr => ChunkedState::read_body_cr(body), BodyLf => ChunkedState::read_body_lf(body), @@ -113,11 +113,11 @@ impl ChunkedState { } fn read_size_lf( rdr: &mut BytesMut, - size: &mut u64, + size: u64, ) -> Poll> { match byte!(rdr) { - b'\n' if *size > 0 => Poll::Ready(Ok(ChunkedState::Body)), - b'\n' if *size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)), + b'\n' if size > 0 => Poll::Ready(Ok(ChunkedState::Body)), + b'\n' if size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)), _ => Poll::Ready(Err(io::Error::new( io::ErrorKind::InvalidInput, "Invalid chunk size LF", diff --git a/actix-http/src/h1/dispatcher.rs b/actix-http/src/h1/dispatcher.rs index deb25763c..aef765b89 100644 --- a/actix-http/src/h1/dispatcher.rs +++ b/actix-http/src/h1/dispatcher.rs @@ -1060,7 +1060,7 @@ mod tests { fn stabilize_date_header(payload: &mut [u8]) { let mut from = 0; - while let Some(pos) = find_slice(&payload, b"date", from) { + while let Some(pos) = find_slice(payload, b"date", from) { payload[(from + pos)..(from + pos + 35)] .copy_from_slice(b"date: Thu, 01 Jan 1970 12:34:56 UTC"); from += 35; diff --git a/actix-multipart/CHANGES.md b/actix-multipart/CHANGES.md index 0b6affa3c..1e768ddf5 100644 --- a/actix-multipart/CHANGES.md +++ b/actix-multipart/CHANGES.md @@ -1,6 +1,7 @@ # Changes ## Unreleased - 2021-xx-xx +* Minimum supported Rust version (MSRV) is now 1.51. ## 0.4.0-beta.5 - 2021-06-17 diff --git a/actix-multipart/README.md b/actix-multipart/README.md index 78855b815..aed16721c 100644 --- a/actix-multipart/README.md +++ b/actix-multipart/README.md @@ -4,7 +4,7 @@ [![crates.io](https://img.shields.io/crates/v/actix-multipart?label=latest)](https://crates.io/crates/actix-multipart) [![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.4.0-beta.5)](https://docs.rs/actix-multipart/0.4.0-beta.5) -[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) +[![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart.svg)
[![dependency status](https://deps.rs/crate/actix-multipart/0.4.0-beta.5/status.svg)](https://deps.rs/crate/actix-multipart/0.4.0-beta.5) @@ -14,4 +14,4 @@ ## Documentation & Resources - [API Documentation](https://docs.rs/actix-multipart) -- Minimum Supported Rust Version (MSRV): 1.46.0 +- Minimum Supported Rust Version (MSRV): 1.51.0 diff --git a/actix-router/CHANGES.md b/actix-router/CHANGES.md index 140d108e2..804f7778d 100644 --- a/actix-router/CHANGES.md +++ b/actix-router/CHANGES.md @@ -7,6 +7,7 @@ * Improve malformed path error message. [#384] * Prefix segments now always end with with a segment delimiter or end-of-input. [#2355] * Prefix segments with trailing slashes define a trailing empty segment. [#2355] +* Minimum supported Rust version (MSRV) is now 1.51. [#378]: https://github.com/actix/actix-net/pull/378 [#379]: https://github.com/actix/actix-net/pull/379 diff --git a/actix-test/CHANGES.md b/actix-test/CHANGES.md index fa554ba2e..dc76ba3fd 100644 --- a/actix-test/CHANGES.md +++ b/actix-test/CHANGES.md @@ -1,6 +1,7 @@ # Changes ## Unreleased - 2021-xx-xx +* Minimum supported Rust version (MSRV) is now 1.51. ## 0.1.0-beta.3 - 2021-06-20 diff --git a/actix-web-actors/CHANGES.md b/actix-web-actors/CHANGES.md index bf642ef95..084e7b272 100644 --- a/actix-web-actors/CHANGES.md +++ b/actix-web-actors/CHANGES.md @@ -1,6 +1,7 @@ # Changes ## Unreleased - 2021-xx-xx +* Minimum supported Rust version (MSRV) is now 1.51. ## 4.0.0-beta.6 - 2021-06-26 diff --git a/actix-web-actors/README.md b/actix-web-actors/README.md index 5f8f78bde..2858d3f20 100644 --- a/actix-web-actors/README.md +++ b/actix-web-actors/README.md @@ -4,7 +4,7 @@ [![crates.io](https://img.shields.io/crates/v/actix-web-actors?label=latest)](https://crates.io/crates/actix-web-actors) [![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.0.0-beta.6)](https://docs.rs/actix-web-actors/4.0.0-beta.6) -[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) +[![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![License](https://img.shields.io/crates/l/actix-web-actors.svg)
[![dependency status](https://deps.rs/crate/actix-web-actors/4.0.0-beta.6/status.svg)](https://deps.rs/crate/actix-web-actors/4.0.0-beta.6) @@ -14,4 +14,4 @@ ## Documentation & Resources - [API Documentation](https://docs.rs/actix-web-actors) -- Minimum supported Rust version: 1.46 or later +- Minimum supported Rust version: 1.51 or later diff --git a/actix-web-codegen/CHANGES.md b/actix-web-codegen/CHANGES.md index 4fd393b4d..f0a56b30f 100644 --- a/actix-web-codegen/CHANGES.md +++ b/actix-web-codegen/CHANGES.md @@ -2,6 +2,7 @@ ## Unreleased - 2021-xx-xx * In routing macros, paths are now validated at compile time. [#2350] +* Minimum supported Rust version (MSRV) is now 1.51. [#2350]: https://github.com/actix/actix-web/pull/2350 diff --git a/actix-web-codegen/README.md b/actix-web-codegen/README.md index 96e4cb51f..e69cfbbe5 100644 --- a/actix-web-codegen/README.md +++ b/actix-web-codegen/README.md @@ -4,7 +4,7 @@ [![crates.io](https://img.shields.io/crates/v/actix-web-codegen?label=latest)](https://crates.io/crates/actix-web-codegen) [![Documentation](https://docs.rs/actix-web-codegen/badge.svg?version=0.5.0-beta.3)](https://docs.rs/actix-web-codegen/0.5.0-beta.3) -[![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) +[![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![License](https://img.shields.io/crates/l/actix-web-codegen.svg)
[![dependency status](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.3/status.svg)](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.3) @@ -14,7 +14,7 @@ ## Documentation & Resources - [API Documentation](https://docs.rs/actix-web-codegen) -- Minimum supported Rust version: 1.46 or later. +- Minimum supported Rust version: 1.51 or later. ## Compile Testing diff --git a/actix-web-codegen/tests/trybuild.rs b/actix-web-codegen/tests/trybuild.rs index c97211e9f..54bc1caec 100644 --- a/actix-web-codegen/tests/trybuild.rs +++ b/actix-web-codegen/tests/trybuild.rs @@ -1,4 +1,4 @@ -#[rustversion::stable(1.46)] // MSRV +#[rustversion::stable(1.51)] // MSRV #[test] fn compile_macros() { let t = trybuild::TestCases::new(); diff --git a/awc/README.md b/awc/README.md index dd08c6e10..fe91383ca 100644 --- a/awc/README.md +++ b/awc/README.md @@ -12,7 +12,7 @@ - [API Documentation](https://docs.rs/awc) - [Example Project](https://github.com/actix/examples/tree/HEAD/security/awc_https) -- Minimum Supported Rust Version (MSRV): 1.46.0 +- Minimum Supported Rust Version (MSRV): 1.51.0 ## Example diff --git a/clippy.toml b/clippy.toml index eb66960ac..829dd1c59 100644 --- a/clippy.toml +++ b/clippy.toml @@ -1 +1 @@ -msrv = "1.46" +msrv = "1.51" diff --git a/src/lib.rs b/src/lib.rs index 714c759cf..e7cf46361 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -53,7 +53,7 @@ //! * SSL support using OpenSSL or Rustls //! * Middlewares ([Logger, Session, CORS, etc](https://actix.rs/docs/middleware/)) //! * Includes an async [HTTP client](https://docs.rs/awc/) -//! * Runs on stable Rust 1.46+ +//! * Runs on stable Rust 1.51+ //! //! # Crate Features //! * `cookies` - cookies support (enabled by default) diff --git a/src/responder.rs b/src/responder.rs index c5852a501..005bff03e 100644 --- a/src/responder.rs +++ b/src/responder.rs @@ -270,7 +270,7 @@ pub(crate) mod tests { impl BodyTest for Body { fn bin_ref(&self) -> &[u8] { match self { - Body::Bytes(ref bin) => &bin, + Body::Bytes(ref bin) => bin, _ => unreachable!("bug in test impl"), } } @@ -283,11 +283,11 @@ pub(crate) mod tests { fn bin_ref(&self) -> &[u8] { match self { ResponseBody::Body(ref b) => match b { - Body::Bytes(ref bin) => &bin, + Body::Bytes(ref bin) => bin, _ => unreachable!("bug in test impl"), }, ResponseBody::Other(ref b) => match b { - Body::Bytes(ref bin) => &bin, + Body::Bytes(ref bin) => bin, _ => unreachable!("bug in test impl"), }, } diff --git a/src/types/query.rs b/src/types/query.rs index 8762547e6..1e6f1111f 100644 --- a/src/types/query.rs +++ b/src/types/query.rs @@ -213,10 +213,10 @@ mod tests { #[actix_rt::test] async fn test_service_request_extract() { let req = TestRequest::with_uri("/name/user1/").to_srv_request(); - assert!(Query::::from_query(&req.query_string()).is_err()); + assert!(Query::::from_query(req.query_string()).is_err()); let req = TestRequest::with_uri("/name/user1/?id=test").to_srv_request(); - let mut s = Query::::from_query(&req.query_string()).unwrap(); + let mut s = Query::::from_query(req.query_string()).unwrap(); assert_eq!(s.id, "test"); assert_eq!( From ae35e69382805164704d8d7c79f41c85089b3d36 Mon Sep 17 00:00:00 2001 From: Rob Ede Date: Tue, 31 Aug 2021 02:52:29 +0100 Subject: [PATCH 10/23] use rust 1.51 features --- Cargo.toml | 1 + actix-http/src/body/body.rs | 24 +++++++++--------------- actix-http/src/body/response_body.rs | 9 ++------- actix-http/src/encoding/encoder.rs | 13 ++----------- actix-http/src/h1/utils.rs | 5 +---- src/middleware/logger.rs | 1 - 6 files changed, 15 insertions(+), 38 
deletions(-) diff --git a/Cargo.toml b/Cargo.toml index f2ce46ee1..cee401363 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,6 +24,7 @@ name = "actix_web" path = "src/lib.rs" [workspace] +resolver = "2" members = [ ".", "awc", diff --git a/actix-http/src/body/body.rs b/actix-http/src/body/body.rs index f04837d07..cd3e4c5c4 100644 --- a/actix-http/src/body/body.rs +++ b/actix-http/src/body/body.rs @@ -7,7 +7,7 @@ use std::{ }; use bytes::{Bytes, BytesMut}; -use futures_core::{ready, Stream}; +use futures_core::Stream; use crate::error::Error; @@ -74,14 +74,10 @@ impl MessageBody for AnyBody { } } - // TODO: MSRV 1.51: poll_map_err - AnyBody::Message(body) => match ready!(body.as_pin_mut().poll_next(cx)) { - Some(Err(err)) => { - Poll::Ready(Some(Err(Error::new_body().with_cause(err)))) - } - Some(Ok(val)) => Poll::Ready(Some(Ok(val))), - None => Poll::Ready(None), - }, + AnyBody::Message(body) => body + .as_pin_mut() + .poll_next(cx) + .map_err(|err| Error::new_body().with_cause(err)), } } } @@ -223,11 +219,9 @@ impl MessageBody for BoxAnyBody { mut self: Pin<&mut Self>, cx: &mut Context<'_>, ) -> Poll>> { - // TODO: MSRV 1.51: poll_map_err - match ready!(self.0.as_mut().poll_next(cx)) { - Some(Err(err)) => Poll::Ready(Some(Err(Error::new_body().with_cause(err)))), - Some(Ok(val)) => Poll::Ready(Some(Ok(val))), - None => Poll::Ready(None), - } + self.0 + .as_mut() + .poll_next(cx) + .map_err(|err| Error::new_body().with_cause(err)) } } diff --git a/actix-http/src/body/response_body.rs b/actix-http/src/body/response_body.rs index 855c742f2..699ea9384 100644 --- a/actix-http/src/body/response_body.rs +++ b/actix-http/src/body/response_body.rs @@ -5,7 +5,7 @@ use std::{ }; use bytes::Bytes; -use futures_core::{ready, Stream}; +use futures_core::Stream; use pin_project::pin_project; use crate::error::Error; @@ -77,12 +77,7 @@ where cx: &mut Context<'_>, ) -> Poll> { match self.project() { - // TODO: MSRV 1.51: poll_map_err - ResponseBodyProj::Body(body) => match ready!(body.poll_next(cx)) { - Some(Err(err)) => Poll::Ready(Some(Err(err.into()))), - Some(Ok(val)) => Poll::Ready(Some(Ok(val))), - None => Poll::Ready(None), - }, + ResponseBodyProj::Body(body) => body.poll_next(cx).map_err(Into::into), ResponseBodyProj::Other(body) => Pin::new(body).poll_next(cx), } } diff --git a/actix-http/src/encoding/encoder.rs b/actix-http/src/encoding/encoder.rs index 1e69990a0..c39c0e888 100644 --- a/actix-http/src/encoding/encoder.rs +++ b/actix-http/src/encoding/encoder.rs @@ -131,18 +131,9 @@ where Poll::Ready(Some(Ok(std::mem::take(b)))) } } - // TODO: MSRV 1.51: poll_map_err - EncoderBodyProj::Stream(b) => match ready!(b.poll_next(cx)) { - Some(Err(err)) => Poll::Ready(Some(Err(EncoderError::Body(err)))), - Some(Ok(val)) => Poll::Ready(Some(Ok(val))), - None => Poll::Ready(None), - }, + EncoderBodyProj::Stream(b) => b.poll_next(cx).map_err(EncoderError::Body), EncoderBodyProj::BoxedStream(ref mut b) => { - match ready!(b.as_pin_mut().poll_next(cx)) { - Some(Err(err)) => Poll::Ready(Some(Err(EncoderError::Boxed(err)))), - Some(Ok(val)) => Poll::Ready(Some(Ok(val))), - None => Poll::Ready(None), - } + b.as_pin_mut().poll_next(cx).map_err(EncoderError::Boxed) } } } diff --git a/actix-http/src/h1/utils.rs b/actix-http/src/h1/utils.rs index 523e652fd..5fd3cc21c 100644 --- a/actix-http/src/h1/utils.rs +++ b/actix-http/src/h1/utils.rs @@ -63,12 +63,9 @@ where .is_write_buf_full() { let next = - // TODO: MSRV 1.51: poll_map_err match this.body.as_mut().as_pin_mut().unwrap().poll_next(cx) { 
Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)), - Poll::Ready(Some(Err(err))) => { - return Poll::Ready(Err(err.into())) - } + Poll::Ready(Some(Err(err))) => return Poll::Ready(Err(err.into())), Poll::Ready(None) => Poll::Ready(None), Poll::Pending => Poll::Pending, }; diff --git a/src/middleware/logger.rs b/src/middleware/logger.rs index 0f09b6ad6..9574b02f7 100644 --- a/src/middleware/logger.rs +++ b/src/middleware/logger.rs @@ -341,7 +341,6 @@ where ) -> Poll>> { let this = self.project(); - // TODO: MSRV 1.51: poll_map_err match ready!(this.body.poll_next(cx)) { Some(Ok(chunk)) => { *this.size += chunk.len(); From dade818ebaab441e8cc5d359068209daa002b488 Mon Sep 17 00:00:00 2001 From: Rob Ede Date: Tue, 31 Aug 2021 04:18:54 +0100 Subject: [PATCH 11/23] add middleware composition tests (#2375) --- actix-http/CHANGES.md | 2 ++ actix-http/src/body/message_body.rs | 4 --- actix-http/src/encoding/encoder.rs | 4 +-- actix-http/src/h1/utils.rs | 4 ++- src/middleware/mod.rs | 40 +++++++++++++++++++++++++++++ 5 files changed, 46 insertions(+), 8 deletions(-) diff --git a/actix-http/CHANGES.md b/actix-http/CHANGES.md index 57c09d2d8..63172e56d 100644 --- a/actix-http/CHANGES.md +++ b/actix-http/CHANGES.md @@ -6,8 +6,10 @@ ### Fixed * Remove slice creation pointing to potential uninitialized data on h1 encoder. [#2364] +* Remove `Into` bound on `Encoder` body types. [#2375] [#2364]: https://github.com/actix/actix-web/pull/2364 +[#2375]: https://github.com/actix/actix-web/pull/2375 ## 3.0.0-beta.8 - 2021-08-09 diff --git a/actix-http/src/body/message_body.rs b/actix-http/src/body/message_body.rs index 2d2642ba7..edb4c550c 100644 --- a/actix-http/src/body/message_body.rs +++ b/actix-http/src/body/message_body.rs @@ -11,8 +11,6 @@ use bytes::{Bytes, BytesMut}; use futures_core::ready; use pin_project_lite::pin_project; -use crate::error::Error; - use super::BodySize; /// An interface for response bodies. 
@@ -47,7 +45,6 @@ impl MessageBody for () { impl MessageBody for Box where B: MessageBody + Unpin, - B::Error: Into, { type Error = B::Error; @@ -66,7 +63,6 @@ where impl MessageBody for Pin> where B: MessageBody, - B::Error: Into, { type Error = B::Error; diff --git a/actix-http/src/encoding/encoder.rs b/actix-http/src/encoding/encoder.rs index c39c0e888..abd8cedba 100644 --- a/actix-http/src/encoding/encoder.rs +++ b/actix-http/src/encoding/encoder.rs @@ -29,7 +29,7 @@ use crate::{ header::{ContentEncoding, CONTENT_ENCODING}, HeaderValue, StatusCode, }, - Error, ResponseHead, + ResponseHead, }; use super::Writer; @@ -107,7 +107,6 @@ enum EncoderBody { impl MessageBody for EncoderBody where B: MessageBody, - B::Error: Into, { type Error = EncoderError; @@ -142,7 +141,6 @@ where impl MessageBody for Encoder where B: MessageBody, - B::Error: Into, { type Error = EncoderError; diff --git a/actix-http/src/h1/utils.rs b/actix-http/src/h1/utils.rs index 5fd3cc21c..2547f4494 100644 --- a/actix-http/src/h1/utils.rs +++ b/actix-http/src/h1/utils.rs @@ -65,7 +65,9 @@ where let next = match this.body.as_mut().as_pin_mut().unwrap().poll_next(cx) { Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(item)), - Poll::Ready(Some(Err(err))) => return Poll::Ready(Err(err.into())), + Poll::Ready(Some(Err(err))) => { + return Poll::Ready(Err(err.into())) + } Poll::Ready(None) => Poll::Ready(None), Poll::Pending => Poll::Pending, }; diff --git a/src/middleware/mod.rs b/src/middleware/mod.rs index 96a361fcf..d19cb64e9 100644 --- a/src/middleware/mod.rs +++ b/src/middleware/mod.rs @@ -19,3 +19,43 @@ mod compress; #[cfg(feature = "__compress")] pub use self::compress::Compress; + +#[cfg(test)] +mod tests { + use crate::{http::StatusCode, App}; + + use super::*; + + #[test] + fn common_combinations() { + // ensure there's no reason that the built-in middleware cannot compose + + let _ = App::new() + .wrap(Compat::new(Logger::default())) + .wrap(Condition::new(true, DefaultHeaders::new())) + .wrap(DefaultHeaders::new().header("X-Test2", "X-Value2")) + .wrap(ErrorHandlers::new().handler(StatusCode::FORBIDDEN, |res| { + Ok(ErrorHandlerResponse::Response(res)) + })) + .wrap(Logger::default()) + .wrap(NormalizePath::new(TrailingSlash::Trim)); + + let _ = App::new() + .wrap(NormalizePath::new(TrailingSlash::Trim)) + .wrap(Logger::default()) + .wrap(ErrorHandlers::new().handler(StatusCode::FORBIDDEN, |res| { + Ok(ErrorHandlerResponse::Response(res)) + })) + .wrap(DefaultHeaders::new().header("X-Test2", "X-Value2")) + .wrap(Condition::new(true, DefaultHeaders::new())) + .wrap(Compat::new(Logger::default())); + + #[cfg(feature = "__compress")] + { + let _ = App::new().wrap(Compress::default()).wrap(Logger::default()); + let _ = App::new().wrap(Logger::default()).wrap(Compress::default()); + let _ = App::new().wrap(Compat::new(Compress::default())); + let _ = App::new().wrap(Condition::new(true, Compat::new(Compress::default()))); + } + } +} From c50eef61664e0614255d02924d43f18c4630e447 Mon Sep 17 00:00:00 2001 From: Rob Ede Date: Tue, 31 Aug 2021 04:07:53 +0100 Subject: [PATCH 12/23] "deprecate" calls to NormalizePath::default --- MIGRATION.md | 3 ++- src/middleware/normalize.rs | 24 +++++++++++++++++++++--- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/MIGRATION.md b/MIGRATION.md index 785974366..9a70adb95 100644 --- a/MIGRATION.md +++ b/MIGRATION.md @@ -3,7 +3,8 @@ * The default `NormalizePath` behavior now strips trailing slashes by default. 
This was previously documented to be the case in v3 but the behavior now matches. The effect is that routes defined with trailing slashes will become inaccessible when - using `NormalizePath::default()`. + using `NormalizePath::default()`. As such, calling `NormalizePath::default()` will log a warning. + It is advised that the `new` method be used instead. Before: `#[get("/test/")]` After: `#[get("/test")]` diff --git a/src/middleware/normalize.rs b/src/middleware/normalize.rs index 219af1c6a..8ad0bb3f0 100644 --- a/src/middleware/normalize.rs +++ b/src/middleware/normalize.rs @@ -59,7 +59,7 @@ impl Default for TrailingSlash { /// /// # actix_web::rt::System::new().block_on(async { /// let app = App::new() -/// .wrap(middleware::NormalizePath::default()) +/// .wrap(middleware::NormalizePath::trim()) /// .route("/test", web::get().to(|| async { "test" })) /// .route("/unmatchable/", web::get().to(|| async { "unmatchable" })); /// @@ -85,13 +85,31 @@ impl Default for TrailingSlash { /// assert_eq!(res.status(), StatusCode::NOT_FOUND); /// # }) /// ``` -#[derive(Debug, Clone, Copy, Default)] +#[derive(Debug, Clone, Copy)] pub struct NormalizePath(TrailingSlash); +impl Default for NormalizePath { + fn default() -> Self { + log::warn!( + "`NormalizePath::default()` is deprecated. The default trailing slash behavior changed \ + in v4 from `Always` to `Trim`. Update your call to `NormalizePath::new(...)`." + ); + + Self(TrailingSlash::Trim) + } +} + impl NormalizePath { /// Create new `NormalizePath` middleware with the specified trailing slash style. pub fn new(trailing_slash_style: TrailingSlash) -> Self { - NormalizePath(trailing_slash_style) + Self(trailing_slash_style) + } + + /// Constructs a new `NormalizePath` middleware with [trim](TrailingSlash::Trim) semantics. + /// + /// Use this instead of `NormalizePath::default()` to avoid deprecation warning. + pub fn trim() -> Self { + Self::new(TrailingSlash::Trim) } } From 7d01ece3556e77c0555f4e7da6c8699d8fc34fb1 Mon Sep 17 00:00:00 2001 From: Ali MJ Al-Nasrawy Date: Tue, 31 Aug 2021 16:15:22 +0300 Subject: [PATCH 13/23] ResourceDef: support multiple-patterns as prefix (#2356) Co-authored-by: Rob Ede --- actix-router/CHANGES.md | 4 + actix-router/src/resource.rs | 253 +++++++++++++++++------------------ 2 files changed, 128 insertions(+), 129 deletions(-) diff --git a/actix-router/CHANGES.md b/actix-router/CHANGES.md index 804f7778d..990382512 100644 --- a/actix-router/CHANGES.md +++ b/actix-router/CHANGES.md @@ -7,6 +7,9 @@ * Improve malformed path error message. [#384] * Prefix segments now always end with with a segment delimiter or end-of-input. [#2355] * Prefix segments with trailing slashes define a trailing empty segment. [#2355] +* Support multi-pattern prefixes and joins. [#2356] +* `ResourceDef::pattern` now returns the first pattern in multi-pattern resources. [#2356] +* Support `build_resource_path` on multi-pattern resources. [#2356] * Minimum supported Rust version (MSRV) is now 1.51. 
[#378]: https://github.com/actix/actix-net/pull/378 @@ -14,6 +17,7 @@ [#380]: https://github.com/actix/actix-net/pull/380 [#384]: https://github.com/actix/actix-net/pull/384 [#2355]: https://github.com/actix/actix-web/pull/2355 +[#2356]: https://github.com/actix/actix-web/pull/2356 ## 0.5.0-beta.1 - 2021-07-20 diff --git a/actix-router/src/resource.rs b/actix-router/src/resource.rs index 57ce36804..be54336e9 100644 --- a/actix-router/src/resource.rs +++ b/actix-router/src/resource.rs @@ -31,13 +31,13 @@ const REGEX_FLAGS: &str = "(?s-m)"; /// # Pattern Format and Matching Behavior /// /// Resource pattern is defined as a string of zero or more _segments_ where each segment is -/// preceeded by a slash `/`. +/// preceded by a slash `/`. /// /// This means that pattern string __must__ either be empty or begin with a slash (`/`). /// This also implies that a trailing slash in pattern defines an empty segment. /// For example, the pattern `"/user/"` has two segments: `["user", ""]` /// -/// A key point to undertand is that `ResourceDef` matches segments, not strings. +/// A key point to understand is that `ResourceDef` matches segments, not strings. /// It matches segments individually. /// For example, the pattern `/user/` is not considered a prefix for the path `/user/123/456`, /// because the second segment doesn't match: `["user", ""]` vs `["user", "123", "456"]`. @@ -220,17 +220,15 @@ pub struct ResourceDef { name: Option, /// Pattern that generated the resource definition. - /// - /// `None` when pattern type is `DynamicSet`. patterns: Patterns, + is_prefix: bool, + /// Pattern type. pat_type: PatternType, /// List of segments that compose the pattern, in order. - /// - /// `None` when pattern type is `DynamicSet`. - segments: Option>, + segments: Vec, } #[derive(Debug, Clone, PartialEq)] @@ -248,9 +246,6 @@ enum PatternType { /// Single constant/literal segment. Static(String), - /// Single constant/literal prefix segment. - Prefix(String), - /// Single regular expression and list of dynamic segment names. Dynamic(Regex, Vec<&'static str>), @@ -284,45 +279,7 @@ impl ResourceDef { /// ``` pub fn new(paths: T) -> Self { profile_method!(new); - - match paths.patterns() { - Patterns::Single(pattern) => ResourceDef::from_single_pattern(&pattern, false), - - // since zero length pattern sets are possible - // just return a useless `ResourceDef` - Patterns::List(patterns) if patterns.is_empty() => ResourceDef { - id: 0, - name: None, - patterns: Patterns::List(patterns), - pat_type: PatternType::DynamicSet(RegexSet::empty(), Vec::new()), - segments: None, - }, - - Patterns::List(patterns) => { - let mut re_set = Vec::with_capacity(patterns.len()); - let mut pattern_data = Vec::new(); - - for pattern in &patterns { - match ResourceDef::parse(pattern, false, true) { - (PatternType::Dynamic(re, names), _) => { - re_set.push(re.as_str().to_owned()); - pattern_data.push((re, names)); - } - _ => unreachable!(), - } - } - - let pattern_re_set = RegexSet::new(re_set).unwrap(); - - ResourceDef { - id: 0, - name: None, - patterns: Patterns::List(patterns), - pat_type: PatternType::DynamicSet(pattern_re_set, pattern_data), - segments: None, - } - } - } + Self::new2(paths, false) } /// Constructs a new resource definition using a pattern that performs prefix matching. 
@@ -348,9 +305,9 @@ impl ResourceDef { /// assert!(!resource.is_match("user/123/stars")); /// assert!(!resource.is_match("/foo")); /// ``` - pub fn prefix(path: &str) -> Self { + pub fn prefix(paths: T) -> Self { profile_method!(prefix); - ResourceDef::from_single_pattern(path, true) + ResourceDef::new2(paths, true) } /// Constructs a new resource definition using a string pattern that performs prefix matching, @@ -375,7 +332,7 @@ impl ResourceDef { /// ``` pub fn root_prefix(path: &str) -> Self { profile_method!(root_prefix); - ResourceDef::prefix(&insert_slash(path)) + ResourceDef::prefix(insert_slash(path).into_owned()) } /// Returns a numeric resource ID. @@ -453,17 +410,14 @@ impl ResourceDef { /// assert!(!ResourceDef::new("/user").is_prefix()); /// ``` pub fn is_prefix(&self) -> bool { - match &self.pat_type { - PatternType::Prefix(_) => true, - PatternType::Dynamic(re, _) if !re.as_str().ends_with('$') => true, - _ => false, - } + self.is_prefix } /// Returns the pattern string that generated the resource definition. /// - /// Returns `None` if definition was constructed with multiple patterns. - /// See [`patterns_iter`][Self::pattern_iter]. + /// If definition is constructed with multiple patterns, the first pattern is returned. To get + /// all patterns, use [`patterns_iter`][Self::pattern_iter]. If resource has 0 patterns, + /// returns `None`. /// /// # Examples /// ``` @@ -472,11 +426,11 @@ impl ResourceDef { /// assert_eq!(resource.pattern().unwrap(), "/user/{id}"); /// /// let mut resource = ResourceDef::new(["/profile", "/user/{id}"]); - /// assert!(resource.pattern().is_none()); + /// assert_eq!(resource.pattern(), Some("/profile")); pub fn pattern(&self) -> Option<&str> { match &self.patterns { Patterns::Single(pattern) => Some(pattern.as_str()), - Patterns::List(_) => None, + Patterns::List(patterns) => patterns.first().map(AsRef::as_ref), } } @@ -563,8 +517,8 @@ impl ResourceDef { .collect::>(); match patterns.len() { - 1 => ResourceDef::from_single_pattern(&patterns[0], other.is_prefix()), - _ => ResourceDef::new(patterns), + 1 => ResourceDef::new2(&patterns[0], other.is_prefix()), + _ => ResourceDef::new2(patterns, other.is_prefix()), } } @@ -609,11 +563,10 @@ impl ResourceDef { // `self.find_match(path).is_some()` // but this skips some checks and uses potentially faster regex methods - match self.pat_type { - PatternType::Static(ref s) => s == path, - PatternType::Prefix(ref prefix) => is_prefix(prefix, path), - PatternType::Dynamic(ref re, _) => re.is_match(path), - PatternType::DynamicSet(ref re, _) => re.is_match(path), + match &self.pat_type { + PatternType::Static(pattern) => self.static_match(pattern, path).is_some(), + PatternType::Dynamic(re, _) => re.is_match(path), + PatternType::DynamicSet(re, _) => re.is_match(path), } } @@ -656,11 +609,7 @@ impl ResourceDef { profile_method!(find_match); match &self.pat_type { - PatternType::Static(segment) if path == segment => Some(segment.len()), - PatternType::Static(_) => None, - - PatternType::Prefix(prefix) if is_prefix(prefix, path) => Some(prefix.len()), - PatternType::Prefix(_) => None, + PatternType::Static(pattern) => self.static_match(pattern, path), PatternType::Dynamic(re, _) => Some(re.captures(path)?[1].len()), @@ -753,10 +702,10 @@ impl ResourceDef { let path_str = path.path(); let (matched_len, matched_vars) = match &self.pat_type { - PatternType::Static(_) | PatternType::Prefix(_) => { + PatternType::Static(pattern) => { profile_section!(pattern_static_or_prefix); - match self.find_match(path_str) { 
+ match self.static_match(pattern, path_str) { Some(len) => (len, None), None => return false, } @@ -844,13 +793,10 @@ impl ResourceDef { F: FnMut(&str) -> Option, I: AsRef, { - for el in match self.segments { - Some(ref segments) => segments, - None => return false, - } { - match *el { - PatternSegment::Const(ref val) => path.push_str(val), - PatternSegment::Var(ref name) => match vars(name) { + for segment in &self.segments { + match segment { + PatternSegment::Const(val) => path.push_str(val), + PatternSegment::Var(name) => match vars(name) { Some(val) => path.push_str(val.as_ref()), _ => return false, }, @@ -864,8 +810,8 @@ impl ResourceDef { /// /// Returns `true` on success. /// - /// Resource paths can not be built from multi-pattern resources; this call will always return - /// false and will not add anything to the string buffer. + /// For multi-pattern resources, the first pattern is used under the assumption that it would be + /// equivalent to any other choice. /// /// # Examples /// ``` @@ -890,8 +836,8 @@ impl ResourceDef { /// /// Returns `true` on success. /// - /// Resource paths can not be built from multi-pattern resources; this call will always return - /// false and will not add anything to the string buffer. + /// For multi-pattern resources, the first pattern is used under the assumption that it would be + /// equivalent to any other choice. /// /// # Examples /// ``` @@ -921,19 +867,69 @@ impl ResourceDef { self.build_resource_path(path, |name| values.get(name).map(AsRef::::as_ref)) } - /// Parse path pattern and create a new instance. - fn from_single_pattern(pattern: &str, is_prefix: bool) -> Self { - profile_method!(from_single_pattern); + /// Returns true if `prefix` acts as a proper prefix (i.e., separated by a slash) in `path`. 
+ fn static_match(&self, pattern: &str, path: &str) -> Option { + let rem = path.strip_prefix(pattern)?; - let pattern = pattern.to_owned(); - let (pat_type, segments) = ResourceDef::parse(&pattern, is_prefix, false); + match self.is_prefix { + // resource is not a prefix so an exact match is needed + false if rem.is_empty() => Some(pattern.len()), + + // resource is a prefix so rem should start with a path delimiter + true if rem.is_empty() || rem.starts_with('/') => Some(pattern.len()), + + // otherwise, no match + _ => None, + } + } + + fn new2(paths: T, is_prefix: bool) -> Self { + profile_method!(new2); + + let patterns = paths.patterns(); + let (pat_type, segments) = match &patterns { + Patterns::Single(pattern) => ResourceDef::parse(pattern, is_prefix, false), + + // since zero length pattern sets are possible + // just return a useless `ResourceDef` + Patterns::List(patterns) if patterns.is_empty() => ( + PatternType::DynamicSet(RegexSet::empty(), Vec::new()), + Vec::new(), + ), + + Patterns::List(patterns) => { + let mut re_set = Vec::with_capacity(patterns.len()); + let mut pattern_data = Vec::new(); + let mut segments = None; + + for pattern in patterns { + match ResourceDef::parse(pattern, is_prefix, true) { + (PatternType::Dynamic(re, names), segs) => { + re_set.push(re.as_str().to_owned()); + pattern_data.push((re, names)); + segments.get_or_insert(segs); + } + _ => unreachable!(), + } + } + + let pattern_re_set = RegexSet::new(re_set).unwrap(); + let segments = segments.unwrap_or_else(Vec::new); + + ( + PatternType::DynamicSet(pattern_re_set, pattern_data), + segments, + ) + } + }; ResourceDef { id: 0, name: None, - patterns: Patterns::Single(pattern), + patterns, + is_prefix, pat_type, - segments: Some(segments), + segments, } } @@ -1023,20 +1019,15 @@ impl ResourceDef { ) -> (PatternType, Vec) { profile_method!(parse); - let mut unprocessed = pattern; - - if !force_dynamic && unprocessed.find('{').is_none() && !unprocessed.ends_with('*') { + if !force_dynamic && pattern.find('{').is_none() && !pattern.ends_with('*') { // pattern is static - - let tp = if is_prefix { - PatternType::Prefix(unprocessed.to_owned()) - } else { - PatternType::Static(unprocessed.to_owned()) - }; - - return (tp, vec![PatternSegment::Const(unprocessed.to_owned())]); + return ( + PatternType::Static(pattern.to_owned()), + vec![PatternSegment::Const(pattern.to_owned())], + ); } + let mut unprocessed = pattern; let mut segments = Vec::new(); let mut re = format!("{}^", REGEX_FLAGS); let mut dyn_segment_count = 0; @@ -1137,18 +1128,7 @@ impl Eq for ResourceDef {} impl PartialEq for ResourceDef { fn eq(&self, other: &ResourceDef) -> bool { - self.patterns == other.patterns - && match &self.pat_type { - PatternType::Static(_) => matches!(&other.pat_type, PatternType::Static(_)), - PatternType::Prefix(_) => matches!(&other.pat_type, PatternType::Prefix(_)), - PatternType::Dynamic(re, _) => match &other.pat_type { - PatternType::Dynamic(other_re, _) => re.as_str() == other_re.as_str(), - _ => false, - }, - PatternType::DynamicSet(_, _) => { - matches!(&other.pat_type, PatternType::DynamicSet(..)) - } - } + self.patterns == other.patterns && self.is_prefix == other.is_prefix } } @@ -1183,15 +1163,6 @@ pub(crate) fn insert_slash(path: &str) -> Cow<'_, str> { } } -/// Returns true if `prefix` acts as a proper prefix (i.e., separated by a slash) in `path`. 
-fn is_prefix(prefix: &str, path: &str) -> bool { - match path.strip_prefix(prefix) { - // Ensure the match ends at segment boundary - Some(rem) if rem.is_empty() || rem.starts_with('/') => true, - _ => false, - } -} - #[cfg(test)] mod tests { use super::*; @@ -1376,6 +1347,24 @@ mod tests { assert!(!re.is_match("/user/2345/sdg")); } + #[test] + fn dynamic_set_prefix() { + let re = ResourceDef::prefix(vec!["/u/{id}", "/{id:[[:digit:]]{3}}"]); + + assert_eq!(re.find_match("/u/abc"), Some(6)); + assert_eq!(re.find_match("/u/abc/123"), Some(6)); + assert_eq!(re.find_match("/s/user/profile"), None); + + assert_eq!(re.find_match("/123"), Some(4)); + assert_eq!(re.find_match("/123/456"), Some(4)); + assert_eq!(re.find_match("/12345"), None); + + let mut path = Path::new("/151/res"); + assert!(re.capture_match_info(&mut path)); + assert_eq!(path.get("id").unwrap(), "151"); + assert_eq!(path.unprocessed(), "/res"); + } + #[test] fn parse_tail() { let re = ResourceDef::new("/user/-{id}*"); @@ -1602,10 +1591,11 @@ mod tests { } #[test] - fn multi_pattern_cannot_build_path() { + fn multi_pattern_build_path() { let resource = ResourceDef::new(["/user/{id}", "/profile/{id}"]); let mut s = String::new(); - assert!(!resource.resource_path_from_iter(&mut s, &mut ["123"].iter())); + assert!(resource.resource_path_from_iter(&mut s, &mut ["123"].iter())); + assert_eq!(s, "/user/123"); } #[test] @@ -1738,8 +1728,12 @@ mod tests { join_test!("", "" => "", "/hello", "/"); join_test!("/user", "" => "", "/user", "/user/123", "/user11", "user", "user/123"); - join_test!("", "/user"=> "", "/user", "foo", "/user11", "user", "user/123"); - join_test!("/user", "/xx"=> "", "", "/", "/user", "/xx", "/userxx", "/user/xx"); + join_test!("", "/user" => "", "/user", "foo", "/user11", "user", "user/123"); + join_test!("/user", "/xx" => "", "", "/", "/user", "/xx", "/userxx", "/user/xx"); + + join_test!(["/ver/{v}", "/v{v}"], ["/req/{req}", "/{req}"] => "/v1/abc", + "/ver/1/abc", "/v1/req/abc", "/ver/1/req/abc", "/v1/abc/def", + "/ver1/req/abc/def", "", "/", "/v1/"); } #[test] @@ -1777,6 +1771,7 @@ mod tests { match_methods_agree!(prefix "" => "", "/", "/foo"); match_methods_agree!(prefix "/user" => "user", "/user", "/users", "/user/123", "/foo"); match_methods_agree!(prefix r"/id/{id:\d{3}}" => "/id/123", "/id/1234"); + match_methods_agree!(["/v{v}", "/ver/{v}"] => "", "s/v", "/v1", "/v1/xx", "/ver/i3/5", "/ver/1"); } #[test] From 373b3f91dff58ac6c5b1158206e7380376906541 Mon Sep 17 00:00:00 2001 From: Ali MJ Al-Nasrawy Date: Wed, 1 Sep 2021 06:48:43 +0300 Subject: [PATCH 14/23] rework `ResourceMap` internals (#2337) --- src/app_service.rs | 4 +- src/request.rs | 9 +- src/rmap.rs | 422 ++++++++++++++++++++++++--------------------- 3 files changed, 233 insertions(+), 202 deletions(-) diff --git a/src/app_service.rs b/src/app_service.rs index ce52543b8..cf34b302e 100644 --- a/src/app_service.rs +++ b/src/app_service.rs @@ -79,7 +79,7 @@ where .into_iter() .for_each(|mut srv| srv.register(&mut config)); - let mut rmap = ResourceMap::new(ResourceDef::new("")); + let mut rmap = ResourceMap::new(ResourceDef::prefix("")); let (config, services) = config.into_services(); @@ -104,7 +104,7 @@ where // complete ResourceMap tree creation let rmap = Rc::new(rmap); - rmap.finish(rmap.clone()); + ResourceMap::finish(&rmap); // construct all async data factory futures let factory_futs = join_all(self.async_data_factories.iter().map(|f| f())); diff --git a/src/request.rs b/src/request.rs index 59850b4ca..c25a5397a 100644 --- a/src/request.rs 
+++ b/src/request.rs @@ -511,7 +511,7 @@ mod tests { let mut res = ResourceDef::new("/user/{name}.{ext}"); res.set_name("index"); - let mut rmap = ResourceMap::new(ResourceDef::new("")); + let mut rmap = ResourceMap::new(ResourceDef::prefix("")); rmap.add(&mut res, None); assert!(rmap.has_resource("/user/test.html")); assert!(!rmap.has_resource("/test/unknown")); @@ -541,7 +541,7 @@ mod tests { let mut rdef = ResourceDef::new("/index.html"); rdef.set_name("index"); - let mut rmap = ResourceMap::new(ResourceDef::new("")); + let mut rmap = ResourceMap::new(ResourceDef::prefix("")); rmap.add(&mut rdef, None); assert!(rmap.has_resource("/index.html")); @@ -562,7 +562,7 @@ mod tests { let mut rdef = ResourceDef::new("/index.html"); rdef.set_name("index"); - let mut rmap = ResourceMap::new(ResourceDef::new("")); + let mut rmap = ResourceMap::new(ResourceDef::prefix("")); rmap.add(&mut rdef, None); assert!(rmap.has_resource("/index.html")); @@ -581,9 +581,8 @@ mod tests { rdef.set_name("youtube"); - let mut rmap = ResourceMap::new(ResourceDef::new("")); + let mut rmap = ResourceMap::new(ResourceDef::prefix("")); rmap.add(&mut rdef, None); - assert!(rmap.has_resource("https://youtube.com/watch/unknown")); let req = TestRequest::default().rmap(rmap).to_http_request(); let url = req.url_for("youtube", &["oHg5SJYRHA0"]); diff --git a/src/rmap.rs b/src/rmap.rs index 0ee4de47e..8466eda28 100644 --- a/src/rmap.rs +++ b/src/rmap.rs @@ -10,43 +10,75 @@ use crate::request::HttpRequest; #[derive(Clone, Debug)] pub struct ResourceMap { - root: ResourceDef, + pattern: ResourceDef, + + /// Named resources within the tree or, for external resources, + /// it points to isolated nodes outside the tree. + named: AHashMap>, + parent: RefCell>, - named: AHashMap, - patterns: Vec<(ResourceDef, Option>)>, + + /// Must be `None` for "edge" nodes. + nodes: Option>>, } impl ResourceMap { + /// Creates a _container_ node in the `ResourceMap` tree. pub fn new(root: ResourceDef) -> Self { ResourceMap { - root, - parent: RefCell::new(Weak::new()), + pattern: root, named: AHashMap::default(), - patterns: Vec::new(), + parent: RefCell::new(Weak::new()), + nodes: Some(Vec::new()), } } + /// Adds a (possibly nested) resource. + /// + /// To add a non-prefix pattern, `nested` must be `None`. + /// To add external resource, supply a pattern without a leading `/`. + /// The root pattern of `nested`, if present, should match `pattern`. 
pub fn add(&mut self, pattern: &mut ResourceDef, nested: Option>) { - pattern.set_id(self.patterns.len() as u16); - self.patterns.push((pattern.clone(), nested)); - if let Some(name) = pattern.name() { - self.named.insert(name.to_owned(), pattern.clone()); + pattern.set_id(self.nodes.as_ref().unwrap().len() as u16); + + if let Some(new_node) = nested { + assert_eq!(&new_node.pattern, pattern, "`patern` and `nested` mismatch"); + self.named.extend(new_node.named.clone().into_iter()); + self.nodes.as_mut().unwrap().push(new_node); + } else { + let new_node = Rc::new(ResourceMap { + pattern: pattern.clone(), + named: AHashMap::default(), + parent: RefCell::new(Weak::new()), + nodes: None, + }); + + if let Some(name) = pattern.name() { + self.named.insert(name.to_owned(), Rc::clone(&new_node)); + } + + let is_external = match pattern.pattern() { + Some(p) => !p.is_empty() && !p.starts_with('/'), + None => false, + }; + + // Don't add external resources to the tree + if !is_external { + self.nodes.as_mut().unwrap().push(new_node); + } } } - pub(crate) fn finish(&self, current: Rc) { - for (_, nested) in &self.patterns { - if let Some(ref nested) = nested { - *nested.parent.borrow_mut() = Rc::downgrade(¤t); - nested.finish(nested.clone()); - } + pub(crate) fn finish(self: &Rc) { + for node in self.nodes.iter().flatten() { + node.parent.replace(Rc::downgrade(self)); + ResourceMap::finish(node); } } /// Generate url for named resource /// - /// Check [`HttpRequest::url_for()`](../struct.HttpRequest.html#method. - /// url_for) for detailed information. + /// Check [`HttpRequest::url_for`] for detailed information. pub fn url_for( &self, req: &HttpRequest, @@ -57,197 +89,97 @@ impl ResourceMap { U: IntoIterator, I: AsRef, { - let mut path = String::new(); let mut elements = elements.into_iter(); - if self.patterns_for(name, &mut path, &mut elements)?.is_some() { - if path.starts_with('/') { - let conn = req.connection_info(); - Ok(Url::parse(&format!( - "{}://{}{}", - conn.scheme(), - conn.host(), - path - ))?) - } else { - Ok(Url::parse(&path)?) - } + let path = self + .named + .get(name) + .ok_or(UrlGenerationError::ResourceNotFound)? + .root_rmap_fn(String::with_capacity(24), |mut acc, node| { + node.pattern + .resource_path_from_iter(&mut acc, &mut elements) + .then(|| acc) + }) + .ok_or(UrlGenerationError::NotEnoughElements)?; + + if path.starts_with('/') { + let conn = req.connection_info(); + Ok(Url::parse(&format!( + "{}://{}{}", + conn.scheme(), + conn.host(), + path + ))?) } else { - Err(UrlGenerationError::ResourceNotFound) + Ok(Url::parse(&path)?) } } pub fn has_resource(&self, path: &str) -> bool { - let path = if path.is_empty() { "/" } else { path }; - - for (pattern, rmap) in &self.patterns { - if let Some(ref rmap) = rmap { - if let Some(pat_len) = pattern.find_match(path) { - return rmap.has_resource(&path[pat_len..]); - } - } else if pattern.is_match(path) || pattern.pattern() == Some("") && path == "/" { - return true; - } - } - false + self.find_matching_node(path).is_some() } /// Returns the name of the route that matches the given path or None if no full match - /// is possible. + /// is possible or the matching resource is not named. 
pub fn match_name(&self, path: &str) -> Option<&str> { - let path = if path.is_empty() { "/" } else { path }; - - for (pattern, rmap) in &self.patterns { - if let Some(ref rmap) = rmap { - if let Some(plen) = pattern.find_match(path) { - return rmap.match_name(&path[plen..]); - } - } else if pattern.is_match(path) { - return pattern.name(); - } - } - - None + self.find_matching_node(path)?.pattern.name() } /// Returns the full resource pattern matched against a path or None if no full match /// is possible. pub fn match_pattern(&self, path: &str) -> Option { - let path = if path.is_empty() { "/" } else { path }; - - // ensure a full match exists - if !self.has_resource(path) { - return None; - } - - Some(self.traverse_resource_pattern(path)) + self.find_matching_node(path)?.root_rmap_fn( + String::with_capacity(24), + |mut acc, node| { + acc.push_str(node.pattern.pattern()?); + Some(acc) + }, + ) } - /// Takes remaining path and tries to match it up against a resource definition within the - /// current resource map recursively, returning a concatenation of all resource prefixes and - /// patterns matched in the tree. - /// - /// Should only be used after checking the resource exists in the map so that partial match - /// patterns are not returned. - fn traverse_resource_pattern(&self, remaining: &str) -> String { - for (pattern, rmap) in &self.patterns { - if let Some(ref rmap) = rmap { - if let Some(prefix_len) = pattern.find_match(remaining) { - // TODO: think about unwrap_or - let prefix = pattern.pattern().unwrap_or("").to_owned(); - - return [ - prefix, - rmap.traverse_resource_pattern(&remaining[prefix_len..]), - ] - .concat(); - } - } else if pattern.is_match(remaining) { - // TODO: think about unwrap_or - return pattern.pattern().unwrap_or("").to_owned(); - } - } - - String::new() + fn find_matching_node(&self, path: &str) -> Option<&ResourceMap> { + self._find_matching_node(path).flatten() } - fn patterns_for( - &self, - name: &str, - path: &mut String, - elements: &mut U, - ) -> Result, UrlGenerationError> + /// Returns `None` if root pattern doesn't match; + /// `Some(None)` if root pattern matches but there is no matching child pattern. + /// Don't search sideways when `Some(none)` is returned. + fn _find_matching_node(&self, path: &str) -> Option> { + let matched_len = self.pattern.find_match(path)?; + let path = &path[matched_len..]; + + Some(match &self.nodes { + // find first sub-node to match remaining path + Some(nodes) => nodes + .iter() + .filter_map(|node| node._find_matching_node(path)) + .next() + .flatten(), + + // only terminate at edge nodes + None => Some(self), + }) + } + + /// Find `self`'s highest ancestor and then run `F`, providing `B`, in that rmap context. + fn root_rmap_fn(&self, init: B, mut f: F) -> Option where - U: Iterator, - I: AsRef, + F: FnMut(B, &ResourceMap) -> Option, { - if self.pattern_for(name, path, elements)?.is_some() { - Ok(Some(())) - } else { - self.parent_pattern_for(name, path, elements) - } + self._root_rmap_fn(init, &mut f) } - fn pattern_for( - &self, - name: &str, - path: &mut String, - elements: &mut U, - ) -> Result, UrlGenerationError> + /// Run `F`, providing `B`, if `self` is top-level resource map, else recurse to parent map. 
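+    ///
+    /// A minimal illustration, using the tree built in the `url_for` test below
+    /// (`"" -> "/user" -> "/{user_id}" -> "/post/{sub_id}"`): the fold visits the root first and
+    /// `self` last, which is how `url_for` assembles the full path `/user/u123/post/foobar` and
+    /// how `match_pattern` assembles the full matched pattern of a nested resource.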
+ fn _root_rmap_fn(&self, init: B, f: &mut F) -> Option where - U: Iterator, - I: AsRef, + F: FnMut(B, &ResourceMap) -> Option, { - if let Some(pattern) = self.named.get(name) { - if pattern - .pattern() - .map(|pat| pat.starts_with('/')) - .unwrap_or(false) - { - self.fill_root(path, elements)?; - } + let data = match self.parent.borrow().upgrade() { + Some(ref parent) => parent._root_rmap_fn(init, f)?, + None => init, + }; - if pattern.resource_path_from_iter(path, elements) { - Ok(Some(())) - } else { - Err(UrlGenerationError::NotEnoughElements) - } - } else { - for (_, rmap) in &self.patterns { - if let Some(ref rmap) = rmap { - if rmap.pattern_for(name, path, elements)?.is_some() { - return Ok(Some(())); - } - } - } - Ok(None) - } - } - - fn fill_root( - &self, - path: &mut String, - elements: &mut U, - ) -> Result<(), UrlGenerationError> - where - U: Iterator, - I: AsRef, - { - if let Some(ref parent) = self.parent.borrow().upgrade() { - parent.fill_root(path, elements)?; - } - - if self.root.resource_path_from_iter(path, elements) { - Ok(()) - } else { - Err(UrlGenerationError::NotEnoughElements) - } - } - - fn parent_pattern_for( - &self, - name: &str, - path: &mut String, - elements: &mut U, - ) -> Result, UrlGenerationError> - where - U: Iterator, - I: AsRef, - { - if let Some(ref parent) = self.parent.borrow().upgrade() { - if let Some(pattern) = parent.named.get(name) { - self.fill_root(path, elements)?; - if pattern.resource_path_from_iter(path, elements) { - Ok(Some(())) - } else { - Err(UrlGenerationError::NotEnoughElements) - } - } else { - parent.parent_pattern_for(name, path, elements) - } - } else { - Ok(None) - } + f(data, self) } } @@ -259,7 +191,7 @@ mod tests { fn extract_matched_pattern() { let mut root = ResourceMap::new(ResourceDef::root_prefix("")); - let mut user_map = ResourceMap::new(ResourceDef::root_prefix("")); + let mut user_map = ResourceMap::new(ResourceDef::root_prefix("/user/{id}")); user_map.add(&mut ResourceDef::new("/"), None); user_map.add(&mut ResourceDef::new("/profile"), None); user_map.add(&mut ResourceDef::new("/article/{id}"), None); @@ -275,9 +207,10 @@ mod tests { &mut ResourceDef::root_prefix("/user/{id}"), Some(Rc::new(user_map)), ); + root.add(&mut ResourceDef::new("/info"), None); let root = Rc::new(root); - root.finish(Rc::clone(&root)); + ResourceMap::finish(&root); // sanity check resource map setup @@ -288,7 +221,7 @@ mod tests { assert!(root.has_resource("/v2")); assert!(!root.has_resource("/v33")); - assert!(root.has_resource("/user/22")); + assert!(!root.has_resource("/user/22")); assert!(root.has_resource("/user/22/")); assert!(root.has_resource("/user/22/profile")); @@ -336,7 +269,7 @@ mod tests { rdef.set_name("root_info"); root.add(&mut rdef, None); - let mut user_map = ResourceMap::new(ResourceDef::root_prefix("")); + let mut user_map = ResourceMap::new(ResourceDef::root_prefix("/user/{id}")); let mut rdef = ResourceDef::new("/"); user_map.add(&mut rdef, None); @@ -350,14 +283,14 @@ mod tests { ); let root = Rc::new(root); - root.finish(Rc::clone(&root)); + ResourceMap::finish(&root); // sanity check resource map setup assert!(root.has_resource("/info")); assert!(!root.has_resource("/bar")); - assert!(root.has_resource("/user/22")); + assert!(!root.has_resource("/user/22")); assert!(root.has_resource("/user/22/")); assert!(root.has_resource("/user/22/post/55")); @@ -377,7 +310,7 @@ mod tests { // ref: https://github.com/actix/actix-web/issues/1582 let mut root = ResourceMap::new(ResourceDef::root_prefix("")); - let mut 
user_map = ResourceMap::new(ResourceDef::root_prefix("")); + let mut user_map = ResourceMap::new(ResourceDef::root_prefix("/user/{id}")); user_map.add(&mut ResourceDef::new("/"), None); user_map.add(&mut ResourceDef::new("/profile"), None); user_map.add(&mut ResourceDef::new("/article/{id}"), None); @@ -393,20 +326,119 @@ mod tests { ); let root = Rc::new(root); - root.finish(Rc::clone(&root)); + ResourceMap::finish(&root); // check root has no parent assert!(root.parent.borrow().upgrade().is_none()); // check child has parent reference - assert!(root.patterns[0].1.is_some()); + assert!(root.nodes.as_ref().unwrap()[0] + .parent + .borrow() + .upgrade() + .is_some()); // check child's parent root id matches root's root id - assert_eq!( - root.patterns[0].1.as_ref().unwrap().root.id(), - root.root.id() - ); + assert!(Rc::ptr_eq( + &root.nodes.as_ref().unwrap()[0] + .parent + .borrow() + .upgrade() + .unwrap(), + &root + )); let output = format!("{:?}", root); assert!(output.starts_with("ResourceMap {")); assert!(output.ends_with(" }")); } + + #[test] + fn short_circuit() { + let mut root = ResourceMap::new(ResourceDef::prefix("")); + + let mut user_root = ResourceDef::prefix("/user"); + let mut user_map = ResourceMap::new(user_root.clone()); + user_map.add(&mut ResourceDef::new("/u1"), None); + user_map.add(&mut ResourceDef::new("/u2"), None); + + root.add(&mut ResourceDef::new("/user/u3"), None); + root.add(&mut user_root, Some(Rc::new(user_map))); + root.add(&mut ResourceDef::new("/user/u4"), None); + + let rmap = Rc::new(root); + ResourceMap::finish(&rmap); + + assert!(rmap.has_resource("/user/u1")); + assert!(rmap.has_resource("/user/u2")); + assert!(rmap.has_resource("/user/u3")); + assert!(!rmap.has_resource("/user/u4")); + } + + #[test] + fn url_for() { + let mut root = ResourceMap::new(ResourceDef::prefix("")); + + let mut user_scope_rdef = ResourceDef::prefix("/user"); + let mut user_scope_map = ResourceMap::new(user_scope_rdef.clone()); + + let mut user_rdef = ResourceDef::new("/{user_id}"); + let mut user_map = ResourceMap::new(user_rdef.clone()); + + let mut post_rdef = ResourceDef::new("/post/{sub_id}"); + post_rdef.set_name("post"); + + user_map.add(&mut post_rdef, None); + user_scope_map.add(&mut user_rdef, Some(Rc::new(user_map))); + root.add(&mut user_scope_rdef, Some(Rc::new(user_scope_map))); + + let rmap = Rc::new(root); + ResourceMap::finish(&rmap); + + let mut req = crate::test::TestRequest::default(); + req.set_server_hostname("localhost:8888"); + let req = req.to_http_request(); + + let url = rmap + .url_for(&req, "post", &["u123", "foobar"]) + .unwrap() + .to_string(); + assert_eq!(url, "http://localhost:8888/user/u123/post/foobar"); + + assert!(rmap.url_for(&req, "missing", &["u123"]).is_err()); + } + + #[test] + fn external_resource_with_no_name() { + let mut root = ResourceMap::new(ResourceDef::prefix("")); + + let mut rdef = ResourceDef::new("https://duck.com/{query}"); + root.add(&mut rdef, None); + + let rmap = Rc::new(root); + ResourceMap::finish(&rmap); + + assert!(!rmap.has_resource("https://duck.com/abc")); + } + + #[test] + fn external_resource_with_name() { + let mut root = ResourceMap::new(ResourceDef::prefix("")); + + let mut rdef = ResourceDef::new("https://duck.com/{query}"); + rdef.set_name("duck"); + root.add(&mut rdef, None); + + let rmap = Rc::new(root); + ResourceMap::finish(&rmap); + + assert!(!rmap.has_resource("https://duck.com/abc")); + + let mut req = crate::test::TestRequest::default(); + req.set_server_hostname("localhost:8888"); + let req 
= req.to_http_request(); + + assert_eq!( + rmap.url_for(&req, "duck", &["abcd"]).unwrap().to_string(), + "https://duck.com/abcd" + ); + } } From ddc8c16cb34c9730b9a4922413fbf027e401052d Mon Sep 17 00:00:00 2001 From: Arthur Le Moigne Date: Wed, 1 Sep 2021 10:08:29 +0200 Subject: [PATCH 15/23] Fix quality parse error in Accept-Encoding HTTP header (#2344) --- CHANGES.md | 7 +- actix-http/CHANGES.md | 5 +- actix-http/src/encoding/decoder.rs | 3 +- .../src/header/shared/content_encoding.rs | 56 ++--- actix-http/src/header/shared/quality_item.rs | 29 ++- src/middleware/compress.rs | 217 +++++++++++++++--- tests/test_server.rs | 19 ++ 7 files changed, 259 insertions(+), 77 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 5325caf48..217ec4f78 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -4,10 +4,15 @@ ### Added * Re-export actix-service `ServiceFactory` in `dev` module. [#2325] -### Changes +### Changed * Minimum supported Rust version (MSRV) is now 1.51. +* Compress middleware will return 406 Not Acceptable when no content encoding is acceptable to the client. [#2344] + +### Fixed +* Fix quality parse error in Accept-Encoding header. [#2344] [#2325]: https://github.com/actix/actix-web/pull/2325 +[#2344]: https://github.com/actix/actix-web/pull/2344 ## 4.0.0-beta.8 - 2021-06-26 diff --git a/actix-http/CHANGES.md b/actix-http/CHANGES.md index 63172e56d..f4efef54a 100644 --- a/actix-http/CHANGES.md +++ b/actix-http/CHANGES.md @@ -1,22 +1,23 @@ # Changes ## Unreleased - 2021-xx-xx -### Changes +### Changed * Minimum supported Rust version (MSRV) is now 1.51. ### Fixed * Remove slice creation pointing to potential uninitialized data on h1 encoder. [#2364] * Remove `Into` bound on `Encoder` body types. [#2375] +* Fix quality parse error in Accept-Encoding header. [#2344] [#2364]: https://github.com/actix/actix-web/pull/2364 [#2375]: https://github.com/actix/actix-web/pull/2375 +[#2344]: https://github.com/actix/actix-web/pull/2344 ## 3.0.0-beta.8 - 2021-08-09 ### Fixed * Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977) - ## 3.0.0-beta.8 - 2021-06-26 ### Changed * Change compression algorithm features flags. [#2250] diff --git a/actix-http/src/encoding/decoder.rs b/actix-http/src/encoding/decoder.rs index d3e304836..81e97d916 100644 --- a/actix-http/src/encoding/decoder.rs +++ b/actix-http/src/encoding/decoder.rs @@ -1,6 +1,7 @@ //! Stream decoders. use std::{ + convert::TryFrom, future::Future, io::{self, Write as _}, pin::Pin, @@ -80,7 +81,7 @@ where let encoding = headers .get(&CONTENT_ENCODING) .and_then(|val| val.to_str().ok()) - .map(ContentEncoding::from) + .and_then(|x| ContentEncoding::try_from(x).ok()) .unwrap_or(ContentEncoding::Identity); Self::new(stream, encoding) diff --git a/actix-http/src/header/shared/content_encoding.rs b/actix-http/src/header/shared/content_encoding.rs index b9c1d2795..375e8c2fa 100644 --- a/actix-http/src/header/shared/content_encoding.rs +++ b/actix-http/src/header/shared/content_encoding.rs @@ -1,4 +1,4 @@ -use std::{convert::Infallible, str::FromStr}; +use std::{convert::TryFrom, error, fmt, str::FromStr}; use http::header::InvalidHeaderValue; @@ -8,6 +8,20 @@ use crate::{ HttpMessage, }; +/// Error return when a content encoding is unknown. 
+/// +/// Example: 'compress' +#[derive(Debug)] +pub struct ContentEncodingParseError; + +impl fmt::Display for ContentEncodingParseError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Unsupported content encoding") + } +} + +impl error::Error for ContentEncodingParseError {} + /// Represents a supported content encoding. #[derive(Copy, Clone, PartialEq, Debug)] pub enum ContentEncoding { @@ -37,7 +51,7 @@ impl ContentEncoding { matches!(self, ContentEncoding::Identity | ContentEncoding::Auto) } - /// Convert content encoding to string + /// Convert content encoding to string. #[inline] pub fn as_str(self) -> &'static str { match self { @@ -48,18 +62,6 @@ impl ContentEncoding { ContentEncoding::Identity | ContentEncoding::Auto => "identity", } } - - /// Default Q-factor (quality) value. - #[inline] - pub fn quality(self) -> f64 { - match self { - ContentEncoding::Br => 1.1, - ContentEncoding::Gzip => 1.0, - ContentEncoding::Deflate => 0.9, - ContentEncoding::Identity | ContentEncoding::Auto => 0.1, - ContentEncoding::Zstd => 0.0, - } - } } impl Default for ContentEncoding { @@ -69,31 +71,33 @@ impl Default for ContentEncoding { } impl FromStr for ContentEncoding { - type Err = Infallible; + type Err = ContentEncodingParseError; fn from_str(val: &str) -> Result { - Ok(Self::from(val)) - } -} - -impl From<&str> for ContentEncoding { - fn from(val: &str) -> ContentEncoding { let val = val.trim(); if val.eq_ignore_ascii_case("br") { - ContentEncoding::Br + Ok(ContentEncoding::Br) } else if val.eq_ignore_ascii_case("gzip") { - ContentEncoding::Gzip + Ok(ContentEncoding::Gzip) } else if val.eq_ignore_ascii_case("deflate") { - ContentEncoding::Deflate + Ok(ContentEncoding::Deflate) } else if val.eq_ignore_ascii_case("zstd") { - ContentEncoding::Zstd + Ok(ContentEncoding::Zstd) } else { - ContentEncoding::default() + Err(ContentEncodingParseError) } } } +impl TryFrom<&str> for ContentEncoding { + type Error = ContentEncodingParseError; + + fn try_from(val: &str) -> Result { + val.parse() + } +} + impl IntoHeaderValue for ContentEncoding { type Error = InvalidHeaderValue; diff --git a/actix-http/src/header/shared/quality_item.rs b/actix-http/src/header/shared/quality_item.rs index 240a0afa2..63fa02e7b 100644 --- a/actix-http/src/header/shared/quality_item.rs +++ b/actix-http/src/header/shared/quality_item.rs @@ -1,11 +1,14 @@ use std::{ cmp, convert::{TryFrom, TryInto}, - fmt, str, + fmt, + str::{self, FromStr}, }; use derive_more::{Display, Error}; +use crate::error::ParseError; + const MAX_QUALITY: u16 = 1000; const MAX_FLOAT_QUALITY: f32 = 1.0; @@ -113,12 +116,12 @@ impl fmt::Display for QualityItem { } } -impl str::FromStr for QualityItem { - type Err = crate::error::ParseError; +impl FromStr for QualityItem { + type Err = ParseError; - fn from_str(qitem_str: &str) -> Result, crate::error::ParseError> { + fn from_str(qitem_str: &str) -> Result { if !qitem_str.is_ascii() { - return Err(crate::error::ParseError::Header); + return Err(ParseError::Header); } // Set defaults used if parsing fails. @@ -139,7 +142,7 @@ impl str::FromStr for QualityItem { if parts[0].len() < 2 { // Can't possibly be an attribute since an attribute needs at least a name followed // by an equals sign. And bare identifiers are forbidden. 
- return Err(crate::error::ParseError::Header); + return Err(ParseError::Header); } let start = &parts[0][0..2]; @@ -148,25 +151,21 @@ impl str::FromStr for QualityItem { let q_val = &parts[0][2..]; if q_val.len() > 5 { // longer than 5 indicates an over-precise q-factor - return Err(crate::error::ParseError::Header); + return Err(ParseError::Header); } - let q_value = q_val - .parse::() - .map_err(|_| crate::error::ParseError::Header)?; + let q_value = q_val.parse::().map_err(|_| ParseError::Header)?; if (0f32..=1f32).contains(&q_value) { quality = q_value; raw_item = parts[1]; } else { - return Err(crate::error::ParseError::Header); + return Err(ParseError::Header); } } } - let item = raw_item - .parse::() - .map_err(|_| crate::error::ParseError::Header)?; + let item = raw_item.parse::().map_err(|_| ParseError::Header)?; // we already checked above that the quality is within range Ok(QualityItem::new(item, Quality::from_f32(quality))) @@ -224,7 +223,7 @@ mod tests { } } - impl str::FromStr for Encoding { + impl FromStr for Encoding { type Err = crate::error::ParseError; fn from_str(s: &str) -> Result { use Encoding::*; diff --git a/src/middleware/compress.rs b/src/middleware/compress.rs index a9128bc47..0e61a8e7e 100644 --- a/src/middleware/compress.rs +++ b/src/middleware/compress.rs @@ -2,10 +2,10 @@ use std::{ cmp, + convert::TryFrom, future::Future, marker::PhantomData, pin::Pin, - str::FromStr, task::{Context, Poll}, }; @@ -13,16 +13,18 @@ use actix_http::{ body::{MessageBody, ResponseBody}, encoding::Encoder, http::header::{ContentEncoding, ACCEPT_ENCODING}, + StatusCode, }; use actix_service::{Service, Transform}; -use actix_utils::future::{ok, Ready}; +use actix_utils::future::{ok, Either, Ready}; use futures_core::ready; +use once_cell::sync::Lazy; use pin_project::pin_project; use crate::{ dev::BodyEncoding, service::{ServiceRequest, ServiceResponse}, - Error, + Error, HttpResponse, }; /// Middleware for compressing response payloads. 
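Taken together, the hunks above make content-encoding parsing fallible: `FromStr` and `TryFrom<&str>` for `ContentEncoding` now reject unknown tokens such as `compress` instead of silently falling back to the default, and the middleware hunk below turns a fully unacceptable `Accept-Encoding` header into a 406 response. A minimal sketch of the new parsing behaviour, assuming the `actix_http::http::header::ContentEncoding` path shown in the middleware imports (assertions mirror the hunk above, not independently verified against published docs):

```rust
use std::{convert::TryFrom, str::FromStr};

use actix_http::http::header::ContentEncoding;

fn main() {
    // Known encodings still parse; matching trims whitespace and is case-insensitive.
    assert_eq!(ContentEncoding::from_str(" GZIP ").unwrap(), ContentEncoding::Gzip);
    assert_eq!(ContentEncoding::try_from("br").unwrap(), ContentEncoding::Br);

    // Unknown encodings now yield `ContentEncodingParseError` instead of the default variant.
    assert!(ContentEncoding::from_str("compress").is_err());
}
```

With this in place, a request carrying `Accept-Encoding: compress, identity;q=0` can no longer be satisfied, which is exactly the case the `test_accept_encoding_no_match` integration test added later in this patch exercises.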
@@ -78,34 +80,78 @@ pub struct CompressMiddleware { encoding: ContentEncoding, } +static SUPPORTED_ALGORITHM_NAMES: Lazy = Lazy::new(|| { + let mut encoding = vec![]; + + #[cfg(feature = "compress-brotli")] + { + encoding.push("br"); + } + + #[cfg(feature = "compress-gzip")] + { + encoding.push("gzip"); + encoding.push("deflate"); + } + + #[cfg(feature = "compress-zstd")] + encoding.push("zstd"); + + assert!( + !encoding.is_empty(), + "encoding can not be empty unless __compress feature has been explicitly enabled by itself" + ); + + encoding.join(", ") +}); + impl Service for CompressMiddleware where - B: MessageBody, S: Service, Error = Error>, + B: MessageBody, { type Response = ServiceResponse>>; type Error = Error; - type Future = CompressResponse; + type Future = Either, Ready>>; actix_service::forward_ready!(service); #[allow(clippy::borrow_interior_mutable_const)] fn call(&self, req: ServiceRequest) -> Self::Future { // negotiate content-encoding - let encoding = if let Some(val) = req.headers().get(&ACCEPT_ENCODING) { - if let Ok(enc) = val.to_str() { - AcceptEncoding::parse(enc, self.encoding) - } else { - ContentEncoding::Identity - } - } else { - ContentEncoding::Identity - }; + let encoding_result = req + .headers() + .get(&ACCEPT_ENCODING) + .and_then(|val| val.to_str().ok()) + .map(|enc| AcceptEncoding::try_parse(enc, self.encoding)); - CompressResponse { - encoding, - fut: self.service.call(req), - _phantom: PhantomData, + match encoding_result { + // Missing header => fallback to identity + None => Either::left(CompressResponse { + encoding: ContentEncoding::Identity, + fut: self.service.call(req), + _phantom: PhantomData, + }), + + // Valid encoding + Some(Ok(encoding)) => Either::left(CompressResponse { + encoding, + fut: self.service.call(req), + _phantom: PhantomData, + }), + + // There is an HTTP header but we cannot match what client as asked for + Some(Err(_)) => { + let res = HttpResponse::with_body( + StatusCode::NOT_ACCEPTABLE, + SUPPORTED_ALGORITHM_NAMES.as_str(), + ); + let enc = ContentEncoding::Identity; + + Either::right(ok(req.into_response(res.map_body(move |head, body| { + Encoder::response(enc, head, ResponseBody::Other(body.into())) + })))) + } } } } @@ -114,7 +160,6 @@ where pub struct CompressResponse where S: Service, - B: MessageBody, { #[pin] fut: S::Future, @@ -151,6 +196,7 @@ where struct AcceptEncoding { encoding: ContentEncoding, + // TODO: use Quality or QualityItem quality: f64, } @@ -177,26 +223,56 @@ impl PartialOrd for AcceptEncoding { impl PartialEq for AcceptEncoding { fn eq(&self, other: &AcceptEncoding) -> bool { - self.quality == other.quality + self.encoding == other.encoding && self.quality == other.quality } } +/// Parse q-factor from quality strings. +/// +/// If parse fail, then fallback to default value which is 1. +/// More details available here: +fn parse_quality(parts: &[&str]) -> f64 { + for part in parts { + if part.trim().starts_with("q=") { + return part[2..].parse().unwrap_or(1.0); + } + } + + 1.0 +} + +#[derive(Debug, PartialEq, Eq)] +enum AcceptEncodingError { + /// This error occurs when client only support compressed response and server do not have any + /// algorithm that match client accepted algorithms. 
+ CompressionAlgorithmMismatch, +} + impl AcceptEncoding { fn new(tag: &str) -> Option { let parts: Vec<&str> = tag.split(';').collect(); let encoding = match parts.len() { 0 => return None, - _ => ContentEncoding::from(parts[0]), - }; - let quality = match parts.len() { - 1 => encoding.quality(), - _ => f64::from_str(parts[1]).unwrap_or(0.0), + _ => match ContentEncoding::try_from(parts[0]) { + Err(_) => return None, + Ok(x) => x, + }, }; + + let quality = parse_quality(&parts[1..]); + if quality <= 0.0 || quality > 1.0 { + return None; + } + Some(AcceptEncoding { encoding, quality }) } - /// Parse a raw Accept-Encoding header value into an ordered list. - pub fn parse(raw: &str, encoding: ContentEncoding) -> ContentEncoding { + /// Parse a raw Accept-Encoding header value into an ordered list then return the best match + /// based on middleware configuration. + pub fn try_parse( + raw: &str, + encoding: ContentEncoding, + ) -> Result { let mut encodings = raw .replace(' ', "") .split(',') @@ -206,13 +282,90 @@ impl AcceptEncoding { encodings.sort(); for enc in encodings { - if encoding == ContentEncoding::Auto { - return enc.encoding; - } else if encoding == enc.encoding { - return encoding; + if encoding == ContentEncoding::Auto || encoding == enc.encoding { + return Ok(enc.encoding); } } - ContentEncoding::Identity + // Special case if user cannot accept uncompressed data. + // See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + // TODO: account for whitespace + if raw.contains("*;q=0") || raw.contains("identity;q=0") { + return Err(AcceptEncodingError::CompressionAlgorithmMismatch); + } + + Ok(ContentEncoding::Identity) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + macro_rules! assert_parse_eq { + ($raw:expr, $result:expr) => { + assert_eq!( + AcceptEncoding::try_parse($raw, ContentEncoding::Auto), + Ok($result) + ); + }; + } + + macro_rules! 
assert_parse_fail { + ($raw:expr) => { + assert!(AcceptEncoding::try_parse($raw, ContentEncoding::Auto).is_err()); + }; + } + + #[test] + fn test_parse_encoding() { + // Test simple case + assert_parse_eq!("br", ContentEncoding::Br); + assert_parse_eq!("gzip", ContentEncoding::Gzip); + assert_parse_eq!("deflate", ContentEncoding::Deflate); + assert_parse_eq!("zstd", ContentEncoding::Zstd); + + // Test space, trim, missing values + assert_parse_eq!("br,,,,", ContentEncoding::Br); + assert_parse_eq!("gzip , br, zstd", ContentEncoding::Gzip); + + // Test float number parsing + assert_parse_eq!("br;q=1 ,", ContentEncoding::Br); + assert_parse_eq!("br;q=1.0 , br", ContentEncoding::Br); + + // Test wildcard + assert_parse_eq!("*", ContentEncoding::Identity); + assert_parse_eq!("*;q=1.0", ContentEncoding::Identity); + } + + #[test] + fn test_parse_encoding_qfactor_ordering() { + assert_parse_eq!("gzip, br, zstd", ContentEncoding::Gzip); + assert_parse_eq!("zstd, br, gzip", ContentEncoding::Zstd); + + assert_parse_eq!("gzip;q=0.4, br;q=0.6", ContentEncoding::Br); + assert_parse_eq!("gzip;q=0.8, br;q=0.4", ContentEncoding::Gzip); + } + + #[test] + fn test_parse_encoding_qfactor_invalid() { + // Out of range + assert_parse_eq!("gzip;q=-5.0", ContentEncoding::Identity); + assert_parse_eq!("gzip;q=5.0", ContentEncoding::Identity); + + // Disabled + assert_parse_eq!("gzip;q=0", ContentEncoding::Identity); + } + + #[test] + fn test_parse_compression_required() { + // Check we fallback to identity if there is an unsupported compression algorithm + assert_parse_eq!("compress", ContentEncoding::Identity); + + // User do not want any compression + assert_parse_fail!("compress, identity;q=0"); + assert_parse_fail!("compress, identity;q=0.0"); + assert_parse_fail!("compress, *;q=0"); + assert_parse_fail!("compress, *;q=0.0"); } } diff --git a/tests/test_server.rs b/tests/test_server.rs index afea39dd9..beb8ff0f5 100644 --- a/tests/test_server.rs +++ b/tests/test_server.rs @@ -1077,3 +1077,22 @@ async fn test_data_drop() { assert_eq!(num.load(Ordering::SeqCst), 0); } + +#[actix_rt::test] +async fn test_accept_encoding_no_match() { + let srv = actix_test::start_with(actix_test::config().h1(), || { + App::new() + .wrap(Compress::default()) + .service(web::resource("/").route(web::to(move || HttpResponse::Ok().finish()))) + }); + + let response = srv + .get("/") + .append_header((ACCEPT_ENCODING, "compress, identity;q=0")) + .no_decompress() + .send() + .await + .unwrap(); + + assert_eq!(response.status().as_u16(), 406); +} From 93112644d3da17833ea03fc7856329ec2f35ba1c Mon Sep 17 00:00:00 2001 From: Rob Ede Date: Wed, 1 Sep 2021 09:53:26 +0100 Subject: [PATCH 16/23] non exhaustive content encoding (#2377) --- Cargo.toml | 2 +- actix-http/CHANGES.md | 3 ++ actix-http/Cargo.toml | 2 +- actix-http/src/encoding/decoder.rs | 3 +- .../src/header/shared/content_encoding.rs | 17 ++++------- src/scope.rs | 4 +-- src/test.rs | 2 +- src/types/form.rs | 2 +- src/types/json.rs | 30 ++++--------------- src/types/query.rs | 30 ++++++++----------- 10 files changed, 35 insertions(+), 60 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index cee401363..699717b4d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -99,7 +99,7 @@ regex = "1.4" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" serde_urlencoded = "0.7" -smallvec = "1.6" +smallvec = "1.6.1" socket2 = "0.4.0" time = { version = "0.2.23", default-features = false, features = ["std"] } url = "2.1" diff --git a/actix-http/CHANGES.md b/actix-http/CHANGES.md 
index f4efef54a..65206cf9a 100644 --- a/actix-http/CHANGES.md +++ b/actix-http/CHANGES.md @@ -2,6 +2,7 @@ ## Unreleased - 2021-xx-xx ### Changed +* `ContentEncoding` is now marked `#[non_exhaustive]`. [#2377] * Minimum supported Rust version (MSRV) is now 1.51. ### Fixed @@ -12,12 +13,14 @@ [#2364]: https://github.com/actix/actix-web/pull/2364 [#2375]: https://github.com/actix/actix-web/pull/2375 [#2344]: https://github.com/actix/actix-web/pull/2344 +[#2377]: https://github.com/actix/actix-web/pull/2377 ## 3.0.0-beta.8 - 2021-08-09 ### Fixed * Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977) + ## 3.0.0-beta.8 - 2021-06-26 ### Changed * Change compression algorithm features flags. [#2250] diff --git a/actix-http/Cargo.toml b/actix-http/Cargo.toml index 68f980982..54505a215 100644 --- a/actix-http/Cargo.toml +++ b/actix-http/Cargo.toml @@ -73,7 +73,7 @@ rand = "0.8" regex = "1.3" serde = "1.0" sha-1 = "0.9" -smallvec = "1.6" +smallvec = "1.6.1" time = { version = "0.2.23", default-features = false, features = ["std"] } tokio = { version = "1.2", features = ["sync"] } diff --git a/actix-http/src/encoding/decoder.rs b/actix-http/src/encoding/decoder.rs index 81e97d916..c32983fc7 100644 --- a/actix-http/src/encoding/decoder.rs +++ b/actix-http/src/encoding/decoder.rs @@ -1,7 +1,6 @@ //! Stream decoders. use std::{ - convert::TryFrom, future::Future, io::{self, Write as _}, pin::Pin, @@ -81,7 +80,7 @@ where let encoding = headers .get(&CONTENT_ENCODING) .and_then(|val| val.to_str().ok()) - .and_then(|x| ContentEncoding::try_from(x).ok()) + .and_then(|x| x.parse().ok()) .unwrap_or(ContentEncoding::Identity); Self::new(stream, encoding) diff --git a/actix-http/src/header/shared/content_encoding.rs b/actix-http/src/header/shared/content_encoding.rs index 375e8c2fa..1af109c06 100644 --- a/actix-http/src/header/shared/content_encoding.rs +++ b/actix-http/src/header/shared/content_encoding.rs @@ -1,5 +1,6 @@ -use std::{convert::TryFrom, error, fmt, str::FromStr}; +use std::{convert::TryFrom, str::FromStr}; +use derive_more::{Display, Error}; use http::header::InvalidHeaderValue; use crate::{ @@ -11,19 +12,13 @@ use crate::{ /// Error return when a content encoding is unknown. /// /// Example: 'compress' -#[derive(Debug)] +#[derive(Debug, Display, Error)] +#[display(fmt = "unsupported content encoding")] pub struct ContentEncodingParseError; -impl fmt::Display for ContentEncodingParseError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Unsupported content encoding") - } -} - -impl error::Error for ContentEncodingParseError {} - /// Represents a supported content encoding. -#[derive(Copy, Clone, PartialEq, Debug)] +#[derive(Debug, Clone, Copy, PartialEq)] +#[non_exhaustive] pub enum ContentEncoding { /// Automatically select encoding based on encoding negotiation. 
Auto, diff --git a/src/scope.rs b/src/scope.rs index b2edaedab..7d914f581 100644 --- a/src/scope.rs +++ b/src/scope.rs @@ -41,9 +41,9 @@ type HttpNewService = BoxServiceFactory<(), ServiceRequest, ServiceResponse, Err /// fn main() { /// let app = App::new().service( /// web::scope("/{project_id}/") -/// .service(web::resource("/path1").to(|| async { HttpResponse::Ok() })) +/// .service(web::resource("/path1").to(|| async { "OK" })) /// .service(web::resource("/path2").route(web::get().to(|| HttpResponse::Ok()))) -/// .service(web::resource("/path3").route(web::head().to(|| HttpResponse::MethodNotAllowed()))) +/// .service(web::resource("/path3").route(web::head().to(HttpResponse::MethodNotAllowed))) /// ); /// } /// ``` diff --git a/src/test.rs b/src/test.rs index 634826d19..34dd6f2d3 100644 --- a/src/test.rs +++ b/src/test.rs @@ -56,7 +56,7 @@ pub fn default_service( /// async fn test_init_service() { /// let app = test::init_service( /// App::new() -/// .service(web::resource("/test").to(|| async { HttpResponse::Ok() })) +/// .service(web::resource("/test").to(|| async { "OK" })) /// ).await; /// /// // Create request object diff --git a/src/types/form.rs b/src/types/form.rs index c81f73554..2ace0e063 100644 --- a/src/types/form.rs +++ b/src/types/form.rs @@ -30,7 +30,7 @@ use crate::{ /// /// # Extractor /// To extract typed data from a request body, the inner type `T` must implement the -/// [`serde::Deserialize`] trait. +/// [`DeserializeOwned`] trait. /// /// Use [`FormConfig`] to configure extraction process. /// diff --git a/src/types/json.rs b/src/types/json.rs index ab9708c53..8c2f51a68 100644 --- a/src/types/json.rs +++ b/src/types/json.rs @@ -97,19 +97,13 @@ impl ops::DerefMut for Json { } } -impl fmt::Display for Json -where - T: fmt::Display, -{ +impl fmt::Display for Json { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.0, f) } } -impl Serialize for Json -where - T: Serialize, -{ +impl Serialize for Json { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -133,10 +127,7 @@ impl Responder for Json { } /// See [here](#extractor) for example of usage as an extractor. -impl FromRequest for Json -where - T: DeserializeOwned + 'static, -{ +impl FromRequest for Json { type Error = Error; type Future = JsonExtractFut; type Config = JsonConfig; @@ -166,10 +157,7 @@ pub struct JsonExtractFut { err_handler: JsonErrorHandler, } -impl Future for JsonExtractFut -where - T: DeserializeOwned + 'static, -{ +impl Future for JsonExtractFut { type Output = Result, Error>; fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { @@ -311,10 +299,7 @@ pub enum JsonBody { impl Unpin for JsonBody {} -impl JsonBody -where - T: DeserializeOwned + 'static, -{ +impl JsonBody { /// Create a new future to decode a JSON request payload. #[allow(clippy::borrow_interior_mutable_const)] pub fn new( @@ -395,10 +380,7 @@ where } } -impl Future for JsonBody -where - T: DeserializeOwned + 'static, -{ +impl Future for JsonBody { type Output = Result; fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { diff --git a/src/types/query.rs b/src/types/query.rs index 1e6f1111f..73d08d092 100644 --- a/src/types/query.rs +++ b/src/types/query.rs @@ -3,14 +3,14 @@ use std::{fmt, ops, sync::Arc}; use actix_utils::future::{err, ok, Ready}; -use serde::de; +use serde::de::DeserializeOwned; use crate::{dev::Payload, error::QueryPayloadError, Error, FromRequest, HttpRequest}; /// Extract typed information from the request's query. 
/// /// To extract typed data from the URL query string, the inner type `T` must implement the -/// [`serde::Deserialize`] trait. +/// [`DeserializeOwned`] trait. /// /// Use [`QueryConfig`] to configure extraction process. /// @@ -46,18 +46,18 @@ use crate::{dev::Payload, error::QueryPayloadError, Error, FromRequest, HttpRequ /// // To access the entire underlying query struct, use `.into_inner()`. /// #[get("/debug1")] /// async fn debug1(info: web::Query) -> String { -/// dbg!("Authorization object={:?}", info.into_inner()); +/// dbg!("Authorization object = {:?}", info.into_inner()); /// "OK".to_string() /// } /// -/// // Or use `.0`, which is equivalent to `.into_inner()`. +/// // Or use destructuring, which is equivalent to `.into_inner()`. /// #[get("/debug2")] -/// async fn debug2(info: web::Query) -> String { -/// dbg!("Authorization object={:?}", info.0); +/// async fn debug2(web::Query(info): web::Query) -> String { +/// dbg!("Authorization object = {:?}", info); /// "OK".to_string() /// } /// ``` -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)] +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] pub struct Query(pub T); impl Query { @@ -65,8 +65,10 @@ impl Query { pub fn into_inner(self) -> T { self.0 } +} - /// Deserialize `T` from a URL encoded query parameter string. +impl Query { + /// Deserialize a `T` from the URL encoded query parameter string. /// /// ``` /// # use std::collections::HashMap; @@ -76,10 +78,7 @@ impl Query { /// assert_eq!(numbers.get("two"), Some(&2)); /// assert!(numbers.get("three").is_none()); /// ``` - pub fn from_query(query_str: &str) -> Result - where - T: de::DeserializeOwned, - { + pub fn from_query(query_str: &str) -> Result { serde_urlencoded::from_str::(query_str) .map(Self) .map_err(QueryPayloadError::Deserialize) @@ -107,10 +106,7 @@ impl fmt::Display for Query { } /// See [here](#usage) for example of usage as an extractor. -impl FromRequest for Query -where - T: de::DeserializeOwned, -{ +impl FromRequest for Query { type Error = Error; type Future = Ready>; type Config = QueryConfig; @@ -165,7 +161,7 @@ where /// let query_cfg = web::QueryConfig::default() /// // use custom error handler /// .error_handler(|err, req| { -/// error::InternalError::from_response(err, HttpResponse::Conflict().into()).into() +/// error::InternalError::from_response(err, HttpResponse::Conflict().finish()).into() /// }); /// /// App::new() From 53ec66caf47592b2bdfbbab2936d7ac727bcf315 Mon Sep 17 00:00:00 2001 From: Omid Rad Date: Wed, 1 Sep 2021 21:16:41 +0200 Subject: [PATCH 17/23] Send headers within the redirect requests. (#2310) --- awc/CHANGES.md | 3 + awc/src/middleware/redirect.rs | 365 +++++++++++++++++++++++++++------ 2 files changed, 303 insertions(+), 65 deletions(-) diff --git a/awc/CHANGES.md b/awc/CHANGES.md index 16132be1c..9c6f258aa 100644 --- a/awc/CHANGES.md +++ b/awc/CHANGES.md @@ -1,7 +1,10 @@ # Changes ## Unreleased - 2021-xx-xx +### Changed +* Send headers within the redirect requests. [#2310] +[#2310]: https://github.com/actix/actix-web/pull/2310 ## 3.0.0-beta.7 - 2021-06-26 ### Changed diff --git a/awc/src/middleware/redirect.rs b/awc/src/middleware/redirect.rs index ae09edf9c..a8c14d549 100644 --- a/awc/src/middleware/redirect.rs +++ b/awc/src/middleware/redirect.rs @@ -85,10 +85,12 @@ where let max_redirect_times = self.max_redirect_times; // backup the uri and method for reuse schema and authority. 
- let (uri, method) = match head { - RequestHeadType::Owned(ref head) => (head.uri.clone(), head.method.clone()), + let (uri, method, headers) = match head { + RequestHeadType::Owned(ref head) => { + (head.uri.clone(), head.method.clone(), head.headers.clone()) + } RequestHeadType::Rc(ref head, ..) => { - (head.uri.clone(), head.method.clone()) + (head.uri.clone(), head.method.clone(), head.headers.clone()) } }; @@ -104,6 +106,7 @@ where max_redirect_times, uri: Some(uri), method: Some(method), + headers: Some(headers), body: body_opt, addr, connector: Some(connector), @@ -127,9 +130,10 @@ pin_project_lite::pin_project! { max_redirect_times: u8, uri: Option, method: Option, + headers: Option, body: Option, addr: Option, - connector: Option> + connector: Option>, } } } @@ -148,6 +152,7 @@ where max_redirect_times, uri, method, + headers, body, addr, connector, @@ -156,79 +161,60 @@ where StatusCode::MOVED_PERMANENTLY | StatusCode::FOUND | StatusCode::SEE_OTHER + | StatusCode::TEMPORARY_REDIRECT + | StatusCode::PERMANENT_REDIRECT if *max_redirect_times > 0 => { - let org_uri = uri.take().unwrap(); - // rebuild uri from the location header value. - let uri = rebuild_uri(&res, org_uri)?; + let is_redirect = res.head().status == StatusCode::TEMPORARY_REDIRECT + || res.head().status == StatusCode::PERMANENT_REDIRECT; - // reset method - let method = method.take().unwrap(); - let method = match method { - Method::GET | Method::HEAD => method, - _ => Method::GET, - }; + let prev_uri = uri.take().unwrap(); + + // rebuild uri from the location header value. + let next_uri = build_next_uri(&res, &prev_uri)?; // take ownership of states that could be reused let addr = addr.take(); let connector = connector.take(); - let mut max_redirect_times = *max_redirect_times; - // use a new request head. - let mut head = RequestHead::default(); - head.uri = uri.clone(); - head.method = method.clone(); - - let head = RequestHeadType::Owned(head); - - max_redirect_times -= 1; - - let fut = connector - .as_ref() - .unwrap() - // remove body - .call(ConnectRequest::Client(head, Body::None, addr)); - - self.set(RedirectServiceFuture::Client { - fut, - max_redirect_times, - uri: Some(uri), - method: Some(method), - // body is dropped on 301,302,303 - body: None, - addr, - connector, - }); - - self.poll(cx) - } - StatusCode::TEMPORARY_REDIRECT | StatusCode::PERMANENT_REDIRECT - if *max_redirect_times > 0 => - { - let org_uri = uri.take().unwrap(); - // rebuild uri from the location header value. - let uri = rebuild_uri(&res, org_uri)?; - - // try to reuse body - let body = body.take(); - let body_new = match body { - Some(ref bytes) => Body::Bytes(bytes.clone()), - // TODO: should this be Body::Empty or Body::None. - _ => Body::Empty, + // reset method + let method = if is_redirect { + method.take().unwrap() + } else { + let method = method.take().unwrap(); + match method { + Method::GET | Method::HEAD => method, + _ => Method::GET, + } }; - let addr = addr.take(); - let method = method.take().unwrap(); - let connector = connector.take(); - let mut max_redirect_times = *max_redirect_times; + let mut body = body.take(); + let body_new = if is_redirect { + // try to reuse body + match body { + Some(ref bytes) => Body::Bytes(bytes.clone()), + // TODO: should this be Body::Empty or Body::None. + _ => Body::Empty, + } + } else { + body = None; + // remove body + Body::None + }; + + let mut headers = headers.take().unwrap(); + + remove_sensitive_headers(&mut headers, &prev_uri, &next_uri); // use a new request head. 
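+                    // The rebuilt head below reuses the original request's headers (new in this
+                    // change); `remove_sensitive_headers` above already dropped the authorization,
+                    // proxy-authorization and cookie headers when the redirect crosses a scheme,
+                    // host or port boundary (see `test_remove_sensitive_headers` below).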
let mut head = RequestHead::default(); - head.uri = uri.clone(); + head.uri = next_uri.clone(); head.method = method.clone(); + head.headers = headers.clone(); let head = RequestHeadType::Owned(head); + let mut max_redirect_times = *max_redirect_times; max_redirect_times -= 1; let fut = connector @@ -239,8 +225,9 @@ where self.set(RedirectServiceFuture::Client { fut, max_redirect_times, - uri: Some(uri), + uri: Some(next_uri), method: Some(method), + headers: Some(headers), body, addr, connector, @@ -256,7 +243,7 @@ where } } -fn rebuild_uri(res: &ClientResponse, org_uri: Uri) -> Result { +fn build_next_uri(res: &ClientResponse, prev_uri: &Uri) -> Result { let uri = res .headers() .get(header::LOCATION) @@ -266,8 +253,8 @@ fn rebuild_uri(res: &ClientResponse, org_uri: Uri) -> Result(uri) @@ -281,12 +268,25 @@ fn rebuild_uri(res: &ClientResponse, org_uri: Uri) -> Result HttpResponse { + HttpResponse::TemporaryRedirect() + .append_header(("location", "/test")) + .finish() + } + + async fn test(req: HttpRequest, body: Bytes) -> HttpResponse { + if req.method() == Method::POST && !body.is_empty() { + HttpResponse::Ok().finish() + } else { + HttpResponse::InternalServerError().finish() + } + } + + App::new() + .service(web::resource("/").route(web::to(root))) + .service(web::resource("/test").route(web::to(test))) + }); + + let res = srv.post("/").send_body("Hello").await.unwrap(); + assert_eq!(res.status().as_u16(), 200); + } + + #[actix_rt::test] + async fn test_redirect_status_kind_301_302_303() { + let srv = actix_test::start(|| { + async fn root() -> HttpResponse { + HttpResponse::Found() + .append_header(("location", "/test")) + .finish() + } + + async fn test(req: HttpRequest, body: Bytes) -> HttpResponse { + if (req.method() == Method::GET || req.method() == Method::HEAD) + && body.is_empty() + { + HttpResponse::Ok().finish() + } else { + HttpResponse::InternalServerError().finish() + } + } + + App::new() + .service(web::resource("/").route(web::to(root))) + .service(web::resource("/test").route(web::to(test))) + }); + + let res = srv.post("/").send_body("Hello").await.unwrap(); + assert_eq!(res.status().as_u16(), 200); + + let res = srv.post("/").send().await.unwrap(); + assert_eq!(res.status().as_u16(), 200); + } + + #[actix_rt::test] + async fn test_redirect_headers() { + let srv = actix_test::start(|| { + async fn root(req: HttpRequest) -> HttpResponse { + if req + .headers() + .get("custom") + .unwrap_or(&HeaderValue::from_str("").unwrap()) + == "value" + { + HttpResponse::Found() + .append_header(("location", "/test")) + .finish() + } else { + HttpResponse::InternalServerError().finish() + } + } + + async fn test(req: HttpRequest) -> HttpResponse { + if req + .headers() + .get("custom") + .unwrap_or(&HeaderValue::from_str("").unwrap()) + == "value" + { + HttpResponse::Ok().finish() + } else { + HttpResponse::InternalServerError().finish() + } + } + + App::new() + .service(web::resource("/").route(web::to(root))) + .service(web::resource("/test").route(web::to(test))) + }); + + let client = ClientBuilder::new() + .header("custom", "value") + .disable_redirects() + .finish(); + let res = client.get(srv.url("/")).send().await.unwrap(); + assert_eq!(res.status().as_u16(), 302); + + let client = ClientBuilder::new().header("custom", "value").finish(); + let res = client.get(srv.url("/")).send().await.unwrap(); + assert_eq!(res.status().as_u16(), 200); + + let client = ClientBuilder::new().finish(); + let res = client + .get(srv.url("/")) + .insert_header(("custom", "value")) + .send() 
+ .await + .unwrap(); + assert_eq!(res.status().as_u16(), 200); + } + + #[actix_rt::test] + async fn test_redirect_cross_origin_headers() { + // defining two services to have two different origins + let srv2 = actix_test::start(|| { + async fn root(req: HttpRequest) -> HttpResponse { + if req.headers().get(header::AUTHORIZATION).is_none() { + HttpResponse::Ok().finish() + } else { + HttpResponse::InternalServerError().finish() + } + } + + App::new().service(web::resource("/").route(web::to(root))) + }); + let srv2_port: u16 = srv2.addr().port(); + + let srv1 = actix_test::start(move || { + async fn root(req: HttpRequest) -> HttpResponse { + let port = *req.app_data::().unwrap(); + if req.headers().get(header::AUTHORIZATION).is_some() { + HttpResponse::Found() + .append_header(( + "location", + format!("http://localhost:{}/", port).as_str(), + )) + .finish() + } else { + HttpResponse::InternalServerError().finish() + } + } + + async fn test1(req: HttpRequest) -> HttpResponse { + if req.headers().get(header::AUTHORIZATION).is_some() { + HttpResponse::Found() + .append_header(("location", "/test2")) + .finish() + } else { + HttpResponse::InternalServerError().finish() + } + } + + async fn test2(req: HttpRequest) -> HttpResponse { + if req.headers().get(header::AUTHORIZATION).is_some() { + HttpResponse::Ok().finish() + } else { + HttpResponse::InternalServerError().finish() + } + } + + App::new() + .app_data(srv2_port) + .service(web::resource("/").route(web::to(root))) + .service(web::resource("/test1").route(web::to(test1))) + .service(web::resource("/test2").route(web::to(test2))) + }); + + // send a request to different origins, http://srv1/ then http://srv2/. So it should remove the header + let client = ClientBuilder::new() + .header(header::AUTHORIZATION, "auth_key_value") + .finish(); + let res = client.get(srv1.url("/")).send().await.unwrap(); + assert_eq!(res.status().as_u16(), 200); + + // send a request to same origin, http://srv1/test1 then http://srv1/test2. 
So it should NOT remove any header + let res = client.get(srv1.url("/test1")).send().await.unwrap(); + assert_eq!(res.status().as_u16(), 200); + } + + #[actix_rt::test] + async fn test_remove_sensitive_headers() { + fn gen_headers() -> header::HeaderMap { + let mut headers = header::HeaderMap::new(); + headers.insert(header::USER_AGENT, HeaderValue::from_str("value").unwrap()); + headers.insert( + header::AUTHORIZATION, + HeaderValue::from_str("value").unwrap(), + ); + headers.insert( + header::PROXY_AUTHORIZATION, + HeaderValue::from_str("value").unwrap(), + ); + headers.insert(header::COOKIE, HeaderValue::from_str("value").unwrap()); + headers + } + + // Same origin + let prev_uri = Uri::from_str("https://host/path1").unwrap(); + let next_uri = Uri::from_str("https://host/path2").unwrap(); + let mut headers = gen_headers(); + remove_sensitive_headers(&mut headers, &prev_uri, &next_uri); + assert_eq!(headers.len(), 4); + + // different schema + let prev_uri = Uri::from_str("http://host/").unwrap(); + let next_uri = Uri::from_str("https://host/").unwrap(); + let mut headers = gen_headers(); + remove_sensitive_headers(&mut headers, &prev_uri, &next_uri); + assert_eq!(headers.len(), 1); + + // different host + let prev_uri = Uri::from_str("https://host1/").unwrap(); + let next_uri = Uri::from_str("https://host2/").unwrap(); + let mut headers = gen_headers(); + remove_sensitive_headers(&mut headers, &prev_uri, &next_uri); + assert_eq!(headers.len(), 1); + + // different port + let prev_uri = Uri::from_str("https://host:12/").unwrap(); + let next_uri = Uri::from_str("https://host:23/").unwrap(); + let mut headers = gen_headers(); + remove_sensitive_headers(&mut headers, &prev_uri, &next_uri); + assert_eq!(headers.len(), 1); + + // different everything! 
+ let prev_uri = Uri::from_str("http://host1:12/path1").unwrap(); + let next_uri = Uri::from_str("https://host2:23/path2").unwrap(); + let mut headers = gen_headers(); + remove_sensitive_headers(&mut headers, &prev_uri, &next_uri); + assert_eq!(headers.len(), 1); + } } From d8a0f46f264dd52a8d17a8c97036dcf9fc717cbb Mon Sep 17 00:00:00 2001 From: Rob Ede Date: Fri, 3 Sep 2021 18:00:43 +0100 Subject: [PATCH 18/23] refactor web module (#2379) --- .cargo/config.toml | 2 +- .github/workflows/ci.yml | 40 ++++- CHANGES.md | 5 +- src/dev.rs | 4 +- src/http/header/content_disposition.rs | 10 +- src/lib.rs | 1 - src/service.rs | 2 +- src/web.rs | 220 +++++++------------------ 8 files changed, 103 insertions(+), 181 deletions(-) diff --git a/.cargo/config.toml b/.cargo/config.toml index db47ca46d..f417a7053 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -6,4 +6,4 @@ ci-min-test = "hack check --workspace --no-default-features --tests --examples" ci-default = "check --workspace --bins --tests --examples" ci-full = "check --workspace --all-features --bins --tests --examples" ci-test = "test --workspace --all-features --lib --tests --no-fail-fast -- --nocapture" -ci-doctest = "hack test --workspace --all-features --doc --no-fail-fast -- --nocapture" +ci-doctest = "test --workspace --all-features --doc --no-fail-fast -- --nocapture" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 221d2fb40..647501579 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -80,13 +80,6 @@ jobs: command: ci-test args: --skip=test_reading_deflate_encoding_large_random_rustls - - name: doc tests - # due to unknown issue with running doc tests on macOS - if: matrix.target.os == 'ubuntu-latest' - uses: actions-rs/cargo@v1 - timeout-minutes: 40 - with: { command: ci-doctest } - - name: Generate coverage file if: > matrix.target.os == 'ubuntu-latest' @@ -106,5 +99,36 @@ jobs: - name: Clear the cargo caches run: | - cargo install cargo-cache --version 0.6.2 --no-default-features --features ci-autoclean + cargo install cargo-cache --version 0.6.3 --no-default-features --features ci-autoclean cargo-cache + + rustdoc: + name: rustdoc + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + + - name: Install Rust (nightly) + uses: actions-rs/toolchain@v1 + with: + toolchain: nightly-x86_64-unknown-linux-gnu + profile: minimal + override: true + + - name: Generate Cargo.lock + uses: actions-rs/cargo@v1 + with: { command: generate-lockfile } + - name: Cache Dependencies + uses: Swatinem/rust-cache@v1.3.0 + + - name: Install cargo-hack + uses: actions-rs/cargo@v1 + with: + command: install + args: cargo-hack + + - name: doc tests + uses: actions-rs/cargo@v1 + timeout-minutes: 40 + with: { command: ci-doctest } diff --git a/CHANGES.md b/CHANGES.md index 217ec4f78..6826be075 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -5,14 +5,17 @@ * Re-export actix-service `ServiceFactory` in `dev` module. [#2325] ### Changed -* Minimum supported Rust version (MSRV) is now 1.51. * Compress middleware will return 406 Not Acceptable when no content encoding is acceptable to the client. [#2344] +* Move `BaseHttpResponse` to `dev::Response`. [#2379] +* Minimum supported Rust version (MSRV) is now 1.51. ### Fixed * Fix quality parse error in Accept-Encoding header. [#2344] +* Re-export correct type at `web::HttpResponse`. 
[#2379] [#2325]: https://github.com/actix/actix-web/pull/2325 [#2344]: https://github.com/actix/actix-web/pull/2344 +[#2379]: https://github.com/actix/actix-web/pull/2379 ## 4.0.0-beta.8 - 2021-06-26 diff --git a/src/dev.rs b/src/dev.rs index 0817d902f..be3af86a8 100644 --- a/src/dev.rs +++ b/src/dev.rs @@ -18,7 +18,7 @@ pub use actix_http::body::{AnyBody, Body, BodySize, MessageBody, ResponseBody, S #[cfg(feature = "__compress")] pub use actix_http::encoding::Decoder as Decompress; -pub use actix_http::{Extensions, Payload, PayloadStream, RequestHead, ResponseHead}; +pub use actix_http::{Extensions, Payload, PayloadStream, RequestHead, Response, ResponseHead}; pub use actix_router::{Path, ResourceDef, ResourcePath, Url}; pub use actix_server::Server; pub use actix_service::{ @@ -26,7 +26,7 @@ pub use actix_service::{ }; use crate::http::header::ContentEncoding; -use actix_http::{Response, ResponseBuilder}; +use actix_http::ResponseBuilder; use actix_router::Patterns; diff --git a/src/http/header/content_disposition.rs b/src/http/header/content_disposition.rs index 6e75fde92..fdd8a7dac 100644 --- a/src/http/header/content_disposition.rs +++ b/src/http/header/content_disposition.rs @@ -1,10 +1,10 @@ //! # References //! -//! "The Content-Disposition Header Field" https://www.ietf.org/rfc/rfc2183.txt -//! "The Content-Disposition Header Field in the Hypertext Transfer Protocol (HTTP)" https://www.ietf.org/rfc/rfc6266.txt -//! "Returning Values from Forms: multipart/form-data" https://www.ietf.org/rfc/rfc7578.txt -//! Browser conformance tests at: http://greenbytes.de/tech/tc2231/ -//! IANA assignment: http://www.iana.org/assignments/cont-disp/cont-disp.xhtml +//! "The Content-Disposition Header Field" +//! "The Content-Disposition Header Field in the Hypertext Transfer Protocol (HTTP)" +//! "Returning Values from Forms: multipart/form-data" +//! Browser conformance tests at: +//! IANA assignment: use once_cell::sync::Lazy; use regex::Regex; diff --git a/src/lib.rs b/src/lib.rs index e7cf46361..d008fdb7f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -96,7 +96,6 @@ pub mod test; pub(crate) mod types; pub mod web; -pub use actix_http::Response as BaseHttpResponse; pub use actix_http::{body, HttpMessage}; #[doc(inline)] pub use actix_rt as rt; diff --git a/src/service.rs b/src/service.rs index 48167e5b3..b9fa0e128 100644 --- a/src/service.rs +++ b/src/service.rs @@ -476,7 +476,7 @@ impl WebService { /// Set service name. /// - /// Name is used for url generation. + /// Name is used for URL generation. 
pub fn name(mut self, name: &str) -> Self { self.name = Some(name.to_string()); self diff --git a/src/web.rs b/src/web.rs index 108ff314f..40d7636cf 100644 --- a/src/web.rs +++ b/src/web.rs @@ -3,44 +3,36 @@ use std::future::Future; use actix_http::http::Method; -pub use actix_http::Response as HttpResponse; use actix_router::IntoPatterns; pub use bytes::{Buf, BufMut, Bytes, BytesMut}; -use crate::error::BlockingError; -use crate::extract::FromRequest; -use crate::handler::Handler; -use crate::resource::Resource; -use crate::responder::Responder; -use crate::route::Route; -use crate::scope::Scope; -use crate::service::WebService; +use crate::{ + error::BlockingError, extract::FromRequest, handler::Handler, resource::Resource, + responder::Responder, route::Route, scope::Scope, service::WebService, +}; pub use crate::config::ServiceConfig; pub use crate::data::Data; pub use crate::request::HttpRequest; pub use crate::request_data::ReqData; +pub use crate::response::HttpResponse; pub use crate::types::*; -/// Create resource for a specific path. +/// Creates a new resource for a specific path. /// -/// Resources may have variable path segments. For example, a -/// resource with the path `/a/{name}/c` would match all incoming -/// requests with paths such as `/a/b/c`, `/a/1/c`, or `/a/etc/c`. +/// Resources may have dynamic path segments. For example, a resource with the path `/a/{name}/c` +/// would match all incoming requests with paths such as `/a/b/c`, `/a/1/c`, or `/a/etc/c`. /// -/// A variable segment is specified in the form `{identifier}`, -/// where the identifier can be used later in a request handler to -/// access the matched value for that segment. This is done by -/// looking up the identifier in the `Params` object returned by -/// `HttpRequest.match_info()` method. +/// A dynamic segment is specified in the form `{identifier}`, where the identifier can be used +/// later in a request handler to access the matched value for that segment. This is done by looking +/// up the identifier in the `Path` object returned by [`HttpRequest.match_info()`] method. /// /// By default, each segment matches the regular expression `[^{}/]+`. /// /// You can also specify a custom regex in the form `{identifier:regex}`: /// -/// For instance, to route `GET`-requests on any route matching -/// `/users/{userid}/{friend}` and store `userid` and `friend` in -/// the exposed `Params` object: +/// For instance, to route `GET`-requests on any route matching `/users/{userid}/{friend}` and store +/// `userid` and `friend` in the exposed `Path` object: /// /// ``` /// use actix_web::{web, App, HttpResponse}; @@ -55,10 +47,16 @@ pub fn resource(path: T) -> Resource { Resource::new(path) } -/// Configure scope for common root path. +/// Creates scope for common path prefix. /// -/// Scopes collect multiple paths under a common path prefix. -/// Scope path can contain variable path segments as resources. +/// Scopes collect multiple paths under a common path prefix. The scope's path can contain dynamic +/// path segments. 
+/// +/// # Examples +/// In this example, three routes are set up (and will handle any method): +/// * `/{project_id}/path1` +/// * `/{project_id}/path2` +/// * `/{project_id}/path3` /// /// ``` /// use actix_web::{web, App, HttpResponse}; @@ -70,148 +68,50 @@ pub fn resource(path: T) -> Resource { /// .service(web::resource("/path3").to(|| HttpResponse::MethodNotAllowed())) /// ); /// ``` -/// -/// In the above example, three routes get added: -/// * /{project_id}/path1 -/// * /{project_id}/path2 -/// * /{project_id}/path3 -/// pub fn scope(path: &str) -> Scope { Scope::new(path) } -/// Create *route* without configuration. +/// Creates a new un-configured route. pub fn route() -> Route { Route::new() } -/// Create *route* with `GET` method guard. -/// -/// ``` -/// use actix_web::{web, App, HttpResponse}; -/// -/// let app = App::new().service( -/// web::resource("/{project_id}") -/// .route(web::get().to(|| HttpResponse::Ok())) -/// ); -/// ``` -/// -/// In the above example, one `GET` route gets added: -/// * /{project_id} -/// -pub fn get() -> Route { - method(Method::GET) +macro_rules! method_route { + ($method_fn:ident, $method_const:ident) => { + paste::paste! { + #[doc = "Creates a new route with `" $method_const "` method guard."] + /// + /// # Examples + #[doc = "In this example, one `" $method_const " /{project_id}` route is set up:"] + /// ``` + /// use actix_web::{web, App, HttpResponse}; + /// + /// let app = App::new().service( + /// web::resource("/{project_id}") + #[doc = " .route(web::" $method_fn "().to(|| HttpResponse::Ok()))"] + /// + /// ); + /// ``` + pub fn $method_fn() -> Route { + method(Method::$method_const) + } + } + }; } -/// Create *route* with `POST` method guard. -/// -/// ``` -/// use actix_web::{web, App, HttpResponse}; -/// -/// let app = App::new().service( -/// web::resource("/{project_id}") -/// .route(web::post().to(|| HttpResponse::Ok())) -/// ); -/// ``` -/// -/// In the above example, one `POST` route gets added: -/// * /{project_id} -/// -pub fn post() -> Route { - method(Method::POST) -} +method_route!(get, GET); +method_route!(post, POST); +method_route!(put, PUT); +method_route!(patch, PATCH); +method_route!(delete, DELETE); +method_route!(head, HEAD); +method_route!(trace, TRACE); -/// Create *route* with `PUT` method guard. +/// Creates a new route with specified method guard. /// -/// ``` -/// use actix_web::{web, App, HttpResponse}; -/// -/// let app = App::new().service( -/// web::resource("/{project_id}") -/// .route(web::put().to(|| HttpResponse::Ok())) -/// ); -/// ``` -/// -/// In the above example, one `PUT` route gets added: -/// * /{project_id} -/// -pub fn put() -> Route { - method(Method::PUT) -} - -/// Create *route* with `PATCH` method guard. -/// -/// ``` -/// use actix_web::{web, App, HttpResponse}; -/// -/// let app = App::new().service( -/// web::resource("/{project_id}") -/// .route(web::patch().to(|| HttpResponse::Ok())) -/// ); -/// ``` -/// -/// In the above example, one `PATCH` route gets added: -/// * /{project_id} -/// -pub fn patch() -> Route { - method(Method::PATCH) -} - -/// Create *route* with `DELETE` method guard. -/// -/// ``` -/// use actix_web::{web, App, HttpResponse}; -/// -/// let app = App::new().service( -/// web::resource("/{project_id}") -/// .route(web::delete().to(|| HttpResponse::Ok())) -/// ); -/// ``` -/// -/// In the above example, one `DELETE` route gets added: -/// * /{project_id} -/// -pub fn delete() -> Route { - method(Method::DELETE) -} - -/// Create *route* with `HEAD` method guard. 
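The per-method helpers removed in this hunk are regenerated by the `method_route!` macro above, so each helper remains a thin wrapper: the generated `web::get()` is exactly `method(Method::GET)`, `web::trace()` is `method(Method::TRACE)`, and so on. A small sketch of that equivalence (paths and handlers are placeholders):

```
use actix_web::{http::Method, web, App, HttpResponse};

// These two routes are interchangeable: the macro-generated `web::get()`
// simply forwards to `method(Method::GET)`.
let app = App::new()
    .route("/a", web::get().to(|| HttpResponse::Ok()))
    .route("/b", web::method(Method::GET).to(|| HttpResponse::Ok()));
```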
-/// -/// ``` -/// use actix_web::{web, App, HttpResponse}; -/// -/// let app = App::new().service( -/// web::resource("/{project_id}") -/// .route(web::head().to(|| HttpResponse::Ok())) -/// ); -/// ``` -/// -/// In the above example, one `HEAD` route gets added: -/// * /{project_id} -/// -pub fn head() -> Route { - method(Method::HEAD) -} - -/// Create *route* with `TRACE` method guard. -/// -/// ``` -/// use actix_web::{web, App, HttpResponse}; -/// -/// let app = App::new().service( -/// web::resource("/{project_id}") -/// .route(web::trace().to(|| HttpResponse::Ok())) -/// ); -/// ``` -/// -/// In the above example, one `HEAD` route gets added: -/// * /{project_id} -/// -pub fn trace() -> Route { - method(Method::TRACE) -} - -/// Create *route* and add method guard. +/// # Examples +/// In this example, one `GET /{project_id}` route is set up: /// /// ``` /// use actix_web::{web, http, App, HttpResponse}; @@ -221,15 +121,11 @@ pub fn trace() -> Route { /// .route(web::method(http::Method::GET).to(|| HttpResponse::Ok())) /// ); /// ``` -/// -/// In the above example, one `GET` route gets added: -/// * /{project_id} -/// pub fn method(method: Method) -> Route { Route::new().method(method) } -/// Create a new route and add handler. +/// Creates a new any-method route with handler. /// /// ``` /// use actix_web::{web, App, HttpResponse, Responder}; @@ -253,7 +149,7 @@ where Route::new().to(handler) } -/// Create raw service for a specific path. +/// Creates a raw service for a specific path. /// /// ``` /// use actix_web::{dev, web, guard, App, Error, HttpResponse}; @@ -272,8 +168,8 @@ pub fn service(path: T) -> WebService { WebService::new(path) } -/// Execute blocking function on a thread pool, returns future that resolves -/// to result of the function execution. +/// Executes blocking function on a thread pool, returns future that resolves to result of the +/// function execution. pub fn block(f: F) -> impl Future> where F: FnOnce() -> R + Send + 'static, From 1383c7d701c35df45abc425e70dae69d9bab1317 Mon Sep 17 00:00:00 2001 From: Rob Ede Date: Wed, 8 Sep 2021 17:42:14 +0100 Subject: [PATCH 19/23] speed up ci --- .github/workflows/ci.yml | 2 ++ Cargo.toml | 4 ++++ src/web.rs | 4 ++-- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 647501579..1ec034bc8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -24,6 +24,8 @@ jobs: runs-on: ${{ matrix.target.os }} env: + CI: 1 + CARGO_INCREMENTAL: 0 VCPKGRS_DYNAMIC: 1 steps: diff --git a/Cargo.toml b/Cargo.toml index 699717b4d..05ed2eb2d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -118,6 +118,10 @@ rcgen = "0.8" tls-openssl = { package = "openssl", version = "0.10.9" } tls-rustls = { package = "rustls", version = "0.19.0" } +[profile.dev] +# Disabling debug info speeds up builds a bunch and we don't rely on it for debugging that much. +debug = 0 + [profile.release] lto = true opt-level = 3 diff --git a/src/web.rs b/src/web.rs index 40d7636cf..e9f5c8518 100644 --- a/src/web.rs +++ b/src/web.rs @@ -80,10 +80,10 @@ pub fn route() -> Route { macro_rules! method_route { ($method_fn:ident, $method_const:ident) => { paste::paste! 
{ - #[doc = "Creates a new route with `" $method_const "` method guard."] + #[doc = " Creates a new route with `" $method_const "` method guard."] /// /// # Examples - #[doc = "In this example, one `" $method_const " /{project_id}` route is set up:"] + #[doc = " In this example, one `" $method_const " /{project_id}` route is set up:"] /// ``` /// use actix_web::{web, App, HttpResponse}; /// From 8dd30611faf552108638e7719025cbe5ac3d76e8 Mon Sep 17 00:00:00 2001 From: Ibraheem Ahmed Date: Wed, 8 Sep 2021 19:42:40 -0400 Subject: [PATCH 20/23] accept owned strings in TestRequest::param (#2172) * accept owned strings in TestRequest::param * bump actix-router to 0.4.0 * update changelog Co-authored-by: Rob Ede --- CHANGES.md | 2 ++ src/test.rs | 22 +++++++++++++++++----- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 6826be075..33898794b 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -7,12 +7,14 @@ ### Changed * Compress middleware will return 406 Not Acceptable when no content encoding is acceptable to the client. [#2344] * Move `BaseHttpResponse` to `dev::Response`. [#2379] +* Enable `TestRequest::param` to accept more than just static strings. [#2172] * Minimum supported Rust version (MSRV) is now 1.51. ### Fixed * Fix quality parse error in Accept-Encoding header. [#2344] * Re-export correct type at `web::HttpResponse`. [#2379] +[#2172]: https://github.com/actix/actix-web/pull/2172 [#2325]: https://github.com/actix/actix-web/pull/2325 [#2344]: https://github.com/actix/actix-web/pull/2344 [#2379]: https://github.com/actix/actix-web/pull/2379 diff --git a/src/test.rs b/src/test.rs index 34dd6f2d3..99e708592 100644 --- a/src/test.rs +++ b/src/test.rs @@ -1,6 +1,6 @@ //! Various helpers for Actix applications to use during testing. -use std::{net::SocketAddr, rc::Rc}; +use std::{borrow::Cow, net::SocketAddr, rc::Rc}; pub use actix_http::test::TestBuffer; use actix_http::{ @@ -470,19 +470,31 @@ impl TestRequest { self } - /// Set request path pattern parameter - pub fn param(mut self, name: &'static str, value: &'static str) -> Self { + /// Set request path pattern parameter. + /// + /// # Examples + /// ``` + /// use actix_web::test::TestRequest; + /// + /// let req = TestRequest::default().param("foo", "bar"); + /// let req = TestRequest::default().param("foo".to_owned(), "bar".to_owned()); + /// ``` + pub fn param( + mut self, + name: impl Into>, + value: impl Into>, + ) -> Self { self.path.add_static(name, value); self } - /// Set peer addr + /// Set peer addr. pub fn peer_addr(mut self, addr: SocketAddr) -> Self { self.peer_addr = Some(addr); self } - /// Set request payload + /// Set request payload. 
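Given the `Cow` import added above and the call into `Path::add_static`, the widened `param` signature accepts anything convertible into `Cow<'static, str>` for both the name and the value, so owned `String`s built at runtime work alongside the usual string literals. A short sketch of both forms (the parameter names are placeholders):

```
use actix_web::test::TestRequest;

let id = 42.to_string();
let req = TestRequest::default()
    .param("project_id", id)   // owned String, allowed by this change
    .param("section", "intro") // &'static str, as before
    .to_http_request();
```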
pub fn set_payload>(mut self, data: B) -> Self { self.req.set_payload(data); self From ba88d3b4bf1cc3cccfd17d53f907422257e16944 Mon Sep 17 00:00:00 2001 From: Rob Ede Date: Thu, 9 Sep 2021 01:35:41 +0100 Subject: [PATCH 21/23] prepare actix-web beta.9 releases (#2381) * prepare actix-router release 0.5.0-beta.2 * prepare actix-web-codegen release 0.5.0-beta.4 * prepare actix-http release 3.0.0-beta.10 * prepare awc release 3.0.0-beta.8 * prepare actix-web release 4.0.0-beta.9 * prepare actix-http-test release 3.0.0-beta.6 * prepare actix-test release 0.1.0-beta.4 * prepare actix-files release 0.6.0-beta.7 * prepare actix-multipart release 0.4.0-beta.6 * prepare actix-web-actors release 4.0.0-beta.7 * fix http test version * re-add patch * update router repo url * fix http test readme version --- CHANGES.md | 3 +++ Cargo.toml | 10 +++++----- README.md | 4 ++-- actix-files/CHANGES.md | 3 +++ actix-files/Cargo.toml | 8 ++++---- actix-files/README.md | 4 ++-- actix-http-test/CHANGES.md | 3 +++ actix-http-test/Cargo.toml | 20 ++++++++++---------- actix-http-test/README.md | 4 ++-- actix-http/CHANGES.md | 5 ++++- actix-http/Cargo.toml | 4 ++-- actix-http/README.md | 4 ++-- actix-multipart/CHANGES.md | 3 +++ actix-multipart/Cargo.toml | 8 +++----- actix-multipart/README.md | 4 ++-- actix-router/CHANGES.md | 3 +++ actix-router/Cargo.toml | 4 ++-- actix-test/CHANGES.md | 3 +++ actix-test/Cargo.toml | 10 +++++----- actix-web-actors/CHANGES.md | 3 +++ actix-web-actors/Cargo.toml | 8 ++++---- actix-web-actors/README.md | 4 ++-- actix-web-codegen/CHANGES.md | 3 +++ actix-web-codegen/Cargo.toml | 6 +++--- actix-web-codegen/README.md | 4 ++-- awc/CHANGES.md | 4 ++++ awc/Cargo.toml | 10 +++++----- awc/README.md | 4 ++-- 28 files changed, 91 insertions(+), 62 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 33898794b..398ac477a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,6 +1,9 @@ # Changes ## Unreleased - 2021-xx-xx + + +## 4.0.0-beta.9 - 2021-09-09 ### Added * Re-export actix-service `ServiceFactory` in `dev` module. [#2325] diff --git a/Cargo.toml b/Cargo.toml index 05ed2eb2d..60525f3ac 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "actix-web" -version = "4.0.0-beta.8" +version = "4.0.0-beta.9" authors = ["Nikolay Kim "] description = "Actix Web is a powerful, pragmatic, and extremely fast web framework for Rust" keywords = ["actix", "http", "web", "framework", "async"] @@ -69,15 +69,15 @@ __compress = [] [dependencies] actix-codec = "0.4.0" actix-macros = "0.2.1" -actix-router = "0.5.0-beta.1" +actix-router = "0.5.0-beta.2" actix-rt = "2.2" actix-server = "2.0.0-beta.3" actix-service = "2.0.0" actix-utils = "3.0.0" actix-tls = { version = "3.0.0-beta.5", default-features = false, optional = true } -actix-web-codegen = "0.5.0-beta.2" -actix-http = "3.0.0-beta.9" +actix-web-codegen = "0.5.0-beta.4" +actix-http = "3.0.0-beta.10" ahash = "0.7" bytes = "1" @@ -106,7 +106,7 @@ url = "2.1" [dev-dependencies] actix-test = { version = "0.1.0-beta.3", features = ["openssl", "rustls"] } -awc = { version = "3.0.0-beta.7", features = ["openssl"] } +awc = { version = "3.0.0-beta.8", features = ["openssl"] } brotli2 = "0.3.2" criterion = { version = "0.3", features = ["html_reports"] } diff --git a/README.md b/README.md index 33784d66a..13ec3a01a 100644 --- a/README.md +++ b/README.md @@ -6,10 +6,10 @@

[![crates.io](https://img.shields.io/crates/v/actix-web?label=latest)](https://crates.io/crates/actix-web) -[![Documentation](https://docs.rs/actix-web/badge.svg?version=4.0.0-beta.8)](https://docs.rs/actix-web/4.0.0-beta.8) +[![Documentation](https://docs.rs/actix-web/badge.svg?version=4.0.0-beta.9)](https://docs.rs/actix-web/4.0.0-beta.9) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-web.svg) -[![Dependency Status](https://deps.rs/crate/actix-web/4.0.0-beta.8/status.svg)](https://deps.rs/crate/actix-web/4.0.0-beta.8) +[![Dependency Status](https://deps.rs/crate/actix-web/4.0.0-beta.9/status.svg)](https://deps.rs/crate/actix-web/4.0.0-beta.9)
[![build status](https://github.com/actix/actix-web/workflows/CI%20%28Linux%29/badge.svg?branch=master&event=push)](https://github.com/actix/actix-web/actions) [![codecov](https://codecov.io/gh/actix/actix-web/branch/master/graph/badge.svg)](https://codecov.io/gh/actix/actix-web) diff --git a/actix-files/CHANGES.md b/actix-files/CHANGES.md index 533f72291..6d1512c22 100644 --- a/actix-files/CHANGES.md +++ b/actix-files/CHANGES.md @@ -1,6 +1,9 @@ # Changes ## Unreleased - 2021-xx-xx + + +## 0.6.0-beta.7 - 2021-09-09 * Minimum supported Rust version (MSRV) is now 1.51. diff --git a/actix-files/Cargo.toml b/actix-files/Cargo.toml index ef288215b..eccf49a77 100644 --- a/actix-files/Cargo.toml +++ b/actix-files/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "actix-files" -version = "0.6.0-beta.6" +version = "0.6.0-beta.7" authors = ["Nikolay Kim "] description = "Static file serving for Actix Web" keywords = ["actix", "http", "async", "futures"] @@ -15,8 +15,8 @@ name = "actix_files" path = "src/lib.rs" [dependencies] -actix-web = { version = "4.0.0-beta.8", default-features = false } -actix-http = "3.0.0-beta.8" +actix-web = { version = "4.0.0-beta.9", default-features = false } +actix-http = "3.0.0-beta.10" actix-service = "2.0.0" actix-utils = "3.0.0" @@ -33,5 +33,5 @@ percent-encoding = "2.1" [dev-dependencies] actix-rt = "2.2" -actix-web = "4.0.0-beta.8" +actix-web = "4.0.0-beta.9" actix-test = "0.1.0-beta.3" diff --git a/actix-files/README.md b/actix-files/README.md index 5815ef563..31bbd036f 100644 --- a/actix-files/README.md +++ b/actix-files/README.md @@ -3,11 +3,11 @@ > Static file serving for Actix Web [![crates.io](https://img.shields.io/crates/v/actix-files?label=latest)](https://crates.io/crates/actix-files) -[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.0-beta.6)](https://docs.rs/actix-files/0.6.0-beta.6) +[![Documentation](https://docs.rs/actix-files/badge.svg?version=0.6.0-beta.7)](https://docs.rs/actix-files/0.6.0-beta.7) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![License](https://img.shields.io/crates/l/actix-files.svg)
-[![dependency status](https://deps.rs/crate/actix-files/0.6.0-beta.6/status.svg)](https://deps.rs/crate/actix-files/0.6.0-beta.6) +[![dependency status](https://deps.rs/crate/actix-files/0.6.0-beta.7/status.svg)](https://deps.rs/crate/actix-files/0.6.0-beta.7) [![Download](https://img.shields.io/crates/d/actix-files.svg)](https://crates.io/crates/actix-files) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) diff --git a/actix-http-test/CHANGES.md b/actix-http-test/CHANGES.md index 39b6a3a66..69e96f98d 100644 --- a/actix-http-test/CHANGES.md +++ b/actix-http-test/CHANGES.md @@ -1,6 +1,9 @@ # Changes ## Unreleased - 2021-xx-xx + + +## 3.0.0-beta.5 - 2021-09-09 * Minimum supported Rust version (MSRV) is now 1.51. diff --git a/actix-http-test/Cargo.toml b/actix-http-test/Cargo.toml index c04b5da49..e7fe7adc0 100644 --- a/actix-http-test/Cargo.toml +++ b/actix-http-test/Cargo.toml @@ -1,18 +1,18 @@ [package] name = "actix-http-test" -version = "3.0.0-beta.4" +version = "3.0.0-beta.5" authors = ["Nikolay Kim "] description = "Various helpers for Actix applications to use during testing" -readme = "README.md" keywords = ["http", "web", "framework", "async", "futures"] homepage = "https://actix.rs" repository = "https://github.com/actix/actix-web.git" -documentation = "https://docs.rs/actix-http-test/" -categories = ["network-programming", "asynchronous", - "web-programming::http-server", - "web-programming::websocket"] +categories = [ + "network-programming", + "asynchronous", + "web-programming::http-server", + "web-programming::websocket", +] license = "MIT OR Apache-2.0" -exclude = [".gitignore", ".cargo/config"] edition = "2018" [package.metadata.docs.rs] @@ -35,7 +35,7 @@ actix-tls = "3.0.0-beta.5" actix-utils = "3.0.0" actix-rt = "2.2" actix-server = "2.0.0-beta.3" -awc = { version = "3.0.0-beta.7", default-features = false } +awc = { version = "3.0.0-beta.8", default-features = false } base64 = "0.13" bytes = "1" @@ -51,5 +51,5 @@ time = { version = "0.2.23", default-features = false, features = ["std"] } tls-openssl = { version = "0.10.9", package = "openssl", optional = true } [dev-dependencies] -actix-web = { version = "4.0.0-beta.8", default-features = false, features = ["cookies"] } -actix-http = "3.0.0-beta.8" +actix-web = { version = "4.0.0-beta.9", default-features = false, features = ["cookies"] } +actix-http = "3.0.0-beta.10" diff --git a/actix-http-test/README.md b/actix-http-test/README.md index 099fb385d..f75b9c137 100644 --- a/actix-http-test/README.md +++ b/actix-http-test/README.md @@ -3,11 +3,11 @@ > Various helpers for Actix applications to use during testing. [![crates.io](https://img.shields.io/crates/v/actix-http-test?label=latest)](https://crates.io/crates/actix-http-test) -[![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.0.0-beta.4)](https://docs.rs/actix-http-test/3.0.0-beta.4) +[![Documentation](https://docs.rs/actix-http-test/badge.svg?version=3.0.0-beta.5)](https://docs.rs/actix-http-test/3.0.0-beta.5) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http-test)
-[![Dependency Status](https://deps.rs/crate/actix-http-test/3.0.0-beta.4/status.svg)](https://deps.rs/crate/actix-http-test/3.0.0-beta.4) +[![Dependency Status](https://deps.rs/crate/actix-http-test/3.0.0-beta.5/status.svg)](https://deps.rs/crate/actix-http-test/3.0.0-beta.5) [![Download](https://img.shields.io/crates/d/actix-http-test.svg)](https://crates.io/crates/actix-http-test) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) diff --git a/actix-http/CHANGES.md b/actix-http/CHANGES.md index 65206cf9a..775b9e6d5 100644 --- a/actix-http/CHANGES.md +++ b/actix-http/CHANGES.md @@ -1,6 +1,9 @@ # Changes ## Unreleased - 2021-xx-xx + + +## 3.0.0-beta.10 - 2021-09-09 ### Changed * `ContentEncoding` is now marked `#[non_exhaustive]`. [#2377] * Minimum supported Rust version (MSRV) is now 1.51. @@ -16,7 +19,7 @@ [#2377]: https://github.com/actix/actix-web/pull/2377 -## 3.0.0-beta.8 - 2021-08-09 +## 3.0.0-beta.9 - 2021-08-09 ### Fixed * Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977) diff --git a/actix-http/Cargo.toml b/actix-http/Cargo.toml index 54505a215..0e0da8f43 100644 --- a/actix-http/Cargo.toml +++ b/actix-http/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "actix-http" -version = "3.0.0-beta.9" +version = "3.0.0-beta.10" authors = ["Nikolay Kim "] description = "HTTP primitives for the Actix ecosystem" keywords = ["actix", "http", "framework", "async", "futures"] @@ -86,7 +86,7 @@ trust-dns-resolver = { version = "0.20.0", optional = true } [dev-dependencies] actix-server = "2.0.0-beta.3" -actix-http-test = { version = "3.0.0-beta.4", features = ["openssl"] } +actix-http-test = { version = "3.0.0-beta.5", features = ["openssl"] } actix-tls = { version = "3.0.0-beta.5", features = ["openssl"] } async-stream = "0.3" criterion = { version = "0.3", features = ["html_reports"] } diff --git a/actix-http/README.md b/actix-http/README.md index c509eaff8..b58b47f5c 100644 --- a/actix-http/README.md +++ b/actix-http/README.md @@ -3,11 +3,11 @@ > HTTP primitives for the Actix ecosystem. [![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http) -[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.0.0-beta.9)](https://docs.rs/actix-http/3.0.0-beta.9) +[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.0.0-beta.10)](https://docs.rs/actix-http/3.0.0-beta.10) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
-[![dependency status](https://deps.rs/crate/actix-http/3.0.0-beta.9/status.svg)](https://deps.rs/crate/actix-http/3.0.0-beta.9) +[![dependency status](https://deps.rs/crate/actix-http/3.0.0-beta.10/status.svg)](https://deps.rs/crate/actix-http/3.0.0-beta.10) [![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) diff --git a/actix-multipart/CHANGES.md b/actix-multipart/CHANGES.md index 1e768ddf5..c32583f08 100644 --- a/actix-multipart/CHANGES.md +++ b/actix-multipart/CHANGES.md @@ -1,6 +1,9 @@ # Changes ## Unreleased - 2021-xx-xx + + +## 0.4.0-beta.6 - 2021-09-09 * Minimum supported Rust version (MSRV) is now 1.51. diff --git a/actix-multipart/Cargo.toml b/actix-multipart/Cargo.toml index 5103407ca..6db81cca9 100644 --- a/actix-multipart/Cargo.toml +++ b/actix-multipart/Cargo.toml @@ -1,13 +1,11 @@ [package] name = "actix-multipart" -version = "0.4.0-beta.5" +version = "0.4.0-beta.6" authors = ["Nikolay Kim "] description = "Multipart form support for Actix Web" -readme = "README.md" keywords = ["http", "web", "framework", "async", "futures"] homepage = "https://actix.rs" repository = "https://github.com/actix/actix-web.git" -documentation = "https://docs.rs/actix-multipart" license = "MIT OR Apache-2.0" edition = "2018" @@ -16,7 +14,7 @@ name = "actix_multipart" path = "src/lib.rs" [dependencies] -actix-web = { version = "4.0.0-beta.8", default-features = false } +actix-web = { version = "4.0.0-beta.9", default-features = false } actix-utils = "3.0.0" bytes = "1" @@ -31,6 +29,6 @@ twoway = "0.2" [dev-dependencies] actix-rt = "2.2" -actix-http = "3.0.0-beta.8" +actix-http = "3.0.0-beta.10" tokio = { version = "1", features = ["sync"] } tokio-stream = "0.1" diff --git a/actix-multipart/README.md b/actix-multipart/README.md index aed16721c..f3366f50c 100644 --- a/actix-multipart/README.md +++ b/actix-multipart/README.md @@ -3,11 +3,11 @@ > Multipart form support for Actix Web. [![crates.io](https://img.shields.io/crates/v/actix-multipart?label=latest)](https://crates.io/crates/actix-multipart) -[![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.4.0-beta.5)](https://docs.rs/actix-multipart/0.4.0-beta.5) +[![Documentation](https://docs.rs/actix-multipart/badge.svg?version=0.4.0-beta.6)](https://docs.rs/actix-multipart/0.4.0-beta.6) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-multipart.svg)
-[![dependency status](https://deps.rs/crate/actix-multipart/0.4.0-beta.5/status.svg)](https://deps.rs/crate/actix-multipart/0.4.0-beta.5) +[![dependency status](https://deps.rs/crate/actix-multipart/0.4.0-beta.6/status.svg)](https://deps.rs/crate/actix-multipart/0.4.0-beta.6) [![Download](https://img.shields.io/crates/d/actix-multipart.svg)](https://crates.io/crates/actix-multipart) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) diff --git a/actix-router/CHANGES.md b/actix-router/CHANGES.md index 990382512..001903438 100644 --- a/actix-router/CHANGES.md +++ b/actix-router/CHANGES.md @@ -1,6 +1,9 @@ # Changes ## Unreleased - 2021-xx-xx + + +## 0.5.0-beta.2 - 2021-09-09 * Introduce `ResourceDef::join`. [#380] * Disallow prefix routes with tail segments. [#379] * Enforce path separators on dynamic prefixes. [#378] diff --git a/actix-router/Cargo.toml b/actix-router/Cargo.toml index 2a2ce1cc1..e32f0edd6 100644 --- a/actix-router/Cargo.toml +++ b/actix-router/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "actix-router" -version = "0.5.0-beta.1" +version = "0.5.0-beta.2" authors = [ "Nikolay Kim ", "Ali MJ Al-Nasrawy ", @@ -8,7 +8,7 @@ authors = [ ] description = "Resource path matching and router" keywords = ["actix", "router", "routing"] -repository = "https://github.com/actix/actix-net.git" +repository = "https://github.com/actix/actix-web.git" license = "MIT OR Apache-2.0" edition = "2018" diff --git a/actix-test/CHANGES.md b/actix-test/CHANGES.md index dc76ba3fd..58e05c4b6 100644 --- a/actix-test/CHANGES.md +++ b/actix-test/CHANGES.md @@ -1,6 +1,9 @@ # Changes ## Unreleased - 2021-xx-xx + + +## 0.1.0-beta.4 - 2021-09-09 * Minimum supported Rust version (MSRV) is now 1.51. diff --git a/actix-test/Cargo.toml b/actix-test/Cargo.toml index b732cf744..41d32257c 100644 --- a/actix-test/Cargo.toml +++ b/actix-test/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "actix-test" -version = "0.1.0-beta.3" +version = "0.1.0-beta.4" authors = [ "Nikolay Kim ", "Rob Ede ", @@ -20,13 +20,13 @@ openssl = ["tls-openssl", "actix-http/openssl"] [dependencies] actix-codec = "0.4.0" -actix-http = "3.0.0-beta.8" -actix-http-test = { version = "3.0.0-beta.4", features = [] } +actix-http = "3.0.0-beta.10" +actix-http-test = "3.0.0-beta.5" actix-service = "2.0.0" actix-utils = "3.0.0" -actix-web = { version = "4.0.0-beta.8", default-features = false, features = ["cookies"] } +actix-web = { version = "4.0.0-beta.9", default-features = false, features = ["cookies"] } actix-rt = "2.1" -awc = { version = "3.0.0-beta.7", default-features = false, features = ["cookies"] } +awc = { version = "3.0.0-beta.8", default-features = false, features = ["cookies"] } futures-core = { version = "0.3.7", default-features = false, features = ["std"] } futures-util = { version = "0.3.7", default-features = false, features = [] } diff --git a/actix-web-actors/CHANGES.md b/actix-web-actors/CHANGES.md index 084e7b272..2e453063f 100644 --- a/actix-web-actors/CHANGES.md +++ b/actix-web-actors/CHANGES.md @@ -1,6 +1,9 @@ # Changes ## Unreleased - 2021-xx-xx + + +## 4.0.0-beta.7 - 2021-09-09 * Minimum supported Rust version (MSRV) is now 1.51. 
diff --git a/actix-web-actors/Cargo.toml b/actix-web-actors/Cargo.toml index fcb5195b8..ef6bd919d 100644 --- a/actix-web-actors/Cargo.toml +++ b/actix-web-actors/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "actix-web-actors" -version = "4.0.0-beta.6" +version = "4.0.0-beta.7" authors = ["Nikolay Kim "] description = "Actix actors support for Actix Web" keywords = ["actix", "http", "web", "framework", "async"] @@ -16,8 +16,8 @@ path = "src/lib.rs" [dependencies] actix = { version = "0.12.0", default-features = false } actix-codec = "0.4.0" -actix-http = "3.0.0-beta.8" -actix-web = { version = "4.0.0-beta.8", default-features = false } +actix-http = "3.0.0-beta.10" +actix-web = { version = "4.0.0-beta.9", default-features = false } bytes = "1" bytestring = "1" @@ -29,6 +29,6 @@ tokio = { version = "1", features = ["sync"] } actix-rt = "2.2" actix-test = "0.1.0-beta.3" -awc = { version = "3.0.0-beta.7", default-features = false } +awc = { version = "3.0.0-beta.8", default-features = false } env_logger = "0.8" futures-util = { version = "0.3.7", default-features = false } diff --git a/actix-web-actors/README.md b/actix-web-actors/README.md index 2858d3f20..a647e4bc9 100644 --- a/actix-web-actors/README.md +++ b/actix-web-actors/README.md @@ -3,11 +3,11 @@ > Actix actors support for Actix Web. [![crates.io](https://img.shields.io/crates/v/actix-web-actors?label=latest)](https://crates.io/crates/actix-web-actors) -[![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.0.0-beta.6)](https://docs.rs/actix-web-actors/4.0.0-beta.6) +[![Documentation](https://docs.rs/actix-web-actors/badge.svg?version=4.0.0-beta.7)](https://docs.rs/actix-web-actors/4.0.0-beta.7) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![License](https://img.shields.io/crates/l/actix-web-actors.svg)
-[![dependency status](https://deps.rs/crate/actix-web-actors/4.0.0-beta.6/status.svg)](https://deps.rs/crate/actix-web-actors/4.0.0-beta.6) +[![dependency status](https://deps.rs/crate/actix-web-actors/4.0.0-beta.7/status.svg)](https://deps.rs/crate/actix-web-actors/4.0.0-beta.7) [![Download](https://img.shields.io/crates/d/actix-web-actors.svg)](https://crates.io/crates/actix-web-actors) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) diff --git a/actix-web-codegen/CHANGES.md b/actix-web-codegen/CHANGES.md index f0a56b30f..c154d8af4 100644 --- a/actix-web-codegen/CHANGES.md +++ b/actix-web-codegen/CHANGES.md @@ -1,6 +1,9 @@ # Changes ## Unreleased - 2021-xx-xx + + +## 0.5.0-beta.4 - 2021-09-09 * In routing macros, paths are now validated at compile time. [#2350] * Minimum supported Rust version (MSRV) is now 1.51. diff --git a/actix-web-codegen/Cargo.toml b/actix-web-codegen/Cargo.toml index 66f7acf6d..2ad714f40 100644 --- a/actix-web-codegen/Cargo.toml +++ b/actix-web-codegen/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "actix-web-codegen" -version = "0.5.0-beta.3" +version = "0.5.0-beta.4" description = "Routing and runtime macros for Actix Web" readme = "README.md" homepage = "https://actix.rs" @@ -17,13 +17,13 @@ proc-macro = true quote = "1" syn = { version = "1", features = ["full", "parsing"] } proc-macro2 = "1" -actix-router = "0.5.0-beta.1" +actix-router = "0.5.0-beta.2" [dev-dependencies] actix-rt = "2.2" actix-test = "0.1.0-beta.3" actix-utils = "3.0.0" -actix-web = "4.0.0-beta.8" +actix-web = "4.0.0-beta.9" futures-core = { version = "0.3.7", default-features = false, features = ["alloc"] } trybuild = "1" diff --git a/actix-web-codegen/README.md b/actix-web-codegen/README.md index e69cfbbe5..268e8b01d 100644 --- a/actix-web-codegen/README.md +++ b/actix-web-codegen/README.md @@ -3,11 +3,11 @@ > Routing and runtime macros for Actix Web. [![crates.io](https://img.shields.io/crates/v/actix-web-codegen?label=latest)](https://crates.io/crates/actix-web-codegen) -[![Documentation](https://docs.rs/actix-web-codegen/badge.svg?version=0.5.0-beta.3)](https://docs.rs/actix-web-codegen/0.5.0-beta.3) +[![Documentation](https://docs.rs/actix-web-codegen/badge.svg?version=0.5.0-beta.4)](https://docs.rs/actix-web-codegen/0.5.0-beta.4) [![Version](https://img.shields.io/badge/rustc-1.51+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.51.html) ![License](https://img.shields.io/crates/l/actix-web-codegen.svg)
-[![dependency status](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.3/status.svg)](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.3) +[![dependency status](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.4/status.svg)](https://deps.rs/crate/actix-web-codegen/0.5.0-beta.4) [![Download](https://img.shields.io/crates/d/actix-web-codegen.svg)](https://crates.io/crates/actix-web-codegen) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) diff --git a/awc/CHANGES.md b/awc/CHANGES.md index 9c6f258aa..252b62efa 100644 --- a/awc/CHANGES.md +++ b/awc/CHANGES.md @@ -1,11 +1,15 @@ # Changes ## Unreleased - 2021-xx-xx + + +## 3.0.0-beta.8 - 2021-09-09 ### Changed * Send headers within the redirect requests. [#2310] [#2310]: https://github.com/actix/actix-web/pull/2310 + ## 3.0.0-beta.7 - 2021-06-26 ### Changed * Change compression algorithm features flags. [#2250] diff --git a/awc/Cargo.toml b/awc/Cargo.toml index 016d3b48b..262c3dce5 100644 --- a/awc/Cargo.toml +++ b/awc/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "awc" -version = "3.0.0-beta.7" +version = "3.0.0-beta.8" authors = [ "Nikolay Kim ", "fakeshadow <24548779@qq.com>", @@ -55,7 +55,7 @@ __compress = [] [dependencies] actix-codec = "0.4.0" actix-service = "2.0.0" -actix-http = "3.0.0-beta.8" +actix-http = "3.0.0-beta.10" actix-rt = { version = "2.1", default-features = false } base64 = "0.13" @@ -77,9 +77,9 @@ tls-openssl = { version = "0.10.9", package = "openssl", optional = true } tls-rustls = { version = "0.19.0", package = "rustls", optional = true, features = ["dangerous_configuration"] } [dev-dependencies] -actix-web = { version = "4.0.0-beta.8", features = ["openssl"] } -actix-http = { version = "3.0.0-beta.8", features = ["openssl"] } -actix-http-test = { version = "3.0.0-beta.4", features = ["openssl"] } +actix-web = { version = "4.0.0-beta.9", features = ["openssl"] } +actix-http = { version = "3.0.0-beta.10", features = ["openssl"] } +actix-http-test = { version = "3.0.0-beta.5", features = ["openssl"] } actix-utils = "3.0.0" actix-server = "2.0.0-beta.3" actix-tls = { version = "3.0.0-beta.5", features = ["openssl", "rustls"] } diff --git a/awc/README.md b/awc/README.md index fe91383ca..868bc5cae 100644 --- a/awc/README.md +++ b/awc/README.md @@ -3,9 +3,9 @@ > Async HTTP and WebSocket client library. 
[![crates.io](https://img.shields.io/crates/v/awc?label=latest)](https://crates.io/crates/awc) -[![Documentation](https://docs.rs/awc/badge.svg?version=3.0.0-beta.7)](https://docs.rs/awc/3.0.0-beta.7) +[![Documentation](https://docs.rs/awc/badge.svg?version=3.0.0-beta.8)](https://docs.rs/awc/3.0.0-beta.8) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/awc) -[![Dependency Status](https://deps.rs/crate/awc/3.0.0-beta.7/status.svg)](https://deps.rs/crate/awc/3.0.0-beta.7) +[![Dependency Status](https://deps.rs/crate/awc/3.0.0-beta.8/status.svg)](https://deps.rs/crate/awc/3.0.0-beta.8) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) ## Documentation & Resources From 46699e34299ed14401df8ea8022efe47a83041e0 Mon Sep 17 00:00:00 2001 From: Rob Ede Date: Sat, 11 Sep 2021 00:01:01 +0100 Subject: [PATCH 22/23] remove time dep from actix-http (#2383) --- Cargo.toml | 2 +- actix-http-test/Cargo.toml | 1 - actix-http-test/src/lib.rs | 11 ++- actix-http/Cargo.toml | 7 +- actix-http/src/config.rs | 29 +++---- actix-http/src/header/shared/http_date.rs | 82 +++++++++++++++++++ actix-http/src/header/shared/httpdate.rs | 97 ----------------------- actix-http/src/header/shared/mod.rs | 4 +- actix-http/src/lib.rs | 1 - actix-http/src/time_parser.rs | 72 ----------------- actix-http/tests/test_server.rs | 1 + src/middleware/logger.rs | 6 +- 12 files changed, 109 insertions(+), 204 deletions(-) create mode 100644 actix-http/src/header/shared/http_date.rs delete mode 100644 actix-http/src/header/shared/httpdate.rs delete mode 100644 actix-http/src/time_parser.rs diff --git a/Cargo.toml b/Cargo.toml index 60525f3ac..73a52182c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -101,7 +101,7 @@ serde_json = "1.0" serde_urlencoded = "0.7" smallvec = "1.6.1" socket2 = "0.4.0" -time = { version = "0.2.23", default-features = false, features = ["std"] } +time = { version = "0.3", default-features = false, features = ["formatting"] } url = "2.1" [dev-dependencies] diff --git a/actix-http-test/Cargo.toml b/actix-http-test/Cargo.toml index e7fe7adc0..ee4971a1e 100644 --- a/actix-http-test/Cargo.toml +++ b/actix-http-test/Cargo.toml @@ -47,7 +47,6 @@ serde = "1.0" serde_json = "1.0" slab = "0.4" serde_urlencoded = "0.7" -time = { version = "0.2.23", default-features = false, features = ["std"] } tls-openssl = { version = "0.10.9", package = "openssl", optional = true } [dev-dependencies] diff --git a/actix-http-test/src/lib.rs b/actix-http-test/src/lib.rs index 0f126c99a..ec7b46ffb 100644 --- a/actix-http-test/src/lib.rs +++ b/actix-http-test/src/lib.rs @@ -7,8 +7,7 @@ #[cfg(feature = "openssl")] extern crate tls_openssl as openssl; -use std::sync::mpsc; -use std::{net, thread, time}; +use std::{net, sync::mpsc, thread, time::Duration}; use actix_codec::{AsyncRead, AsyncWrite, Framed}; use actix_rt::{net::TcpStream, System}; @@ -95,15 +94,15 @@ pub async fn test_server_with_addr>( .set_alpn_protos(b"\x02h2\x08http/1.1") .map_err(|e| log::error!("Can not set alpn protocol: {:?}", e)); Connector::new() - .conn_lifetime(time::Duration::from_secs(0)) - .timeout(time::Duration::from_millis(30000)) + .conn_lifetime(Duration::from_secs(0)) + .timeout(Duration::from_millis(30000)) .ssl(builder.build()) } #[cfg(not(feature = "openssl"))] { Connector::new() - .conn_lifetime(time::Duration::from_secs(0)) - .timeout(time::Duration::from_millis(30000)) + .conn_lifetime(Duration::from_secs(0)) + .timeout(Duration::from_millis(30000)) } }; diff --git 
a/actix-http/Cargo.toml b/actix-http/Cargo.toml index 0e0da8f43..889c91331 100644 --- a/actix-http/Cargo.toml +++ b/actix-http/Cargo.toml @@ -60,6 +60,7 @@ futures-util = { version = "0.3.7", default-features = false, features = ["alloc h2 = "0.3.1" http = "0.2.2" httparse = "1.5.1" +httpdate = "1.0.1" itoa = "0.4" language-tags = "0.3" local-channel = "0.1" @@ -70,11 +71,8 @@ percent-encoding = "2.1" pin-project = "1.0.0" pin-project-lite = "0.2" rand = "0.8" -regex = "1.3" -serde = "1.0" sha-1 = "0.9" smallvec = "1.6.1" -time = { version = "0.2.23", default-features = false, features = ["std"] } tokio = { version = "1.2", features = ["sync"] } # compression @@ -92,11 +90,12 @@ async-stream = "0.3" criterion = { version = "0.3", features = ["html_reports"] } env_logger = "0.8" rcgen = "0.8" +regex = "1.3" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" tls-openssl = { version = "0.10", package = "openssl" } tls-rustls = { version = "0.19", package = "rustls" } -webpki = { version = "0.21.0" } +webpki = { version = "0.21" } [[example]] name = "ws" diff --git a/actix-http/src/config.rs b/actix-http/src/config.rs index 97750ff76..069099b8c 100644 --- a/actix-http/src/config.rs +++ b/actix-http/src/config.rs @@ -1,18 +1,19 @@ -use std::cell::Cell; -use std::fmt::Write; -use std::rc::Rc; -use std::time::Duration; -use std::{fmt, net}; +use std::{ + cell::Cell, + fmt::{self, Write}, + net, + rc::Rc, + time::{Duration, SystemTime}, +}; use actix_rt::{ task::JoinHandle, time::{interval, sleep_until, Instant, Sleep}, }; use bytes::BytesMut; -use time::OffsetDateTime; /// "Sun, 06 Nov 1994 08:49:37 GMT".len() -const DATE_VALUE_LENGTH: usize = 29; +pub(crate) const DATE_VALUE_LENGTH: usize = 29; #[derive(Debug, PartialEq, Clone, Copy)] /// Server keep-alive setting @@ -206,12 +207,7 @@ impl Date { fn update(&mut self) { self.pos = 0; - write!( - self, - "{}", - OffsetDateTime::now_utc().format("%a, %d %b %Y %H:%M:%S GMT") - ) - .unwrap(); + write!(self, "{}", httpdate::fmt_http_date(SystemTime::now())).unwrap(); } } @@ -269,11 +265,11 @@ impl DateService { } // TODO: move to a util module for testing all spawn handle drop style tasks. -#[cfg(test)] /// Test Module for checking the drop state of certain async tasks that are spawned /// with `actix_rt::spawn` /// /// The target task must explicitly generate `NotifyOnDrop` when spawn the task +#[cfg(test)] mod notify_on_drop { use std::cell::RefCell; @@ -283,9 +279,8 @@ mod notify_on_drop { /// Check if the spawned task is dropped. /// - /// # Panic: - /// - /// When there was no `NotifyOnDrop` instance on current thread + /// # Panics + /// Panics when there was no `NotifyOnDrop` instance on current thread. pub(crate) fn is_dropped() -> bool { NOTIFY_DROPPED.with(|bool| { bool.borrow() diff --git a/actix-http/src/header/shared/http_date.rs b/actix-http/src/header/shared/http_date.rs new file mode 100644 index 000000000..3441f90af --- /dev/null +++ b/actix-http/src/header/shared/http_date.rs @@ -0,0 +1,82 @@ +use std::{fmt, io::Write, str::FromStr, time::SystemTime}; + +use bytes::BytesMut; +use http::header::{HeaderValue, InvalidHeaderValue}; + +use crate::{ + config::DATE_VALUE_LENGTH, error::ParseError, header::IntoHeaderValue, + helpers::MutWriter, +}; + +/// A timestamp with HTTP formatting and parsing. 
+#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct HttpDate(SystemTime); + +impl FromStr for HttpDate { + type Err = ParseError; + + fn from_str(s: &str) -> Result { + match httpdate::parse_http_date(s) { + Ok(sys_time) => Ok(HttpDate(sys_time)), + Err(_) => Err(ParseError::Header), + } + } +} + +impl fmt::Display for HttpDate { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let date_str = httpdate::fmt_http_date(self.0); + f.write_str(&date_str) + } +} + +impl IntoHeaderValue for HttpDate { + type Error = InvalidHeaderValue; + + fn try_into_value(self) -> Result { + let mut buf = BytesMut::with_capacity(DATE_VALUE_LENGTH); + let mut wrt = MutWriter(&mut buf); + + // unwrap: date output is known to be well formed and of known length + write!(wrt, "{}", httpdate::fmt_http_date(self.0)).unwrap(); + + HeaderValue::from_maybe_shared(buf.split().freeze()) + } +} + +impl From for HttpDate { + fn from(sys_time: SystemTime) -> HttpDate { + HttpDate(sys_time) + } +} + +impl From for SystemTime { + fn from(HttpDate(sys_time): HttpDate) -> SystemTime { + sys_time + } +} + +#[cfg(test)] +mod tests { + use std::time::Duration; + + use super::*; + + #[test] + fn date_header() { + macro_rules! assert_parsed_date { + ($case:expr, $exp:expr) => { + assert_eq!($case.parse::().unwrap(), $exp); + }; + } + + // 784198117 = SystemTime::from(datetime!(1994-11-07 08:48:37).assume_utc()).duration_since(SystemTime::UNIX_EPOCH)); + let nov_07 = HttpDate(SystemTime::UNIX_EPOCH + Duration::from_secs(784198117)); + + assert_parsed_date!("Mon, 07 Nov 1994 08:48:37 GMT", nov_07); + assert_parsed_date!("Monday, 07-Nov-94 08:48:37 GMT", nov_07); + assert_parsed_date!("Mon Nov 7 08:48:37 1994", nov_07); + + assert!("this-is-no-date".parse::().is_err()); + } +} diff --git a/actix-http/src/header/shared/httpdate.rs b/actix-http/src/header/shared/httpdate.rs deleted file mode 100644 index 18278a6d8..000000000 --- a/actix-http/src/header/shared/httpdate.rs +++ /dev/null @@ -1,97 +0,0 @@ -use std::{ - fmt, - io::Write, - str::FromStr, - time::{SystemTime, UNIX_EPOCH}, -}; - -use bytes::buf::BufMut; -use bytes::BytesMut; -use http::header::{HeaderValue, InvalidHeaderValue}; -use time::{OffsetDateTime, PrimitiveDateTime, UtcOffset}; - -use crate::error::ParseError; -use crate::header::IntoHeaderValue; -use crate::time_parser; - -/// A timestamp with HTTP formatting and parsing. 
-#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct HttpDate(OffsetDateTime); - -impl FromStr for HttpDate { - type Err = ParseError; - - fn from_str(s: &str) -> Result { - match time_parser::parse_http_date(s) { - Some(t) => Ok(HttpDate(t.assume_utc())), - None => Err(ParseError::Header), - } - } -} - -impl fmt::Display for HttpDate { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&self.0.format("%a, %d %b %Y %H:%M:%S GMT"), f) - } -} - -impl From for HttpDate { - fn from(sys: SystemTime) -> HttpDate { - HttpDate(PrimitiveDateTime::from(sys).assume_utc()) - } -} - -impl IntoHeaderValue for HttpDate { - type Error = InvalidHeaderValue; - - fn try_into_value(self) -> Result { - let mut wrt = BytesMut::with_capacity(29).writer(); - write!( - wrt, - "{}", - self.0 - .to_offset(UtcOffset::UTC) - .format("%a, %d %b %Y %H:%M:%S GMT") - ) - .unwrap(); - HeaderValue::from_maybe_shared(wrt.get_mut().split().freeze()) - } -} - -impl From for SystemTime { - fn from(date: HttpDate) -> SystemTime { - let dt = date.0; - let epoch = OffsetDateTime::unix_epoch(); - - UNIX_EPOCH + (dt - epoch) - } -} - -#[cfg(test)] -mod tests { - use super::HttpDate; - use time::{date, time, PrimitiveDateTime}; - - #[test] - fn test_date() { - let nov_07 = HttpDate( - PrimitiveDateTime::new(date!(1994 - 11 - 07), time!(8:48:37)).assume_utc(), - ); - - assert_eq!( - "Sun, 07 Nov 1994 08:48:37 GMT".parse::().unwrap(), - nov_07 - ); - assert_eq!( - "Sunday, 07-Nov-94 08:48:37 GMT" - .parse::() - .unwrap(), - nov_07 - ); - assert_eq!( - "Sun Nov 7 08:48:37 1994".parse::().unwrap(), - nov_07 - ); - assert!("this-is-no-date".parse::().is_err()); - } -} diff --git a/actix-http/src/header/shared/mod.rs b/actix-http/src/header/shared/mod.rs index b8f9173f9..274e13146 100644 --- a/actix-http/src/header/shared/mod.rs +++ b/actix-http/src/header/shared/mod.rs @@ -3,12 +3,12 @@ mod charset; mod content_encoding; mod extended; -mod httpdate; +mod http_date; mod quality_item; pub use self::charset::Charset; pub use self::content_encoding::ContentEncoding; pub use self::extended::{parse_extended_value, ExtendedValue}; -pub use self::httpdate::HttpDate; +pub use self::http_date::HttpDate; pub use self::quality_item::{q, qitem, Quality, QualityItem}; pub use language_tags::LanguageTag; diff --git a/actix-http/src/lib.rs b/actix-http/src/lib.rs index 17ee3ff29..3ad8d095e 100644 --- a/actix-http/src/lib.rs +++ b/actix-http/src/lib.rs @@ -44,7 +44,6 @@ mod request; mod response; mod response_builder; mod service; -mod time_parser; pub mod error; pub mod h1; diff --git a/actix-http/src/time_parser.rs b/actix-http/src/time_parser.rs deleted file mode 100644 index fd82fd42e..000000000 --- a/actix-http/src/time_parser.rs +++ /dev/null @@ -1,72 +0,0 @@ -use time::{Date, OffsetDateTime, PrimitiveDateTime}; - -/// Attempt to parse a `time` string as one of either RFC 1123, RFC 850, or asctime. -pub(crate) fn parse_http_date(time: &str) -> Option { - try_parse_rfc_1123(time) - .or_else(|| try_parse_rfc_850(time)) - .or_else(|| try_parse_asctime(time)) -} - -/// Attempt to parse a `time` string as a RFC 1123 formatted date time string. -/// -/// Eg: `Fri, 12 Feb 2021 00:14:29 GMT` -fn try_parse_rfc_1123(time: &str) -> Option { - time::parse(time, "%a, %d %b %Y %H:%M:%S").ok() -} - -/// Attempt to parse a `time` string as a RFC 850 formatted date time string. 
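The hand-rolled RFC 1123 / RFC 850 / asctime parsing deleted here is replaced by the `httpdate` crate added to `Cargo.toml` above: `httpdate::parse_http_date` accepts the same three formats, and `httpdate::fmt_http_date` always emits the 29-byte IMF-fixdate form used for the `Date` header (`DATE_VALUE_LENGTH`). A minimal round-trip sketch in the style of the crate's test modules:

```
use std::time::SystemTime;

#[test]
fn http_date_round_trip() {
    // Parse an RFC 1123 date into a SystemTime...
    let parsed: SystemTime =
        httpdate::parse_http_date("Sun, 06 Nov 1994 08:49:37 GMT").expect("valid HTTP date");

    // ...and format it back into the canonical 29-byte form.
    assert_eq!(
        httpdate::fmt_http_date(parsed),
        "Sun, 06 Nov 1994 08:49:37 GMT"
    );
}
```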
-/// -/// Eg: `Wednesday, 11-Jan-21 13:37:41 UTC` -fn try_parse_rfc_850(time: &str) -> Option { - let dt = PrimitiveDateTime::parse(time, "%A, %d-%b-%y %H:%M:%S").ok()?; - - // If the `time` string contains a two-digit year, then as per RFC 2616 ยง 19.3, - // we consider the year as part of this century if it's within the next 50 years, - // otherwise we consider as part of the previous century. - - let now = OffsetDateTime::now_utc(); - let century_start_year = (now.year() / 100) * 100; - let mut expanded_year = century_start_year + dt.year(); - - if expanded_year > now.year() + 50 { - expanded_year -= 100; - } - - let date = Date::try_from_ymd(expanded_year, dt.month(), dt.day()).ok()?; - Some(PrimitiveDateTime::new(date, dt.time())) -} - -/// Attempt to parse a `time` string using ANSI C's `asctime` format. -/// -/// Eg: `Wed Feb 13 15:46:11 2013` -fn try_parse_asctime(time: &str) -> Option { - time::parse(time, "%a %b %_d %H:%M:%S %Y").ok() -} - -#[cfg(test)] -mod tests { - use time::{date, time}; - - use super::*; - - #[test] - fn test_rfc_850_year_shift() { - let date = try_parse_rfc_850("Friday, 19-Nov-82 16:14:55 EST").unwrap(); - assert_eq!(date, date!(1982 - 11 - 19).with_time(time!(16:14:55))); - - let date = try_parse_rfc_850("Wednesday, 11-Jan-62 13:37:41 EST").unwrap(); - assert_eq!(date, date!(2062 - 01 - 11).with_time(time!(13:37:41))); - - let date = try_parse_rfc_850("Wednesday, 11-Jan-21 13:37:41 EST").unwrap(); - assert_eq!(date, date!(2021 - 01 - 11).with_time(time!(13:37:41))); - - let date = try_parse_rfc_850("Wednesday, 11-Jan-23 13:37:41 EST").unwrap(); - assert_eq!(date, date!(2023 - 01 - 11).with_time(time!(13:37:41))); - - let date = try_parse_rfc_850("Wednesday, 11-Jan-99 13:37:41 EST").unwrap(); - assert_eq!(date, date!(1999 - 01 - 11).with_time(time!(13:37:41))); - - let date = try_parse_rfc_850("Wednesday, 11-Jan-00 13:37:41 EST").unwrap(); - assert_eq!(date, date!(2000 - 01 - 11).with_time(time!(13:37:41))); - } -} diff --git a/actix-http/tests/test_server.rs b/actix-http/tests/test_server.rs index 1e6d0b637..c04aeae00 100644 --- a/actix-http/tests/test_server.rs +++ b/actix-http/tests/test_server.rs @@ -183,6 +183,7 @@ async fn test_chunked_payload() { Some(caps) => caps.get(1).unwrap().as_str().parse().unwrap(), None => panic!("Failed to find size in HTTP Response: {}", data), }; + size }; diff --git a/src/middleware/logger.rs b/src/middleware/logger.rs index 9574b02f7..961eca496 100644 --- a/src/middleware/logger.rs +++ b/src/middleware/logger.rs @@ -18,7 +18,7 @@ use bytes::Bytes; use futures_core::ready; use log::{debug, warn}; use regex::{Regex, RegexSet}; -use time::OffsetDateTime; +use time::{format_description::well_known::Rfc3339, OffsetDateTime}; use crate::{ dev::{BodySize, MessageBody}, @@ -538,7 +538,7 @@ impl FormatText { }; } FormatText::UrlPath => *self = FormatText::Str(req.path().to_string()), - FormatText::RequestTime => *self = FormatText::Str(now.format("%Y-%m-%dT%H:%M:%S")), + FormatText::RequestTime => *self = FormatText::Str(now.format(&Rfc3339).unwrap()), FormatText::RequestHeader(ref name) => { let s = if let Some(val) = req.headers().get(name) { if let Ok(s) = val.to_str() { @@ -767,7 +767,7 @@ mod tests { Ok(()) }; let s = format!("{}", FormatDisplay(&render)); - assert!(s.contains(&now.format("%Y-%m-%dT%H:%M:%S"))); + assert!(s.contains(&now.format(&Rfc3339).unwrap())); } #[actix_rt::test] From 8ae278cb68eda1e6c4fbd3463b018e0f0fe1c313 Mon Sep 17 00:00:00 2001 From: Arniu Tseng Date: Sat, 11 Sep 2021 08:11:16 +0800 Subject: 
[PATCH 23/23] Remove `FromRequest::Config` (#2233) Co-authored-by: Jonas Platte Co-authored-by: Igor Aleksanov Co-authored-by: Rob Ede --- CHANGES.md | 3 ++ Cargo.toml | 1 + MIGRATION.md | 2 ++ actix-files/src/path_buf.rs | 1 - actix-multipart/src/extractor.rs | 1 - src/data.rs | 1 - src/extract.rs | 55 ++++++++++++++++++++------------ src/info.rs | 2 -- src/request.rs | 1 - src/request_data.rs | 1 - src/types/either.rs | 1 - src/types/form.rs | 30 +++++++++-------- src/types/header.rs | 1 - src/types/json.rs | 1 - src/types/path.rs | 3 +- src/types/payload.rs | 17 ++++------ src/types/query.rs | 3 +- 17 files changed, 66 insertions(+), 58 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 398ac477a..d8831602d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,7 +1,10 @@ # Changes ## Unreleased - 2021-xx-xx +### Changed +* Asscociated type `FromRequest::Config` was removed. [#2233] +[#2233]: https://github.com/actix/actix-web/pull/2233 ## 4.0.0-beta.9 - 2021-09-09 ### Added diff --git a/Cargo.toml b/Cargo.toml index 73a52182c..dc7e9af3f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,6 +18,7 @@ edition = "2018" [package.metadata.docs.rs] # features that docs.rs will build with features = ["openssl", "rustls", "compress-brotli", "compress-gzip", "compress-zstd", "cookies", "secure-cookies"] +rustdoc-args = ["--cfg", "docsrs"] [lib] name = "actix_web" diff --git a/MIGRATION.md b/MIGRATION.md index 9a70adb95..d53bd7bf8 100644 --- a/MIGRATION.md +++ b/MIGRATION.md @@ -11,6 +11,8 @@ Alternatively, explicitly require trailing slashes: `NormalizePath::new(TrailingSlash::Always)`. +* The `type Config` of `FromRequest` was removed. + * Feature flag `compress` has been split into its supported algorithm (brotli, gzip, zstd). By default all compression algorithms are enabled. To select algorithm you want to include with `middleware::Compress` use following flags: diff --git a/actix-files/src/path_buf.rs b/actix-files/src/path_buf.rs index 8a87acd5d..76f589307 100644 --- a/actix-files/src/path_buf.rs +++ b/actix-files/src/path_buf.rs @@ -59,7 +59,6 @@ impl AsRef for PathBufWrap { impl FromRequest for PathBufWrap { type Error = UriSegmentError; type Future = Ready>; - type Config = (); fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { ready(req.match_info().path().parse()) diff --git a/actix-multipart/src/extractor.rs b/actix-multipart/src/extractor.rs index c87f8cc2d..1ad1f203d 100644 --- a/actix-multipart/src/extractor.rs +++ b/actix-multipart/src/extractor.rs @@ -33,7 +33,6 @@ use crate::server::Multipart; impl FromRequest for Multipart { type Error = Error; type Future = Ready>; - type Config = (); #[inline] fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { diff --git a/src/data.rs b/src/data.rs index 174faba37..9d4fe0840 100644 --- a/src/data.rs +++ b/src/data.rs @@ -120,7 +120,6 @@ where } impl FromRequest for Data { - type Config = (); type Error = Error; type Future = Ready>; diff --git a/src/extract.rs b/src/extract.rs index 592f7ab83..39062dd1c 100644 --- a/src/extract.rs +++ b/src/extract.rs @@ -13,13 +13,42 @@ use futures_core::ready; use crate::{dev::Payload, Error, HttpRequest}; -/// Trait implemented by types that can be extracted from request. +/// A type that implements [`FromRequest`] is called an **extractor** and can extract data +/// from the request. Examples of types that implement this trait are [`Json`], [`Form`], [`Path`]. /// -/// Types that implement this trait can be used with `Route` handlers. 
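With `FromRequest::Config` and the `configure` helper removed, extractor behaviour is tuned solely by registering the matching config type via `app_data`, as the table added to the `FromRequest` docs below spells out; when nothing is registered, each extractor falls back to its built-in default (for example `Form`'s 16 kB limit further down in this patch). A rough sketch of the registration pattern; the limits and the placeholder handler are illustrative, not part of this change:

```
use actix_web::{error, web, App, HttpResponse};

let app = App::new()
    // Configure `web::Json<T>` extractors: smaller body limit, custom error response.
    .app_data(web::JsonConfig::default().limit(4096).error_handler(|err, _req| {
        error::InternalError::from_response(err, HttpResponse::BadRequest().finish()).into()
    }))
    // Configure `web::Form<T>` extractors: raise the default 16 kB limit.
    .app_data(web::FormConfig::default().limit(64 * 1024))
    .service(web::resource("/submit").route(web::post().to(|| HttpResponse::Ok())));
```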
+/// An extractor can be customized by injecting the corresponding configuration with one of: +/// +/// - [`App::app_data()`](`crate::App::app_data`) +/// - [`Scope::app_data()`](`crate::Scope::app_data`) +/// - [`Resource::app_data()`](`crate::Resource::app_data`) +/// +/// Here are some built-in extractors and their corresponding configuration. +/// Please refer to the respective documentation for details. +/// +/// | Extractor | Configuration | +/// |-------------|-------------------| +/// | [`Json`] | [`JsonConfig`] | +/// | [`Form`] | [`FormConfig`] | +/// | [`Path`] | [`PathConfig`] | +/// | [`Query`] | [`QueryConfig`] | +/// | [`Payload`] | [`PayloadConfig`] | +/// | [`String`] | [`PayloadConfig`] | +/// | [`Bytes`] | [`PayloadConfig`] | +/// +/// [`Json`]: crate::web::Json +/// [`JsonConfig`]: crate::web::JsonConfig +/// [`Form`]: crate::web::Form +/// [`FormConfig`]: crate::web::FormConfig +/// [`Path`]: crate::web::Path +/// [`PathConfig`]: crate::web::PathConfig +/// [`Query`]: crate::web::Query +/// [`QueryConfig`]: crate::web::QueryConfig +/// [`Payload`]: crate::web::Payload +/// [`PayloadConfig`]: crate::web::PayloadConfig +/// [`String`]: FromRequest#impl-FromRequest-for-String +/// [`Bytes`]: crate::web::Bytes#impl-FromRequest +#[cfg_attr(docsrs, doc(alias = "Extractor"))] pub trait FromRequest: Sized { - /// Configuration for this extractor. - type Config: Default + 'static; - /// The associated error which can be returned. type Error: Into; @@ -35,14 +64,6 @@ pub trait FromRequest: Sized { fn extract(req: &HttpRequest) -> Self::Future { Self::from_request(req, &mut Payload::None) } - - /// Create and configure config instance. - fn configure(f: F) -> Self::Config - where - F: FnOnce(Self::Config) -> Self::Config, - { - f(Self::Config::default()) - } } /// Optionally extract a field from the request @@ -65,7 +86,6 @@ pub trait FromRequest: Sized { /// impl FromRequest for Thing { /// type Error = Error; /// type Future = Ready>; -/// type Config = (); /// /// fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future { /// if rand::random() { @@ -100,7 +120,6 @@ where { type Error = Error; type Future = FromRequestOptFuture; - type Config = T::Config; #[inline] fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { @@ -156,7 +175,6 @@ where /// impl FromRequest for Thing { /// type Error = Error; /// type Future = Ready>; -/// type Config = (); /// /// fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future { /// if rand::random() { @@ -189,7 +207,6 @@ where { type Error = Error; type Future = FromRequestResFuture; - type Config = T::Config; #[inline] fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { @@ -233,7 +250,6 @@ where impl FromRequest for Uri { type Error = Infallible; type Future = Ready>; - type Config = (); fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { ok(req.uri().clone()) @@ -255,7 +271,6 @@ impl FromRequest for Uri { impl FromRequest for Method { type Error = Infallible; type Future = Ready>; - type Config = (); fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { ok(req.method().clone()) @@ -266,7 +281,6 @@ impl FromRequest for Method { impl FromRequest for () { type Error = Infallible; type Future = Ready>; - type Config = (); fn from_request(_: &HttpRequest, _: &mut Payload) -> Self::Future { ok(()) @@ -306,7 +320,6 @@ macro_rules! 
tuple_from_req ({$fut_type:ident, $(($n:tt, $T:ident)),+} => { { type Error = Error; type Future = $fut_type<$($T),+>; - type Config = ($($T::Config),+); fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { $fut_type { diff --git a/src/info.rs b/src/info.rs index de8ad67ee..d928a1e63 100644 --- a/src/info.rs +++ b/src/info.rs @@ -209,7 +209,6 @@ impl ConnectionInfo { impl FromRequest for ConnectionInfo { type Error = Infallible; type Future = Ready>; - type Config = (); fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { ok(req.connection_info().clone()) @@ -252,7 +251,6 @@ impl ResponseError for MissingPeerAddr {} impl FromRequest for PeerAddr { type Error = MissingPeerAddr; type Future = Ready>; - type Config = (); fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { match req.peer_addr() { diff --git a/src/request.rs b/src/request.rs index c25a5397a..0027f9b4b 100644 --- a/src/request.rs +++ b/src/request.rs @@ -358,7 +358,6 @@ impl Drop for HttpRequest { /// } /// ``` impl FromRequest for HttpRequest { - type Config = (); type Error = Error; type Future = Ready>; diff --git a/src/request_data.rs b/src/request_data.rs index 581943015..575dc1eb3 100644 --- a/src/request_data.rs +++ b/src/request_data.rs @@ -64,7 +64,6 @@ impl Deref for ReqData { } impl FromRequest for ReqData { - type Config = (); type Error = Error; type Future = Ready>; diff --git a/src/types/either.rs b/src/types/either.rs index 35e63cec9..5700b63c7 100644 --- a/src/types/either.rs +++ b/src/types/either.rs @@ -187,7 +187,6 @@ where { type Error = EitherExtractError; type Future = EitherExtractFut; - type Config = (); fn from_request(req: &HttpRequest, payload: &mut dev::Payload) -> Self::Future { EitherExtractFut { diff --git a/src/types/form.rs b/src/types/form.rs index 2ace0e063..71100eb97 100644 --- a/src/types/form.rs +++ b/src/types/form.rs @@ -126,20 +126,12 @@ impl FromRequest for Form where T: DeserializeOwned + 'static, { - type Config = FormConfig; type Error = Error; type Future = FormExtractFut; #[inline] fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { - let (limit, err_handler) = req - .app_data::() - .or_else(|| { - req.app_data::>() - .map(|d| d.as_ref()) - }) - .map(|c| (c.limit, c.err_handler.clone())) - .unwrap_or((16384, None)); + let FormConfig { limit, err_handler } = FormConfig::from_req(req).clone(); FormExtractFut { fut: UrlEncoded::new(req, payload).limit(limit), @@ -241,14 +233,26 @@ impl FormConfig { self.err_handler = Some(Rc::new(f)); self } + + /// Extract payload config from app data. + /// + /// Checks both `T` and `Data`, in that order, and falls back to the default payload config. + fn from_req(req: &HttpRequest) -> &Self { + req.app_data::() + .or_else(|| req.app_data::>().map(|d| d.as_ref())) + .unwrap_or(&DEFAULT_CONFIG) + } } +/// Allow shared refs used as default. 
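The `from_req` lookup above treats `FormConfig` as ordinary app data, so it can be registered at the app, scope, or resource level, and the registration closest to the route is the one found. A minimal sketch of that pattern; the `/submit` route and the byte limits are illustrative values only:

```rust
use std::collections::HashMap;

use actix_web::{web, App, HttpResponse, HttpServer};

async fn submit(form: web::Form<HashMap<String, String>>) -> HttpResponse {
    HttpResponse::Ok().body(format!("{:?}", form.into_inner()))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // App-wide limit for all `web::Form<T>` extractors; this replaces
            // the removed `FromRequest::configure` hook.
            .app_data(web::FormConfig::default().limit(4_096))
            .service(
                web::resource("/submit")
                    // A config registered closer to the route takes precedence.
                    .app_data(web::FormConfig::default().limit(65_536))
                    .route(web::post().to(submit)),
            )
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```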
+const DEFAULT_CONFIG: FormConfig = FormConfig { + limit: 16_384, // 2^14 bytes (~16kB) + err_handler: None, +}; + impl Default for FormConfig { fn default() -> Self { - FormConfig { - limit: 16_384, // 2^14 bytes (~16kB) - err_handler: None, - } + DEFAULT_CONFIG } } diff --git a/src/types/header.rs b/src/types/header.rs index 9b64f445d..6ea77faf6 100644 --- a/src/types/header.rs +++ b/src/types/header.rs @@ -62,7 +62,6 @@ where { type Error = ParseError; type Future = Ready>; - type Config = (); #[inline] fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { diff --git a/src/types/json.rs b/src/types/json.rs index 8c2f51a68..19443ea96 100644 --- a/src/types/json.rs +++ b/src/types/json.rs @@ -130,7 +130,6 @@ impl Responder for Json { impl FromRequest for Json { type Error = Error; type Future = JsonExtractFut; - type Config = JsonConfig; #[inline] fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { diff --git a/src/types/path.rs b/src/types/path.rs index 4052646e3..aed897fa9 100644 --- a/src/types/path.rs +++ b/src/types/path.rs @@ -97,12 +97,11 @@ where { type Error = Error; type Future = Ready>; - type Config = PathConfig; #[inline] fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { let error_handler = req - .app_data::() + .app_data::() .and_then(|c| c.ehandler.clone()); ready( diff --git a/src/types/payload.rs b/src/types/payload.rs index 188da6201..46ad96beb 100644 --- a/src/types/payload.rs +++ b/src/types/payload.rs @@ -63,7 +63,6 @@ impl Stream for Payload { /// See [here](#usage) for example of usage as an extractor. impl FromRequest for Payload { - type Config = PayloadConfig; type Error = Error; type Future = Ready>; @@ -90,7 +89,6 @@ impl FromRequest for Payload { /// } /// ``` impl FromRequest for Bytes { - type Config = PayloadConfig; type Error = Error; type Future = Either>>; @@ -126,8 +124,7 @@ impl<'a> Future for BytesExtractFut { /// /// Text extractor automatically decode body according to the request's charset. /// -/// [**PayloadConfig**](PayloadConfig) allows to configure -/// extraction process. +/// Use [`PayloadConfig`] to configure extraction process. /// /// # Examples /// ``` @@ -139,7 +136,6 @@ impl<'a> Future for BytesExtractFut { /// format!("Body {}!", text) /// } impl FromRequest for String { - type Config = PayloadConfig; type Error = Error; type Future = Either>>; @@ -198,14 +194,15 @@ fn bytes_to_string(body: Bytes, encoding: &'static Encoding) -> Result fmt::Display for Query { impl FromRequest for Query { type Error = Error; type Future = Ready>; - type Config = QueryConfig; #[inline] fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future { let error_handler = req - .app_data::() + .app_data::() .and_then(|c| c.err_handler.clone()); serde_urlencoded::from_str::(req.query_string())
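A sketch of the same pattern for `Query`: the error handler travels through app data and is picked up by the config lookup shown above. This assumes serde with the derive feature; the `Params` type, route, and error message are illustrative:

```rust
use actix_web::{error, web, App, HttpResponse, HttpServer};
use serde::Deserialize;

#[derive(Deserialize)]
struct Params {
    page: u32,
}

async fn index(query: web::Query<Params>) -> HttpResponse {
    HttpResponse::Ok().body(format!("page {}", query.page))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // Registered as app data, this handler is used when query string
            // deserialization fails.
            .app_data(web::QueryConfig::default().error_handler(|err, _req| {
                error::InternalError::from_response(
                    err,
                    HttpResponse::BadRequest().body("invalid query string"),
                )
                .into()
            }))
            .route("/", web::get().to(index))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```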