diff --git a/actix-files/src/error.rs b/actix-files/src/error.rs index e5f2d4779..f8e32eef7 100644 --- a/actix-files/src/error.rs +++ b/actix-files/src/error.rs @@ -21,6 +21,7 @@ impl ResponseError for FilesError { } } +#[allow(clippy::enum_variant_names)] #[derive(Display, Debug, PartialEq)] pub enum UriSegmentError { /// The segment started with the wrapped invalid character. diff --git a/actix-http/CHANGES.md b/actix-http/CHANGES.md index 8ead43718..9ed28105f 100644 --- a/actix-http/CHANGES.md +++ b/actix-http/CHANGES.md @@ -2,6 +2,15 @@ ## Unreleased - 2021-xx-xx +### Fixed +* Remove slice creation pointing to potential uninitialized data on h1 encoder. [#2364] + +[#2364]: https://github.com/actix/actix-web/pull/2364 + +## 3.0.0-beta.9 - 2021-08-09 +### Fixed +* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977) + ## 3.0.0-beta.8 - 2021-06-26 ### Changed @@ -210,6 +219,11 @@ [#1878]: https://github.com/actix/actix-web/pull/1878 +## 2.2.1 - 2021-08-09 +### Fixed +* Potential HTTP request smuggling vulnerabilities. [RUSTSEC-2021-0081](https://github.com/rustsec/advisory-db/pull/977) + + ## 2.2.0 - 2020-11-25 ### Added * HttpResponse builders for 1xx status codes. [#1768] diff --git a/actix-http/Cargo.toml b/actix-http/Cargo.toml index a12fed4b9..4ce55dca1 100644 --- a/actix-http/Cargo.toml +++ b/actix-http/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "actix-http" -version = "3.0.0-beta.8" +version = "3.0.0-beta.9" authors = ["Nikolay Kim <fafhrd91@gmail.com>"] description = "HTTP primitives for the Actix ecosystem" keywords = ["actix", "http", "framework", "async", "futures"] diff --git a/actix-http/README.md b/actix-http/README.md index de1ef0a9b..5b06583bc 100644 --- a/actix-http/README.md +++ b/actix-http/README.md @@ -3,11 +3,11 @@ > HTTP primitives for the Actix ecosystem. [![crates.io](https://img.shields.io/crates/v/actix-http?label=latest)](https://crates.io/crates/actix-http) -[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.0.0-beta.8)](https://docs.rs/actix-http/3.0.0-beta.8) +[![Documentation](https://docs.rs/actix-http/badge.svg?version=3.0.0-beta.9)](https://docs.rs/actix-http/3.0.0-beta.9) [![Version](https://img.shields.io/badge/rustc-1.46+-ab6000.svg)](https://blog.rust-lang.org/2020/03/12/Rust-1.46.html) ![MIT or Apache 2.0 licensed](https://img.shields.io/crates/l/actix-http.svg)
-[![dependency status](https://deps.rs/crate/actix-http/3.0.0-beta.8/status.svg)](https://deps.rs/crate/actix-http/3.0.0-beta.8) +[![dependency status](https://deps.rs/crate/actix-http/3.0.0-beta.9/status.svg)](https://deps.rs/crate/actix-http/3.0.0-beta.9) [![Download](https://img.shields.io/crates/d/actix-http.svg)](https://crates.io/crates/actix-http) [![Chat on Discord](https://img.shields.io/discord/771444961383153695?label=chat&logo=discord)](https://discord.gg/NWpN5mmg3x) diff --git a/actix-http/benches/write-camel-case.rs b/actix-http/benches/write-camel-case.rs index fa4930eb9..ccf09b37e 100644 --- a/actix-http/benches/write-camel-case.rs +++ b/actix-http/benches/write-camel-case.rs @@ -18,7 +18,8 @@ fn bench_write_camel_case(c: &mut Criterion) { group.bench_with_input(BenchmarkId::new("New", i), bts, |b, bts| { b.iter(|| { let mut buf = black_box([0; 24]); - _new::write_camel_case(black_box(bts), &mut buf) + let len = black_box(bts.len()); + _new::write_camel_case(black_box(bts), buf.as_mut_ptr(), len) }); }); } @@ -30,9 +31,12 @@ criterion_group!(benches, bench_write_camel_case); criterion_main!(benches); mod _new { - pub fn write_camel_case(value: &[u8], buffer: &mut [u8]) { + pub fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) { // first copy entire (potentially wrong) slice to output - buffer[..value.len()].copy_from_slice(value); + let buffer = unsafe { + std::ptr::copy_nonoverlapping(value.as_ptr(), buf, len); + std::slice::from_raw_parts_mut(buf, len) + }; let mut iter = value.iter(); diff --git a/actix-http/src/error.rs b/actix-http/src/error.rs index 54666e072..f7d7f696a 100644 --- a/actix-http/src/error.rs +++ b/actix-http/src/error.rs @@ -196,7 +196,7 @@ pub enum ParseError { #[display(fmt = "IO error: {}", _0)] Io(io::Error), - /// Parsing a field as string failed + /// Parsing a field as string failed. #[display(fmt = "UTF8 error: {}", _0)] Utf8(Utf8Error), } diff --git a/actix-http/src/h1/chunked.rs b/actix-http/src/h1/chunked.rs new file mode 100644 index 000000000..1224ce08c --- /dev/null +++ b/actix-http/src/h1/chunked.rs @@ -0,0 +1,432 @@ +use std::{io, task::Poll}; + +use bytes::{Buf as _, Bytes, BytesMut}; + +macro_rules! 
byte ( + ($rdr:ident) => ({ + if $rdr.len() > 0 { + let b = $rdr[0]; + $rdr.advance(1); + b + } else { + return Poll::Pending + } + }) +); + +#[derive(Debug, PartialEq, Clone)] +pub(super) enum ChunkedState { + Size, + SizeLws, + Extension, + SizeLf, + Body, + BodyCr, + BodyLf, + EndCr, + EndLf, + End, +} + +impl ChunkedState { + pub(super) fn step( + &self, + body: &mut BytesMut, + size: &mut u64, + buf: &mut Option<Bytes>, + ) -> Poll<Result<ChunkedState, io::Error>> { + use self::ChunkedState::*; + match *self { + Size => ChunkedState::read_size(body, size), + SizeLws => ChunkedState::read_size_lws(body), + Extension => ChunkedState::read_extension(body), + SizeLf => ChunkedState::read_size_lf(body, size), + Body => ChunkedState::read_body(body, size, buf), + BodyCr => ChunkedState::read_body_cr(body), + BodyLf => ChunkedState::read_body_lf(body), + EndCr => ChunkedState::read_end_cr(body), + EndLf => ChunkedState::read_end_lf(body), + End => Poll::Ready(Ok(ChunkedState::End)), + } + } + + fn read_size( + rdr: &mut BytesMut, + size: &mut u64, + ) -> Poll<Result<ChunkedState, io::Error>> { + let radix = 16; + + let rem = match byte!(rdr) { + b @ b'0'..=b'9' => b - b'0', + b @ b'a'..=b'f' => b + 10 - b'a', + b @ b'A'..=b'F' => b + 10 - b'A', + b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)), + b';' => return Poll::Ready(Ok(ChunkedState::Extension)), + b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)), + _ => { + return Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk size line: Invalid Size", + ))); + } + }; + + match size.checked_mul(radix) { + Some(n) => { + *size = n as u64; + *size += rem as u64; + + Poll::Ready(Ok(ChunkedState::Size)) + } + None => { + log::debug!("chunk size would overflow u64"); + Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk size line: Size is too big", + ))) + } + } + } + + fn read_size_lws(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> { + match byte!(rdr) { + // LWS can follow the chunk size, but no more digits can come + b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)), + b';' => Poll::Ready(Ok(ChunkedState::Extension)), + b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)), + _ => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk size linear white space", + ))), + } + } + fn read_extension(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> { + match byte!(rdr) { + b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)), + // strictly 0x20 (space) should be disallowed but we don't parse quoted strings here + 0x00..=0x08 | 0x0a..=0x1f | 0x7f => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid character in chunk extension", + ))), + _ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions + } + } + fn read_size_lf( + rdr: &mut BytesMut, + size: &mut u64, + ) -> Poll<Result<ChunkedState, io::Error>> { + match byte!(rdr) { + b'\n' if *size > 0 => Poll::Ready(Ok(ChunkedState::Body)), + b'\n' if *size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)), + _ => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk size LF", + ))), + } + } + + fn read_body( + rdr: &mut BytesMut, + rem: &mut u64, + buf: &mut Option<Bytes>, + ) -> Poll<Result<ChunkedState, io::Error>> { + log::trace!("Chunked read, remaining={:?}", rem); + + let len = rdr.len() as u64; + if len == 0 { + Poll::Ready(Ok(ChunkedState::Body)) + } else { + let slice; + if *rem > len { + slice = rdr.split().freeze(); + *rem -= len; + } else { + slice = rdr.split_to(*rem as usize).freeze(); + *rem = 0; + } + *buf = Some(slice); + if *rem > 0 { + Poll::Ready(Ok(ChunkedState::Body)) + } else { +
Poll::Ready(Ok(ChunkedState::BodyCr)) + } + } + } + + fn read_body_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> { + match byte!(rdr) { + b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)), + _ => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk body CR", + ))), + } + } + fn read_body_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> { + match byte!(rdr) { + b'\n' => Poll::Ready(Ok(ChunkedState::Size)), + _ => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk body LF", + ))), + } + } + fn read_end_cr(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> { + match byte!(rdr) { + b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)), + _ => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk end CR", + ))), + } + } + fn read_end_lf(rdr: &mut BytesMut) -> Poll<Result<ChunkedState, io::Error>> { + match byte!(rdr) { + b'\n' => Poll::Ready(Ok(ChunkedState::End)), + _ => Poll::Ready(Err(io::Error::new( + io::ErrorKind::InvalidInput, + "Invalid chunk end LF", + ))), + } + } +} + +#[cfg(test)] +mod tests { + use actix_codec::Decoder as _; + use bytes::{Bytes, BytesMut}; + use http::Method; + + use crate::{ + error::ParseError, + h1::decoder::{MessageDecoder, PayloadItem}, + HttpMessage as _, Request, + }; + + macro_rules! parse_ready { + ($e:expr) => {{ + match MessageDecoder::<Request>::default().decode($e) { + Ok(Some((msg, _))) => msg, + Ok(_) => unreachable!("Eof during parsing http request"), + Err(err) => unreachable!("Error during parsing http request: {:?}", err), + } + }}; + } + + macro_rules! expect_parse_err { + ($e:expr) => {{ + match MessageDecoder::<Request>::default().decode($e) { + Err(err) => match err { + ParseError::Io(_) => unreachable!("Parse error expected"), + _ => {} + }, + _ => unreachable!("Error expected"), + } + }}; + } + + #[test] + fn test_parse_chunked_payload_chunk_extension() { + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + transfer-encoding: chunked\r\n\ + \r\n", + ); + + let mut reader = MessageDecoder::<Request>::default(); + let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + assert!(msg.chunked().unwrap()); + + buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n") + let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk(); + assert_eq!(chunk, Bytes::from_static(b"data")); + let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk(); + assert_eq!(chunk, Bytes::from_static(b"line")); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert!(msg.eof()); + } + + #[test] + fn test_request_chunked() { + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + transfer-encoding: chunked\r\n\r\n", + ); + let req = parse_ready!(&mut buf); + + if let Ok(val) = req.chunked() { + assert!(val); + } else { + unreachable!("Error"); + } + + // intentional typo in "chunked" + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + transfer-encoding: chnked\r\n\r\n", + ); + expect_parse_err!(&mut buf); + } + + #[test] + fn test_http_request_chunked_payload() { + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + transfer-encoding: chunked\r\n\r\n", + ); + let mut reader = MessageDecoder::<Request>::default(); + let (req, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + assert!(req.chunked().unwrap()); + + buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); + assert_eq!( + pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(), + b"data" + ); + assert_eq!( + pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(), + b"line" + ); + assert!(pl.decode(&mut buf).unwrap().unwrap().eof()); + } + + #[test] + fn 
test_http_request_chunked_payload_and_next_message() { + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + transfer-encoding: chunked\r\n\r\n", + ); + let mut reader = MessageDecoder::<Request>::default(); + let (req, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + assert!(req.chunked().unwrap()); + + buf.extend( + b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\ + POST /test2 HTTP/1.1\r\n\ + transfer-encoding: chunked\r\n\r\n" + .iter(), + ); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(msg.chunk().as_ref(), b"data"); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(msg.chunk().as_ref(), b"line"); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert!(msg.eof()); + + let (req, _) = reader.decode(&mut buf).unwrap().unwrap(); + assert!(req.chunked().unwrap()); + assert_eq!(*req.method(), Method::POST); + assert!(req.chunked().unwrap()); + } + + #[test] + fn test_http_request_chunked_payload_chunks() { + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + transfer-encoding: chunked\r\n\r\n", + ); + + let mut reader = MessageDecoder::<Request>::default(); + let (req, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + assert!(req.chunked().unwrap()); + + buf.extend(b"4\r\n1111\r\n"); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(msg.chunk().as_ref(), b"1111"); + + buf.extend(b"4\r\ndata\r"); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(msg.chunk().as_ref(), b"data"); + + buf.extend(b"\n4"); + assert!(pl.decode(&mut buf).unwrap().is_none()); + + buf.extend(b"\r"); + assert!(pl.decode(&mut buf).unwrap().is_none()); + buf.extend(b"\n"); + assert!(pl.decode(&mut buf).unwrap().is_none()); + + buf.extend(b"li"); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(msg.chunk().as_ref(), b"li"); + + //trailers + //buf.feed_data("test: test\r\n"); + //not_ready!(reader.parse(&mut buf, &mut readbuf)); + + buf.extend(b"ne\r\n0\r\n"); + let msg = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(msg.chunk().as_ref(), b"ne"); + assert!(pl.decode(&mut buf).unwrap().is_none()); + + buf.extend(b"\r\n"); + assert!(pl.decode(&mut buf).unwrap().unwrap().eof()); + } + + #[test] + fn chunk_extension_quoted() { + let mut buf = BytesMut::from( + "GET /test HTTP/1.1\r\n\ + Host: localhost:8080\r\n\ + Transfer-Encoding: chunked\r\n\ + \r\n\ + 2;hello=b;one=\"1 2 3\"\r\n\ + xx", + ); + + let mut reader = MessageDecoder::<Request>::default(); + let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + + let chunk = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"xx"))); + } + + #[test] + fn hrs_chunk_extension_invalid() { + let mut buf = BytesMut::from( + "GET / HTTP/1.1\r\n\ + Host: localhost:8080\r\n\ + Transfer-Encoding: chunked\r\n\ + \r\n\ + 2;x\nx\r\n\ + 4c\r\n\ + 0\r\n", + ); + + let mut reader = MessageDecoder::<Request>::default(); + let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + + let err = pl.decode(&mut buf).unwrap_err(); + assert!(err + .to_string() + .contains("Invalid character in chunk extension")); + } + + #[test] + fn hrs_chunk_size_overflow() { + let mut buf = BytesMut::from( + "GET / HTTP/1.1\r\n\ + Host: example.com\r\n\ + Transfer-Encoding: chunked\r\n\ + \r\n\ + f0000000000000003\r\n\ + abc\r\n\ + 0\r\n", + ); + + let mut reader = MessageDecoder::<Request>::default(); + let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + + let 
err = pl.decode(&mut buf).unwrap_err(); + assert!(err + .to_string() + .contains("Invalid chunk size line: Size is too big")); + } +} diff --git a/actix-http/src/h1/decoder.rs b/actix-http/src/h1/decoder.rs index f240710c2..313ffd5e0 100644 --- a/actix-http/src/h1/decoder.rs +++ b/actix-http/src/h1/decoder.rs @@ -1,18 +1,18 @@ -use std::convert::TryFrom; -use std::io; -use std::marker::PhantomData; -use std::task::Poll; +use std::{convert::TryFrom, io, marker::PhantomData, task::Poll}; use actix_codec::Decoder; -use bytes::{Buf, Bytes, BytesMut}; +use bytes::{Bytes, BytesMut}; use http::header::{HeaderName, HeaderValue}; use http::{header, Method, StatusCode, Uri, Version}; use log::{debug, error, trace}; -use crate::error::ParseError; -use crate::header::HeaderMap; -use crate::message::{ConnectionType, ResponseHead}; -use crate::request::Request; +use super::chunked::ChunkedState; +use crate::{ + error::ParseError, + header::HeaderMap, + message::{ConnectionType, ResponseHead}, + request::Request, +}; pub(crate) const MAX_BUFFER_SIZE: usize = 131_072; const MAX_HEADERS: usize = 96; @@ -67,6 +67,7 @@ pub(crate) trait MessageType: Sized { let mut has_upgrade_websocket = false; let mut expect = false; let mut chunked = false; + let mut seen_te = false; let mut content_length = None; { @@ -85,8 +86,17 @@ pub(crate) trait MessageType: Sized { }; match name { - header::CONTENT_LENGTH => { - if let Ok(s) = value.to_str() { + header::CONTENT_LENGTH if content_length.is_some() => { + debug!("multiple Content-Length"); + return Err(ParseError::Header); + } + + header::CONTENT_LENGTH => match value.to_str() { + Ok(s) if s.trim().starts_with('+') => { + debug!("illegal Content-Length: {:?}", s); + return Err(ParseError::Header); + } + Ok(s) => { if let Ok(len) = s.parse::<u64>() { if len != 0 { content_length = Some(len); @@ -95,15 +105,31 @@ pub(crate) trait MessageType: Sized { debug!("illegal Content-Length: {:?}", s); return Err(ParseError::Header); } - } else { + } + Err(_) => { debug!("illegal Content-Length: {:?}", value); return Err(ParseError::Header); } - } + }, + // transfer-encoding + header::TRANSFER_ENCODING if seen_te => { + debug!("multiple Transfer-Encoding not allowed"); + return Err(ParseError::Header); + } + header::TRANSFER_ENCODING => { + seen_te = true; + if let Ok(s) = value.to_str().map(str::trim) { - chunked = s.eq_ignore_ascii_case("chunked"); + if s.eq_ignore_ascii_case("chunked") { + chunked = true; + } else if s.eq_ignore_ascii_case("identity") { + // allow silently since multiple TE headers are already checked + } else { + debug!("illegal Transfer-Encoding: {:?}", s); + return Err(ParseError::Header); + } } else { return Err(ParseError::Header); } @@ -408,20 +434,6 @@ enum Kind { Eof, } -#[derive(Debug, PartialEq, Clone)] -enum ChunkedState { - Size, - SizeLws, - Extension, - SizeLf, - Body, - BodyCr, - BodyLf, - EndCr, - EndLf, - End, -} - impl Decoder for PayloadDecoder { type Item = PayloadItem; type Error = io::Error; @@ -451,19 +463,23 @@ impl Decoder for PayloadDecoder { Kind::Chunked(ref mut state, ref mut size) => { loop { let mut buf = None; + // advances the chunked state *state = match state.step(src, size, &mut buf) { Poll::Pending => return Ok(None), Poll::Ready(Ok(state)) => state, Poll::Ready(Err(e)) => return Err(e), }; + if *state == ChunkedState::End { trace!("End of chunked stream"); return Ok(Some(PayloadItem::Eof)); } + if let Some(buf) = buf { return Ok(Some(PayloadItem::Chunk(buf))); } + if src.is_empty() { return Ok(None); } @@ -480,201 +496,40 @@ 
impl Decoder for PayloadDecoder { } } -macro_rules! byte ( - ($rdr:ident) => ({ - if $rdr.len() > 0 { - let b = $rdr[0]; - $rdr.advance(1); - b - } else { - return Poll::Pending - } - }) -); - -impl ChunkedState { - fn step( - &self, - body: &mut BytesMut, - size: &mut u64, - buf: &mut Option, - ) -> Poll> { - use self::ChunkedState::*; - match *self { - Size => ChunkedState::read_size(body, size), - SizeLws => ChunkedState::read_size_lws(body), - Extension => ChunkedState::read_extension(body), - SizeLf => ChunkedState::read_size_lf(body, size), - Body => ChunkedState::read_body(body, size, buf), - BodyCr => ChunkedState::read_body_cr(body), - BodyLf => ChunkedState::read_body_lf(body), - EndCr => ChunkedState::read_end_cr(body), - EndLf => ChunkedState::read_end_lf(body), - End => Poll::Ready(Ok(ChunkedState::End)), - } - } - - fn read_size( - rdr: &mut BytesMut, - size: &mut u64, - ) -> Poll> { - let radix = 16; - match byte!(rdr) { - b @ b'0'..=b'9' => { - *size *= radix; - *size += u64::from(b - b'0'); - } - b @ b'a'..=b'f' => { - *size *= radix; - *size += u64::from(b + 10 - b'a'); - } - b @ b'A'..=b'F' => { - *size *= radix; - *size += u64::from(b + 10 - b'A'); - } - b'\t' | b' ' => return Poll::Ready(Ok(ChunkedState::SizeLws)), - b';' => return Poll::Ready(Ok(ChunkedState::Extension)), - b'\r' => return Poll::Ready(Ok(ChunkedState::SizeLf)), - _ => { - return Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk size line: Invalid Size", - ))); - } - } - Poll::Ready(Ok(ChunkedState::Size)) - } - - fn read_size_lws(rdr: &mut BytesMut) -> Poll> { - trace!("read_size_lws"); - match byte!(rdr) { - // LWS can follow the chunk size, but no more digits can come - b'\t' | b' ' => Poll::Ready(Ok(ChunkedState::SizeLws)), - b';' => Poll::Ready(Ok(ChunkedState::Extension)), - b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)), - _ => Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk size linear white space", - ))), - } - } - fn read_extension(rdr: &mut BytesMut) -> Poll> { - match byte!(rdr) { - b'\r' => Poll::Ready(Ok(ChunkedState::SizeLf)), - _ => Poll::Ready(Ok(ChunkedState::Extension)), // no supported extensions - } - } - fn read_size_lf( - rdr: &mut BytesMut, - size: &mut u64, - ) -> Poll> { - match byte!(rdr) { - b'\n' if *size > 0 => Poll::Ready(Ok(ChunkedState::Body)), - b'\n' if *size == 0 => Poll::Ready(Ok(ChunkedState::EndCr)), - _ => Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk size LF", - ))), - } - } - - fn read_body( - rdr: &mut BytesMut, - rem: &mut u64, - buf: &mut Option, - ) -> Poll> { - trace!("Chunked read, remaining={:?}", rem); - - let len = rdr.len() as u64; - if len == 0 { - Poll::Ready(Ok(ChunkedState::Body)) - } else { - let slice; - if *rem > len { - slice = rdr.split().freeze(); - *rem -= len; - } else { - slice = rdr.split_to(*rem as usize).freeze(); - *rem = 0; - } - *buf = Some(slice); - if *rem > 0 { - Poll::Ready(Ok(ChunkedState::Body)) - } else { - Poll::Ready(Ok(ChunkedState::BodyCr)) - } - } - } - - fn read_body_cr(rdr: &mut BytesMut) -> Poll> { - match byte!(rdr) { - b'\r' => Poll::Ready(Ok(ChunkedState::BodyLf)), - _ => Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk body CR", - ))), - } - } - fn read_body_lf(rdr: &mut BytesMut) -> Poll> { - match byte!(rdr) { - b'\n' => Poll::Ready(Ok(ChunkedState::Size)), - _ => Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk body LF", - ))), - } - } - fn 
read_end_cr(rdr: &mut BytesMut) -> Poll> { - match byte!(rdr) { - b'\r' => Poll::Ready(Ok(ChunkedState::EndLf)), - _ => Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk end CR", - ))), - } - } - fn read_end_lf(rdr: &mut BytesMut) -> Poll> { - match byte!(rdr) { - b'\n' => Poll::Ready(Ok(ChunkedState::End)), - _ => Poll::Ready(Err(io::Error::new( - io::ErrorKind::InvalidInput, - "Invalid chunk end LF", - ))), - } - } -} - #[cfg(test)] mod tests { use bytes::{Bytes, BytesMut}; use http::{Method, Version}; use super::*; - use crate::error::ParseError; - use crate::http::header::{HeaderName, SET_COOKIE}; - use crate::HttpMessage; + use crate::{ + error::ParseError, + http::header::{HeaderName, SET_COOKIE}, + HttpMessage as _, + }; impl PayloadType { - fn unwrap(self) -> PayloadDecoder { + pub(crate) fn unwrap(self) -> PayloadDecoder { match self { PayloadType::Payload(pl) => pl, _ => panic!(), } } - fn is_unhandled(&self) -> bool { + pub(crate) fn is_unhandled(&self) -> bool { matches!(self, PayloadType::Stream(_)) } } impl PayloadItem { - fn chunk(self) -> Bytes { + pub(crate) fn chunk(self) -> Bytes { match self { PayloadItem::Chunk(chunk) => chunk, _ => panic!("error"), } } - fn eof(&self) -> bool { + + pub(crate) fn eof(&self) -> bool { matches!(*self, PayloadItem::Eof) } } @@ -967,34 +822,6 @@ mod tests { assert!(req.upgrade()); } - #[test] - fn test_request_chunked() { - let mut buf = BytesMut::from( - "GET /test HTTP/1.1\r\n\ - transfer-encoding: chunked\r\n\r\n", - ); - let req = parse_ready!(&mut buf); - - if let Ok(val) = req.chunked() { - assert!(val); - } else { - unreachable!("Error"); - } - - // intentional typo in "chunked" - let mut buf = BytesMut::from( - "GET /test HTTP/1.1\r\n\ - transfer-encoding: chnked\r\n\r\n", - ); - let req = parse_ready!(&mut buf); - - if let Ok(val) = req.chunked() { - assert!(!val); - } else { - unreachable!("Error"); - } - } - #[test] fn test_headers_content_length_err_1() { let mut buf = BytesMut::from( @@ -1112,126 +939,6 @@ mod tests { expect_parse_err!(&mut buf); } - #[test] - fn test_http_request_chunked_payload() { - let mut buf = BytesMut::from( - "GET /test HTTP/1.1\r\n\ - transfer-encoding: chunked\r\n\r\n", - ); - let mut reader = MessageDecoder::::default(); - let (req, pl) = reader.decode(&mut buf).unwrap().unwrap(); - let mut pl = pl.unwrap(); - assert!(req.chunked().unwrap()); - - buf.extend(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); - assert_eq!( - pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(), - b"data" - ); - assert_eq!( - pl.decode(&mut buf).unwrap().unwrap().chunk().as_ref(), - b"line" - ); - assert!(pl.decode(&mut buf).unwrap().unwrap().eof()); - } - - #[test] - fn test_http_request_chunked_payload_and_next_message() { - let mut buf = BytesMut::from( - "GET /test HTTP/1.1\r\n\ - transfer-encoding: chunked\r\n\r\n", - ); - let mut reader = MessageDecoder::::default(); - let (req, pl) = reader.decode(&mut buf).unwrap().unwrap(); - let mut pl = pl.unwrap(); - assert!(req.chunked().unwrap()); - - buf.extend( - b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n\ - POST /test2 HTTP/1.1\r\n\ - transfer-encoding: chunked\r\n\r\n" - .iter(), - ); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert_eq!(msg.chunk().as_ref(), b"data"); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert_eq!(msg.chunk().as_ref(), b"line"); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert!(msg.eof()); - - let (req, _) = reader.decode(&mut buf).unwrap().unwrap(); - assert!(req.chunked().unwrap()); - 
assert_eq!(*req.method(), Method::POST); - assert!(req.chunked().unwrap()); - } - - #[test] - fn test_http_request_chunked_payload_chunks() { - let mut buf = BytesMut::from( - "GET /test HTTP/1.1\r\n\ - transfer-encoding: chunked\r\n\r\n", - ); - - let mut reader = MessageDecoder::::default(); - let (req, pl) = reader.decode(&mut buf).unwrap().unwrap(); - let mut pl = pl.unwrap(); - assert!(req.chunked().unwrap()); - - buf.extend(b"4\r\n1111\r\n"); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert_eq!(msg.chunk().as_ref(), b"1111"); - - buf.extend(b"4\r\ndata\r"); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert_eq!(msg.chunk().as_ref(), b"data"); - - buf.extend(b"\n4"); - assert!(pl.decode(&mut buf).unwrap().is_none()); - - buf.extend(b"\r"); - assert!(pl.decode(&mut buf).unwrap().is_none()); - buf.extend(b"\n"); - assert!(pl.decode(&mut buf).unwrap().is_none()); - - buf.extend(b"li"); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert_eq!(msg.chunk().as_ref(), b"li"); - - //trailers - //buf.feed_data("test: test\r\n"); - //not_ready!(reader.parse(&mut buf, &mut readbuf)); - - buf.extend(b"ne\r\n0\r\n"); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert_eq!(msg.chunk().as_ref(), b"ne"); - assert!(pl.decode(&mut buf).unwrap().is_none()); - - buf.extend(b"\r\n"); - assert!(pl.decode(&mut buf).unwrap().unwrap().eof()); - } - - #[test] - fn test_parse_chunked_payload_chunk_extension() { - let mut buf = BytesMut::from( - "GET /test HTTP/1.1\r\n\ - transfer-encoding: chunked\r\n\ - \r\n", - ); - - let mut reader = MessageDecoder::::default(); - let (msg, pl) = reader.decode(&mut buf).unwrap().unwrap(); - let mut pl = pl.unwrap(); - assert!(msg.chunked().unwrap()); - - buf.extend(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"); // test: test\r\n\r\n") - let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk(); - assert_eq!(chunk, Bytes::from_static(b"data")); - let chunk = pl.decode(&mut buf).unwrap().unwrap().chunk(); - assert_eq!(chunk, Bytes::from_static(b"line")); - let msg = pl.decode(&mut buf).unwrap().unwrap(); - assert!(msg.eof()); - } - #[test] fn test_response_http10_read_until_eof() { let mut buf = BytesMut::from("HTTP/1.0 200 Ok\r\n\r\ntest data"); @@ -1243,4 +950,84 @@ mod tests { let chunk = pl.decode(&mut buf).unwrap().unwrap(); assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"test data"))); } + + #[test] + fn hrs_multiple_content_length() { + let mut buf = BytesMut::from( + "GET / HTTP/1.1\r\n\ + Host: example.com\r\n\ + Content-Length: 4\r\n\ + Content-Length: 2\r\n\ + \r\n\ + abcd", + ); + + expect_parse_err!(&mut buf); + } + + #[test] + fn hrs_content_length_plus() { + let mut buf = BytesMut::from( + "GET / HTTP/1.1\r\n\ + Host: example.com\r\n\ + Content-Length: +3\r\n\ + \r\n\ + 000", + ); + + expect_parse_err!(&mut buf); + } + + #[test] + fn hrs_unknown_transfer_encoding() { + let mut buf = BytesMut::from( + "GET / HTTP/1.1\r\n\ + Host: example.com\r\n\ + Transfer-Encoding: JUNK\r\n\ + Transfer-Encoding: chunked\r\n\ + \r\n\ + 5\r\n\ + hello\r\n\ + 0", + ); + + expect_parse_err!(&mut buf); + } + + #[test] + fn hrs_multiple_transfer_encoding() { + let mut buf = BytesMut::from( + "GET / HTTP/1.1\r\n\ + Host: example.com\r\n\ + Content-Length: 51\r\n\ + Transfer-Encoding: identity\r\n\ + Transfer-Encoding: chunked\r\n\ + \r\n\ + 0\r\n\ + \r\n\ + GET /forbidden HTTP/1.1\r\n\ + Host: example.com\r\n\r\n", + ); + + expect_parse_err!(&mut buf); + } + + #[test] + fn transfer_encoding_agrees() { + let mut buf = BytesMut::from( + "GET 
/test HTTP/1.1\r\n\ + Host: example.com\r\n\ + Content-Length: 3\r\n\ + Transfer-Encoding: identity\r\n\ + \r\n\ + 0\r\n", + ); + + let mut reader = MessageDecoder::<Request>::default(); + let (_msg, pl) = reader.decode(&mut buf).unwrap().unwrap(); + let mut pl = pl.unwrap(); + + let chunk = pl.decode(&mut buf).unwrap().unwrap(); + assert_eq!(chunk, PayloadItem::Chunk(Bytes::from_static(b"0\r\n"))); + } } diff --git a/actix-http/src/h1/encoder.rs b/actix-http/src/h1/encoder.rs index 254981123..5e1d47785 100644 --- a/actix-http/src/h1/encoder.rs +++ b/actix-http/src/h1/encoder.rs @@ -81,6 +81,7 @@ pub(crate) trait MessageType: Sized { match length { BodySize::Stream => { if chunked { + skip_len = true; if camel_case { dst.put_slice(b"\r\nTransfer-Encoding: chunked\r\n") } else { @@ -174,7 +175,7 @@ pub(crate) trait MessageType: Sized { unsafe { if camel_case { // use Camel-Case headers - write_camel_case(k, from_raw_parts_mut(buf, k_len)); + write_camel_case(k, buf, k_len); } else { write_data(k, buf, k_len); } @@ -472,15 +473,22 @@ impl TransferEncoding { } /// # Safety -/// Callers must ensure that the given length matches given value length. +/// Callers must ensure that the given `len` matches the given `value` length and that `buf` is +/// valid for writes of at least `len` bytes. unsafe fn write_data(value: &[u8], buf: *mut u8, len: usize) { debug_assert_eq!(value.len(), len); copy_nonoverlapping(value.as_ptr(), buf, len); } -fn write_camel_case(value: &[u8], buffer: &mut [u8]) { +/// # Safety +/// Callers must ensure that the given `len` matches the given `value` length and that `buf` is +/// valid for writes of at least `len` bytes. +unsafe fn write_camel_case(value: &[u8], buf: *mut u8, len: usize) { // first copy entire (potentially wrong) slice to output - buffer[..value.len()].copy_from_slice(value); + write_data(value, buf, len); + + // SAFETY: We just initialized the buffer with `value` + let buffer = from_raw_parts_mut(buf, len); let mut iter = value.iter(); diff --git a/actix-http/src/h1/mod.rs b/actix-http/src/h1/mod.rs index 7e6df6ceb..17cbfb90f 100644 --- a/actix-http/src/h1/mod.rs +++ b/actix-http/src/h1/mod.rs @@ -1,6 +1,8 @@ //! HTTP/1 protocol implementation. + use bytes::{Bytes, BytesMut}; +mod chunked; mod client; mod codec; mod decoder; diff --git a/actix-http/src/header/map.rs b/actix-http/src/header/map.rs index 634d9282f..a8fd9715b 100644 --- a/actix-http/src/header/map.rs +++ b/actix-http/src/header/map.rs @@ -684,7 +684,7 @@ impl<'a> Iterator for Iter<'a> { fn next(&mut self) -> Option<Self::Item> { // handle in-progress multi value lists first - if let Some((ref name, ref mut vals)) = self.multi_inner { + if let Some((name, ref mut vals)) = self.multi_inner { match vals.get(self.multi_idx) { Some(val) => { self.multi_idx += 1; diff --git a/actix-http/src/lib.rs b/actix-http/src/lib.rs index d22e1ee44..17ee3ff29 100644 --- a/actix-http/src/lib.rs +++ b/actix-http/src/lib.rs @@ -14,7 +14,7 @@ //! [rustls]: https://crates.io/crates/rustls //! 
[trust-dns]: https://crates.io/crates/trust-dns -#![deny(rust_2018_idioms, nonstandard_style)] +#![deny(rust_2018_idioms, nonstandard_style, clippy::uninit_assumed_init)] #![allow( clippy::type_complexity, clippy::too_many_arguments, diff --git a/actix-http/src/message.rs b/actix-http/src/message.rs index e85d686b7..84125fb3a 100644 --- a/actix-http/src/message.rs +++ b/actix-http/src/message.rs @@ -209,7 +209,7 @@ impl RequestHeadType { impl AsRef for RequestHeadType { fn as_ref(&self) -> &RequestHead { match self { - RequestHeadType::Owned(head) => &head, + RequestHeadType::Owned(head) => head, RequestHeadType::Rc(head, _) => head.as_ref(), } } @@ -363,7 +363,7 @@ impl std::ops::Deref for Message { type Target = T; fn deref(&self) -> &Self::Target { - &self.head.as_ref() + self.head.as_ref() } } diff --git a/actix-router/src/path.rs b/actix-router/src/path.rs index e29591f96..9af7b0b8b 100644 --- a/actix-router/src/path.rs +++ b/actix-router/src/path.rs @@ -125,7 +125,7 @@ impl Path { for (seg_name, val) in self.segments.iter() { if name == seg_name { return match val { - PathItem::Static(ref s) => Some(&s), + PathItem::Static(ref s) => Some(s), PathItem::Segment(s, e) => { Some(&self.path.path()[(*s as usize)..(*e as usize)]) } @@ -183,7 +183,7 @@ impl<'a, T: ResourcePath> Iterator for PathIter<'a, T> { if self.idx < self.params.segment_count() { let idx = self.idx; let res = match self.params.segments[idx].1 { - PathItem::Static(ref s) => &s, + PathItem::Static(ref s) => s, PathItem::Segment(s, e) => &self.params.path.path()[(s as usize)..(e as usize)], }; self.idx += 1; @@ -207,7 +207,7 @@ impl Index for Path { fn index(&self, idx: usize) -> &str { match self.segments[idx].1 { - PathItem::Static(ref s) => &s, + PathItem::Static(ref s) => s, PathItem::Segment(s, e) => &self.path.path()[(s as usize)..(e as usize)], } } diff --git a/actix-router/src/resource.rs b/actix-router/src/resource.rs index 2593a3ef0..57bd67bd2 100644 --- a/actix-router/src/resource.rs +++ b/actix-router/src/resource.rs @@ -303,7 +303,7 @@ impl ResourceDef { let mut pattern_data = Vec::new(); for pattern in &patterns { - match ResourceDef::parse(&pattern, false, true) { + match ResourceDef::parse(pattern, false, true) { (PatternType::Dynamic(re, names), _) => { re_set.push(re.as_str().to_owned()); pattern_data.push((re, names)); @@ -778,7 +778,7 @@ impl ResourceDef { profile_section!(pattern_dynamic_extract_captures); for (no, name) in names.iter().enumerate() { - if let Some(m) = captures.name(&name) { + if let Some(m) = captures.name(name) { segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16); } else { log::error!( @@ -808,7 +808,7 @@ impl ResourceDef { }; for (no, name) in names.iter().enumerate() { - if let Some(m) = captures.name(&name) { + if let Some(m) = captures.name(name) { segments[no] = PathItem::Segment(m.start() as u16, m.end() as u16); } else { log::error!("Dynamic path match but not all segments found: {}", name); diff --git a/docs/graphs/net-only.dot b/docs/graphs/net-only.dot index bee0185ab..8a58ec2b8 100644 --- a/docs/graphs/net-only.dot +++ b/docs/graphs/net-only.dot @@ -4,7 +4,7 @@ digraph { subgraph cluster_net { label="actix-net" "actix-codec" "actix-macros" "actix-rt" "actix-server" "actix-service" - "actix-tls" "actix-tracing" "actix-utils" "actix-router" + "actix-tls" "actix-tracing" "actix-utils" } subgraph cluster_other { @@ -25,7 +25,6 @@ digraph { "actix-tls" -> { "tokio-util" }[color="#009900"] "actix-server" -> { "actix-service" "actix-rt" "actix-utils" "tokio" } 
"actix-rt" -> { "actix-macros" "tokio" } - "actix-router" -> { "bytestring" } "local-channel" -> { "local-waker" } diff --git a/docs/graphs/web-focus.dot b/docs/graphs/web-focus.dot index 2c6e2779b..63b3eaa82 100644 --- a/docs/graphs/web-focus.dot +++ b/docs/graphs/web-focus.dot @@ -10,6 +10,7 @@ digraph { "web-actors" "web-codegen" "http-test" + "router" { rank=same; "multipart" "web-actors" "http-test" }; { rank=same; "files" "awc" "web" }; @@ -36,7 +37,7 @@ digraph { "rt" -> { "macros" } { rank=same; "utils" "codec" }; - { rank=same; "rt" "macros" "service" "router" }; + { rank=same; "rt" "macros" "service" }; // actix diff --git a/docs/graphs/web-only.dot b/docs/graphs/web-only.dot index b0decd818..ee74c292b 100644 --- a/docs/graphs/web-only.dot +++ b/docs/graphs/web-only.dot @@ -10,9 +10,10 @@ digraph { "actix-web-codegen" "actix-http-test" "actix-test" + "actix-router" } - "actix-web" -> { "actix-web-codegen" "actix-http" } + "actix-web" -> { "actix-web-codegen" "actix-http" "actix-router" } "awc" -> { "actix-http" } "actix-web-actors" -> { "actix" "actix-web" "actix-http" } "actix-multipart" -> { "actix-web" } diff --git a/src/http/header/content_disposition.rs b/src/http/header/content_disposition.rs index 9f67baffb..6e75fde92 100644 --- a/src/http/header/content_disposition.rs +++ b/src/http/header/content_disposition.rs @@ -457,7 +457,7 @@ impl Header for ContentDisposition { fn parse(msg: &T) -> Result { if let Some(h) = msg.headers().get(&Self::name()) { - Self::from_raw(&h) + Self::from_raw(h) } else { Err(crate::error::ParseError::Header) } diff --git a/src/middleware/logger.rs b/src/middleware/logger.rs index bbb0e3dc4..0f09b6ad6 100644 --- a/src/middleware/logger.rs +++ b/src/middleware/logger.rs @@ -553,7 +553,7 @@ impl FormatText { *self = FormatText::Str(s.to_string()); } FormatText::RemoteAddr => { - let s = if let Some(ref peer) = req.connection_info().remote_addr() { + let s = if let Some(peer) = req.connection_info().remote_addr() { FormatText::Str((*peer).to_string()) } else { FormatText::Str("-".to_string()) diff --git a/src/request.rs b/src/request.rs index 41c8252a8..59850b4ca 100644 --- a/src/request.rs +++ b/src/request.rs @@ -184,7 +184,7 @@ impl HttpRequest { U: IntoIterator, I: AsRef, { - self.resource_map().url_for(&self, name, elements) + self.resource_map().url_for(self, name, elements) } /// Generate url for named resource @@ -199,7 +199,7 @@ impl HttpRequest { #[inline] /// Get a reference to a `ResourceMap` of current application. pub fn resource_map(&self) -> &ResourceMap { - &self.app_state().rmap() + self.app_state().rmap() } /// Peer socket address. 
diff --git a/src/service.rs b/src/service.rs index 148199407..48167e5b3 100644 --- a/src/service.rs +++ b/src/service.rs @@ -117,7 +117,7 @@ impl ServiceRequest { /// This method returns reference to the request head #[inline] pub fn head(&self) -> &RequestHead { - &self.req.head() + self.req.head() } /// This method returns reference to the request head diff --git a/src/types/either.rs b/src/types/either.rs index d3b003587..35e63cec9 100644 --- a/src/types/either.rs +++ b/src/types/either.rs @@ -253,7 +253,7 @@ where Ok(bytes) => { let fallback = bytes.clone(); let left = - L::from_request(&this.req, &mut payload_from_bytes(bytes)); + L::from_request(this.req, &mut payload_from_bytes(bytes)); EitherExtractState::Left { left, fallback } } Err(err) => break Err(EitherExtractError::Bytes(err)), @@ -265,7 +265,7 @@ where Ok(extracted) => break Ok(Either::Left(extracted)), Err(left_err) => { let right = R::from_request( - &this.req, + this.req, &mut payload_from_bytes(mem::take(fallback)), ); EitherExtractState::Right { diff --git a/src/types/json.rs b/src/types/json.rs index fc02c8854..ab9708c53 100644 --- a/src/types/json.rs +++ b/src/types/json.rs @@ -425,7 +425,7 @@ where } } None => { - let json = serde_json::from_slice::<T>(&buf) + let json = serde_json::from_slice::<T>(buf) .map_err(JsonPayloadError::Deserialize)?; return Poll::Ready(Ok(json)); }
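Closing note: the new `ChunkedState::read_size` in actix-http/src/h1/chunked.rs swaps the old unchecked `*size *= radix` arithmetic for `checked_mul`, so an attacker-supplied chunk-size line such as `f0000000000000003` now fails parsing instead of silently wrapping around. A minimal standalone sketch of that idea, using only the standard library (`parse_chunk_size` is illustrative and not actix-http code):

```rust
use std::io;

/// Illustrative hex chunk-size parser with checked arithmetic, in the spirit
/// of `ChunkedState::read_size` from this diff. It stops at the first CR,
/// ';' or whitespace and errors on overflow or invalid digits.
fn parse_chunk_size(line: &[u8]) -> io::Result<u64> {
    let mut size: u64 = 0;

    for &b in line {
        let digit = match b {
            b'0'..=b'9' => b - b'0',
            b'a'..=b'f' => b + 10 - b'a',
            b'A'..=b'F' => b + 10 - b'A',
            b'\r' | b';' | b'\t' | b' ' => break,
            _ => {
                return Err(io::Error::new(
                    io::ErrorKind::InvalidInput,
                    "Invalid chunk size line: Invalid Size",
                ))
            }
        };

        // checked_mul/checked_add turn an oversized size into a hard error
        // instead of letting it wrap to a small value.
        size = size
            .checked_mul(16)
            .and_then(|s| s.checked_add(u64::from(digit)))
            .ok_or_else(|| {
                io::Error::new(
                    io::ErrorKind::InvalidInput,
                    "Invalid chunk size line: Size is too big",
                )
            })?;
    }

    Ok(size)
}

fn main() -> io::Result<()> {
    assert_eq!(parse_chunk_size(b"4\r")?, 4);
    assert_eq!(parse_chunk_size(b"4c\r")?, 0x4c);
    // 17 hex digits cannot fit in a u64; mirrors the hrs_chunk_size_overflow test.
    assert!(parse_chunk_size(b"f0000000000000003\r").is_err());
    Ok(())
}
```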