mirror of https://github.com/fafhrd91/actix-web
Compare commits
21 Commits
9296799c2b
...
3f1d61837f
Author | SHA1 | Date
---|---|---
 | 3f1d61837f |
 | 90c19a835d |
 | adf57d2b24 |
 | fcd10fbb5e |
 | 95b6a81f43 |
 | ab18efe0ac |
 | 36a7e8cc6a |
 | 5e28e69e29 |
 | ab04286b01 |
 | 627d113323 |
 | 94c5d4d641 |
 | 43fca317d3 |
 | 69ca0e7c57 |
 | a94b5b89a5 |
 | 42bd5eebdb |
 | 8eb1d10bae |
 | 96f5ebb549 |
 | a6b5c9893d |
 | 09b460c72e |
 | b492b27e4a |
 | 35cadbbe0b |
@@ -0,0 +1,3 @@
+version: "0.2"
+words:
+  - actix

@@ -49,7 +49,7 @@ jobs:
           toolchain: ${{ matrix.version.version }}

       - name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
-        uses: taiki-e/install-action@v2.49.17
+        uses: taiki-e/install-action@v2.49.33
         with:
           tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean

@@ -83,7 +83,7 @@ jobs:
         uses: actions-rust-lang/setup-rust-toolchain@v1.11.0

       - name: Install just, cargo-hack
-        uses: taiki-e/install-action@v2.49.17
+        uses: taiki-e/install-action@v2.49.33
         with:
           tool: just,cargo-hack

@@ -64,7 +64,7 @@ jobs:
           toolchain: ${{ matrix.version.version }}

       - name: Install just, cargo-hack, cargo-nextest, cargo-ci-cache-clean
-        uses: taiki-e/install-action@v2.49.17
+        uses: taiki-e/install-action@v2.49.33
         with:
           tool: just,cargo-hack,cargo-nextest,cargo-ci-cache-clean

@@ -113,7 +113,7 @@ jobs:
           toolchain: nightly

       - name: Install just
-        uses: taiki-e/install-action@v2.49.17
+        uses: taiki-e/install-action@v2.49.33
         with:
           tool: just

@@ -24,7 +24,7 @@ jobs:
           components: llvm-tools

       - name: Install just, cargo-llvm-cov, cargo-nextest
-        uses: taiki-e/install-action@v2.49.17
+        uses: taiki-e/install-action@v2.49.33
         with:
           tool: just,cargo-llvm-cov,cargo-nextest

@@ -77,7 +77,7 @@ jobs:
           toolchain: ${{ vars.RUST_VERSION_EXTERNAL_TYPES }}

       - name: Install just
-        uses: taiki-e/install-action@v2.49.17
+        uses: taiki-e/install-action@v2.49.33
         with:
           tool: just

@@ -1,4 +1,3 @@
-Cargo.lock
 target/
 guide/build/
 /gh-pages

File diff suppressed because it is too large
@@ -107,7 +107,7 @@ actix-utils = "3"
 actix-rt = { version = "2.2", default-features = false }

 bitflags = "2"
-bytes = "1"
+bytes = "1.7"
 bytestring = "1"
 derive_more = { version = "2", features = ["as_ref", "deref", "deref_mut", "display", "error", "from"] }
 encoding_rs = "0.8"

@@ -1,19 +1,34 @@
+use std::sync::OnceLock;
+
 use actix_http::HttpService;
 use actix_server::Server;
 use actix_service::map_config;
 use actix_web::{dev::AppConfig, get, App, Responder};

+static MEDIUM: OnceLock<String> = OnceLock::new();
+static LARGE: OnceLock<String> = OnceLock::new();
+
 #[get("/")]
 async fn index() -> impl Responder {
     "Hello, world. From Actix Web!"
 }

+#[get("/large")]
+async fn large() -> &'static str {
+    LARGE.get_or_init(|| "123456890".repeat(1024 * 100))
+}
+
+#[get("/medium")]
+async fn medium() -> &'static str {
+    MEDIUM.get_or_init(|| "123456890".repeat(1024 * 5))
+}
+
 #[tokio::main(flavor = "current_thread")]
 async fn main() -> std::io::Result<()> {
     Server::build()
         .bind("hello-world", "127.0.0.1:8080", || {
             // construct actix-web app
-            let app = App::new().service(index);
+            let app = App::new().service(index).service(large).service(medium);

             HttpService::build()
                 // pass the app to service builder

@@ -0,0 +1,124 @@
+use std::collections::VecDeque;
+
+use bytes::{Buf, BufMut, Bytes, BytesMut};
+
+// 64KB max capacity (arbitrarily chosen)
+const MAX_CAPACITY: usize = 1024 * 64;
+
+pub struct BigBytes {
+    buffer: BytesMut,
+    frozen: VecDeque<Bytes>,
+    frozen_len: usize,
+}
+
+impl BigBytes {
+    /// Initialize a new BigBytes with the internal buffer set to `capacity` capacity
+    pub fn with_capacity(capacity: usize) -> Self {
+        Self {
+            buffer: BytesMut::with_capacity(capacity),
+            frozen: VecDeque::default(),
+            frozen_len: 0,
+        }
+    }
+
+    /// Clear the internal queue and buffer, resetting length to zero
+    ///
+    /// if the internal buffer capacity exceeds 64KB or new_capacity, whichever is greater, it will
+    /// be freed and a new buffer of capacity `new_capacity` will be allocated
+    pub fn clear(&mut self, new_capacity: usize) {
+        std::mem::take(&mut self.frozen);
+        self.frozen_len = 0;
+        self.buffer.clear();
+
+        if self.buffer.capacity() > new_capacity.max(MAX_CAPACITY) {
+            self.buffer = BytesMut::with_capacity(new_capacity);
+        }
+    }
+
+    /// Return a mutable reference to the underlying buffer. This should only be used when dealing
+    /// with small allocations (e.g. writing headers)
+    pub fn buffer_mut(&mut self) -> &mut BytesMut {
+        &mut self.buffer
+    }
+
+    /// Return the total length of the bytes stored in BigBytes
+    pub fn total_len(&mut self) -> usize {
+        self.frozen_len + self.buffer.len()
+    }
+
+    /// Return whether there are no bytes present in the BigBytes
+    pub fn is_empty(&self) -> bool {
+        self.frozen_len == 0 && self.buffer.is_empty()
+    }
+
+    /// Add the `bytes` to the internal structure. If `bytes` exceeds 64KB, it is pushed into a
+    /// queue, otherwise, it is added to a buffer.
+    pub fn put_bytes(&mut self, bytes: Bytes) {
+        if !self.buffer.is_empty() {
+            let current = self.buffer.split().freeze();
+            self.frozen_len += current.len();
+            self.frozen.push_back(current);
+        }
+
+        if !bytes.is_empty() {
+            self.frozen_len += bytes.len();
+            self.frozen.push_back(bytes);
+        }
+    }
+
+    /// Returns a slice of the frontmost buffer
+    ///
+    /// While there are bytes present in BigBytes, front_slice is guaranteed not to return an empty
+    /// slice.
+    pub fn front_slice(&self) -> &[u8] {
+        if let Some(front) = self.frozen.front() {
+            front
+        } else {
+            &self.buffer
+        }
+    }
+
+    /// Advances the first buffer by `count` bytes. If the first buffer is advanced to completion,
+    /// it is popped from the queue
+    pub fn advance(&mut self, count: usize) {
+        if let Some(front) = self.frozen.front_mut() {
+            front.advance(count);
+
+            if front.is_empty() {
+                self.frozen.pop_front();
+            }
+
+            self.frozen_len -= count;
+        } else {
+            self.buffer.advance(count);
+        }
+    }
+
+    /// Pops the front Bytes from the BigBytes, or splits and freezes the internal buffer if no
+    /// Bytes are present.
+    pub fn pop_front(&mut self) -> Option<Bytes> {
+        if let Some(front) = self.frozen.pop_front() {
+            self.frozen_len -= front.len();
+            Some(front)
+        } else if !self.buffer.is_empty() {
+            Some(self.buffer.split().freeze())
+        } else {
+            None
+        }
+    }
+
+    /// Drain the BigBytes, writing everything into the provided BytesMut
+    pub fn write_to(&mut self, dst: &mut BytesMut) {
+        dst.reserve(self.total_len());
+
+        for buf in &self.frozen {
+            dst.put_slice(buf);
+        }
+
+        dst.put_slice(&self.buffer.split());
+
+        self.frozen_len = 0;
+
+        std::mem::take(&mut self.frozen);
+    }
+}

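The new type above keeps small writes (headers, framing bytes) in one contiguous `BytesMut` and parks payloads that already arrive as frozen `Bytes` in a queue, so large bodies are never copied into the write buffer. A minimal illustrative sketch of how it is driven — not part of the PR; the import path assumes the `pub mod big_bytes;` export added later in this diff:

```rust
// Illustrative sketch only; `BigBytes` is the type introduced above and the
// module path assumes the `pub mod big_bytes;` export shown later in this diff.
use actix_http::big_bytes::BigBytes;
use bytes::{Bytes, BytesMut};

fn demo_big_bytes() {
    let mut bb = BigBytes::with_capacity(8 * 1024);

    // Small writes (e.g. status line + headers) land in the contiguous buffer.
    let head = b"HTTP/1.1 200 OK\r\n\r\n";
    bb.buffer_mut().extend_from_slice(head);

    // A large, already-frozen body is queued as-is instead of being copied.
    let body = Bytes::from(vec![b'x'; 128 * 1024]);
    let body_len = body.len();
    bb.put_bytes(body);

    assert_eq!(bb.total_len(), head.len() + body_len);

    // Draining copies everything back into one BytesMut, which is what the
    // compatibility `Encoder` impls in this PR do.
    let mut out = BytesMut::new();
    bb.write_to(&mut out);
    assert!(bb.is_empty());
    assert_eq!(out.len(), head.len() + body_len);
}
```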
@@ -9,7 +9,10 @@ use super::{
     decoder::{self, PayloadDecoder, PayloadItem, PayloadType},
     encoder, Message, MessageType,
 };
-use crate::{body::BodySize, error::ParseError, ConnectionType, Request, Response, ServiceConfig};
+use crate::{
+    big_bytes::BigBytes, body::BodySize, error::ParseError, ConnectionType, Request, Response,
+    ServiceConfig,
+};

 bitflags! {
     #[derive(Debug, Clone, Copy)]
@@ -146,14 +149,12 @@ impl Decoder for Codec {
     }
 }

-impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
-    type Error = io::Error;
-
-    fn encode(
+impl Codec {
+    pub(super) fn encode_bigbytes(
         &mut self,
         item: Message<(Response<()>, BodySize)>,
-        dst: &mut BytesMut,
-    ) -> Result<(), Self::Error> {
+        dst: &mut BigBytes,
+    ) -> std::io::Result<()> {
         match item {
             Message::Item((mut res, length)) => {
                 // set response version
@@ -172,7 +173,7 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {

                 // encode message
                 self.encoder.encode(
-                    dst,
+                    dst.buffer_mut(),
                     &mut res,
                     self.flags.contains(Flags::HEAD),
                     self.flags.contains(Flags::STREAM),
@@ -184,11 +185,11 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
             }

             Message::Chunk(Some(bytes)) => {
-                self.encoder.encode_chunk(bytes.as_ref(), dst)?;
+                self.encoder.encode_chunk_bigbytes(bytes, dst)?;
             }

             Message::Chunk(None) => {
-                self.encoder.encode_eof(dst)?;
+                self.encoder.encode_eof(dst.buffer_mut())?;
             }
         }

@@ -196,6 +197,23 @@ impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
     }
 }

+impl Encoder<Message<(Response<()>, BodySize)>> for Codec {
+    type Error = io::Error;
+
+    fn encode(
+        &mut self,
+        item: Message<(Response<()>, BodySize)>,
+        dst: &mut BytesMut,
+    ) -> Result<(), Self::Error> {
+        let mut bigbytes = BigBytes::with_capacity(1024 * 8);
+        self.encode_bigbytes(item, &mut bigbytes)?;
+
+        bigbytes.write_to(dst);
+
+        Ok(())
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;

@@ -12,11 +12,11 @@ use actix_codec::{Framed, FramedParts};
 use actix_rt::time::sleep_until;
 use actix_service::Service;
 use bitflags::bitflags;
-use bytes::{Buf, BytesMut};
+use bytes::BytesMut;
 use futures_core::ready;
 use pin_project_lite::pin_project;
 use tokio::io::{AsyncRead, AsyncWrite};
-use tokio_util::codec::{Decoder as _, Encoder as _};
+use tokio_util::codec::Decoder as _;
 use tracing::{error, trace};

 use super::{
@@ -27,6 +27,7 @@ use super::{
     Message, MessageType,
 };
 use crate::{
+    big_bytes::BigBytes,
     body::{BodySize, BoxBody, MessageBody},
     config::ServiceConfig,
     error::{DispatchError, ParseError, PayloadError},
@@ -165,7 +166,7 @@ pin_project! {

         pub(super) io: Option<T>,
         read_buf: BytesMut,
-        write_buf: BytesMut,
+        write_buf: BigBytes,
         codec: Codec,
     }
 }
@@ -277,7 +278,7 @@ where

                     io: Some(io),
                     read_buf: BytesMut::with_capacity(HW_BUFFER_SIZE),
-                    write_buf: BytesMut::with_capacity(HW_BUFFER_SIZE),
+                    write_buf: BigBytes::with_capacity(HW_BUFFER_SIZE),
                     codec: Codec::new(config),
                 },
             },
@@ -329,27 +330,24 @@ where
         let InnerDispatcherProj { io, write_buf, .. } = self.project();
         let mut io = Pin::new(io.as_mut().unwrap());

-        let len = write_buf.len();
-        let mut written = 0;
-
-        while written < len {
-            match io.as_mut().poll_write(cx, &write_buf[written..])? {
+        while write_buf.total_len() > 0 {
+            match io.as_mut().poll_write(cx, write_buf.front_slice())? {
                 Poll::Ready(0) => {
+                    println!("WRITE ZERO");
                     error!("write zero; closing");
                     return Poll::Ready(Err(io::Error::new(io::ErrorKind::WriteZero, "")));
                 }

-                Poll::Ready(n) => written += n,
+                Poll::Ready(n) => write_buf.advance(n),

                 Poll::Pending => {
-                    write_buf.advance(written);
                     return Poll::Pending;
                 }
             }
         }

         // everything has written to I/O; clear buffer
-        write_buf.clear();
+        write_buf.clear(HW_BUFFER_SIZE);

         // flush the I/O and check if get blocked
         io.poll_flush(cx)
@@ -365,7 +363,7 @@ where
         let size = body.size();

         this.codec
-            .encode(Message::Item((res, size)), this.write_buf)
+            .encode_bigbytes(Message::Item((res, size)), this.write_buf)
             .map_err(|err| {
                 if let Some(mut payload) = this.payload.take() {
                     payload.set_error(PayloadError::Incomplete(None));
@@ -416,6 +414,7 @@ where
     fn send_continue(self: Pin<&mut Self>) {
         self.project()
             .write_buf
+            .buffer_mut()
             .extend_from_slice(b"HTTP/1.1 100 Continue\r\n\r\n");
     }

@@ -493,15 +492,16 @@ where
                 StateProj::SendPayload { mut body } => {
                     // keep populate writer buffer until buffer size limit hit,
                     // get blocked or finished.
-                    while this.write_buf.len() < super::payload::MAX_BUFFER_SIZE {
+                    while this.write_buf.total_len() < super::payload::MAX_BUFFER_SIZE {
                         match body.as_mut().poll_next(cx) {
                             Poll::Ready(Some(Ok(item))) => {
                                 this.codec
-                                    .encode(Message::Chunk(Some(item)), this.write_buf)?;
+                                    .encode_bigbytes(Message::Chunk(Some(item)), this.write_buf)?;
                             }

                             Poll::Ready(None) => {
-                                this.codec.encode(Message::Chunk(None), this.write_buf)?;
+                                this.codec
+                                    .encode_bigbytes(Message::Chunk(None), this.write_buf)?;

                                 // payload stream finished.
                                 // set state to None and handle next message
@@ -532,15 +532,16 @@ where

                     // keep populate writer buffer until buffer size limit hit,
                     // get blocked or finished.
-                    while this.write_buf.len() < super::payload::MAX_BUFFER_SIZE {
+                    while this.write_buf.total_len() < super::payload::MAX_BUFFER_SIZE {
                         match body.as_mut().poll_next(cx) {
                             Poll::Ready(Some(Ok(item))) => {
                                 this.codec
-                                    .encode(Message::Chunk(Some(item)), this.write_buf)?;
+                                    .encode_bigbytes(Message::Chunk(Some(item)), this.write_buf)?;
                             }

                             Poll::Ready(None) => {
-                                this.codec.encode(Message::Chunk(None), this.write_buf)?;
+                                this.codec
+                                    .encode_bigbytes(Message::Chunk(None), this.write_buf)?;

                                 // payload stream finished
                                 // set state to None and handle next message
@@ -575,6 +576,7 @@ where
                         // to service call.
                         Poll::Ready(Ok(req)) => {
                             this.write_buf
+                                .buffer_mut()
                                 .extend_from_slice(b"HTTP/1.1 100 Continue\r\n\r\n");
                             let fut = this.flow.service.call(req);
                             this.state.set(State::ServiceCall { fut });
@@ -1027,7 +1029,7 @@ where
                         mem::take(this.codec),
                         mem::take(this.read_buf),
                     );
-                    parts.write_buf = mem::take(this.write_buf);
+                    this.write_buf.write_to(&mut parts.write_buf);
                     let framed = Framed::from_parts(parts);
                     this.flow.upgrade.as_ref().unwrap().call((req, framed))
                 }

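The rewritten `poll_flush` above no longer tracks a `written` offset into one contiguous buffer; it repeatedly writes whatever `front_slice()` exposes and advances the queue. A simplified, synchronous sketch of the same drain pattern — a hypothetical helper written against a blocking writer, not code from the PR, assuming the `BigBytes` API shown earlier:

```rust
// Hypothetical illustration of the drain loop used in poll_flush above,
// written against std::io::Write instead of a polled AsyncWrite.
use std::io::{self, Write};

use actix_http::big_bytes::BigBytes;

fn drain_to<W: Write>(write_buf: &mut BigBytes, io: &mut W) -> io::Result<()> {
    while write_buf.total_len() > 0 {
        // front_slice() is guaranteed non-empty while data remains.
        let n = io.write(write_buf.front_slice())?;
        if n == 0 {
            return Err(io::Error::new(io::ErrorKind::WriteZero, "write zero"));
        }
        // Consume what was written; a fully consumed front chunk is popped internally.
        write_buf.advance(n);
    }
    // Reset, shrinking the internal buffer if it grew past the 64KB cap.
    write_buf.clear(8 * 1024);
    Ok(())
}
```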
@@ -6,9 +6,10 @@ use std::{
     slice::from_raw_parts_mut,
 };

-use bytes::{BufMut, BytesMut};
+use bytes::{BufMut, Bytes, BytesMut};

 use crate::{
+    big_bytes::BigBytes,
     body::BodySize,
     header::{
         map::Value, HeaderMap, HeaderName, CONNECTION, CONTENT_LENGTH, DATE, TRANSFER_ENCODING,
@@ -323,6 +324,14 @@ impl<T: MessageType> MessageEncoder<T> {
         self.te.encode(msg, buf)
     }

+    pub(super) fn encode_chunk_bigbytes(
+        &mut self,
+        msg: Bytes,
+        buf: &mut BigBytes,
+    ) -> io::Result<bool> {
+        self.te.encode_bigbytes(msg, buf)
+    }
+
     /// Encode EOF.
     pub fn encode_eof(&mut self, buf: &mut BytesMut) -> io::Result<()> {
         self.te.encode_eof(buf)
@@ -414,6 +423,63 @@ impl TransferEncoding {
         }
     }

+    #[inline]
+    /// Encode message. Return `EOF` state of encoder
+    pub(super) fn encode_bigbytes(&mut self, msg: Bytes, buf: &mut BigBytes) -> io::Result<bool> {
+        match self.kind {
+            TransferEncodingKind::Eof => {
+                let eof = msg.is_empty();
+                if msg.len() > 1024 * 64 {
+                    buf.put_bytes(msg);
+                } else {
+                    buf.buffer_mut().extend_from_slice(&msg);
+                }
+                Ok(eof)
+            }
+            TransferEncodingKind::Chunked(ref mut eof) => {
+                if *eof {
+                    return Ok(true);
+                }
+
+                if msg.is_empty() {
+                    *eof = true;
+                    buf.buffer_mut().extend_from_slice(b"0\r\n\r\n");
+                } else {
+                    writeln!(helpers::MutWriter(buf.buffer_mut()), "{:X}\r", msg.len())
+                        .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
+
+                    if msg.len() > 1024 * 64 {
+                        buf.put_bytes(msg);
+                    } else {
+                        buf.buffer_mut().reserve(msg.len() + 2);
+                        buf.buffer_mut().extend_from_slice(&msg);
+                    }
+                    buf.buffer_mut().extend_from_slice(b"\r\n");
+                }
+                Ok(*eof)
+            }
+            TransferEncodingKind::Length(ref mut remaining) => {
+                if *remaining > 0 {
+                    if msg.is_empty() {
+                        return Ok(*remaining == 0);
+                    }
+                    let len = cmp::min(*remaining, msg.len() as u64);
+
+                    if len > 1024 * 64 {
+                        buf.put_bytes(msg.slice(..len as usize));
+                    } else {
+                        buf.buffer_mut().extend_from_slice(&msg[..len as usize]);
+                    }
+
+                    *remaining -= len;
+                    Ok(*remaining == 0)
+                } else {
+                    Ok(true)
+                }
+            }
+        }
+    }
+
     /// Encode message. Return `EOF` state of encoder
     #[inline]
     pub fn encode(&mut self, msg: &[u8], buf: &mut BytesMut) -> io::Result<bool> {

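For reference, the chunked arm of `encode_bigbytes` above emits standard HTTP/1.1 chunked framing: the size in hex plus CRLF, the chunk data, a CRLF, and a final `0\r\n\r\n` once an empty chunk signals end-of-stream. A small self-contained check of the expected bytes (illustrative only, not from the PR):

```rust
// Illustrative only: the byte layout the chunked arm above writes for one
// 5-byte chunk followed by the end-of-stream marker.
use bytes::BytesMut;

fn chunked_framing_example() {
    let payload = b"hello";

    let mut wire = BytesMut::new();
    wire.extend_from_slice(format!("{:X}\r\n", payload.len()).as_bytes()); // size in hex + CRLF
    wire.extend_from_slice(payload); // chunk data
    wire.extend_from_slice(b"\r\n"); // chunk terminator
    wire.extend_from_slice(b"0\r\n\r\n"); // empty chunk signals EOF

    assert_eq!(&wire[..], &b"5\r\nhello\r\n0\r\n\r\n"[..]);
}
```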
@@ -31,6 +31,7 @@
 pub use http::{uri, uri::Uri, Method, StatusCode, Version};

+pub mod big_bytes;
 pub mod body;
 mod builder;
 mod config;

@@ -4,6 +4,8 @@ use bytestring::ByteString;
 use tokio_util::codec::{Decoder, Encoder};
 use tracing::error;

+use crate::big_bytes::BigBytes;
+
 use super::{
     frame::Parser,
     proto::{CloseReason, OpCode},
@@ -116,51 +118,55 @@ impl Default for Codec {
     }
 }

-impl Encoder<Message> for Codec {
-    type Error = ProtocolError;
-
-    fn encode(&mut self, item: Message, dst: &mut BytesMut) -> Result<(), Self::Error> {
+impl Codec {
+    pub fn encode_bigbytes(
+        &mut self,
+        item: Message,
+        dst: &mut BigBytes,
+    ) -> Result<(), ProtocolError> {
         match item {
-            Message::Text(txt) => Parser::write_message(
+            Message::Text(txt) => Parser::write_message_bigbytes(
                 dst,
-                txt,
+                txt.into_bytes(),
                 OpCode::Text,
                 true,
                 !self.flags.contains(Flags::SERVER),
             ),
-            Message::Binary(bin) => Parser::write_message(
+            Message::Binary(bin) => Parser::write_message_bigbytes(
                 dst,
                 bin,
                 OpCode::Binary,
                 true,
                 !self.flags.contains(Flags::SERVER),
             ),
-            Message::Ping(txt) => Parser::write_message(
+            Message::Ping(txt) => Parser::write_message_bigbytes(
                 dst,
                 txt,
                 OpCode::Ping,
                 true,
                 !self.flags.contains(Flags::SERVER),
             ),
-            Message::Pong(txt) => Parser::write_message(
+            Message::Pong(txt) => Parser::write_message_bigbytes(
                 dst,
                 txt,
                 OpCode::Pong,
                 true,
                 !self.flags.contains(Flags::SERVER),
             ),
-            Message::Close(reason) => {
-                Parser::write_close(dst, reason, !self.flags.contains(Flags::SERVER))
-            }
+            Message::Close(reason) => Parser::write_close(
+                dst.buffer_mut(),
+                reason,
+                !self.flags.contains(Flags::SERVER),
+            ),
             Message::Continuation(cont) => match cont {
                 Item::FirstText(data) => {
                     if self.flags.contains(Flags::W_CONTINUATION) {
                         return Err(ProtocolError::ContinuationStarted);
                     } else {
                         self.flags.insert(Flags::W_CONTINUATION);
-                        Parser::write_message(
+                        Parser::write_message_bigbytes(
                             dst,
-                            &data[..],
+                            data,
                             OpCode::Text,
                             false,
                             !self.flags.contains(Flags::SERVER),
@@ -172,9 +178,9 @@ impl Encoder<Message> for Codec {
                         return Err(ProtocolError::ContinuationStarted);
                     } else {
                         self.flags.insert(Flags::W_CONTINUATION);
-                        Parser::write_message(
+                        Parser::write_message_bigbytes(
                             dst,
-                            &data[..],
+                            data,
                             OpCode::Binary,
                             false,
                             !self.flags.contains(Flags::SERVER),
@@ -183,9 +189,9 @@
                 }
                 Item::Continue(data) => {
                     if self.flags.contains(Flags::W_CONTINUATION) {
-                        Parser::write_message(
+                        Parser::write_message_bigbytes(
                             dst,
-                            &data[..],
+                            data,
                             OpCode::Continue,
                             false,
                             !self.flags.contains(Flags::SERVER),
@@ -197,9 +203,9 @@
                 Item::Last(data) => {
                     if self.flags.contains(Flags::W_CONTINUATION) {
                         self.flags.remove(Flags::W_CONTINUATION);
-                        Parser::write_message(
+                        Parser::write_message_bigbytes(
                             dst,
-                            &data[..],
+                            data,
                             OpCode::Continue,
                             true,
                             !self.flags.contains(Flags::SERVER),
@@ -215,6 +221,20 @@
     }
 }

+impl Encoder<Message> for Codec {
+    type Error = ProtocolError;
+
+    fn encode(&mut self, item: Message, dst: &mut BytesMut) -> Result<(), Self::Error> {
+        let mut big_bytes = BigBytes::with_capacity(0);
+
+        self.encode_bigbytes(item, &mut big_bytes)?;
+
+        big_bytes.write_to(dst);
+
+        Ok(())
+    }
+}
+
 impl Decoder for Codec {
     type Item = Frame;
     type Error = ProtocolError;

@@ -1,8 +1,10 @@
 use std::cmp::min;

-use bytes::{Buf, BufMut, BytesMut};
+use bytes::{Buf, BufMut, Bytes, BytesMut};
 use tracing::debug;

+use crate::big_bytes::BigBytes;
+
 use super::{
     mask::apply_mask,
     proto::{CloseCode, CloseReason, OpCode},
@@ -156,51 +158,68 @@ impl Parser {
         }
     }

-    /// Generate binary representation
-    pub fn write_message<B: AsRef<[u8]>>(
-        dst: &mut BytesMut,
-        pl: B,
+    pub fn write_message_bigbytes(
+        dst: &mut BigBytes,
+        pl: Bytes,
         op: OpCode,
         fin: bool,
         mask: bool,
     ) {
-        let payload = pl.as_ref();
         let one: u8 = if fin {
             0x80 | Into::<u8>::into(op)
         } else {
             op.into()
         };
-        let payload_len = payload.len();
-        let (two, p_len) = if mask {
-            (0x80, payload_len + 4)
-        } else {
-            (0, payload_len)
-        };
+        let payload_len = pl.len();
+        let two = if mask { 0x80 } else { 0 };

         if payload_len < 126 {
-            dst.reserve(p_len + 2);
-            dst.put_slice(&[one, two | payload_len as u8]);
+            dst.buffer_mut().reserve(2);
+            dst.buffer_mut().put_slice(&[one, two | payload_len as u8]);
         } else if payload_len <= 65_535 {
-            dst.reserve(p_len + 4);
-            dst.put_slice(&[one, two | 126]);
-            dst.put_u16(payload_len as u16);
+            dst.buffer_mut().reserve(4);
+            dst.buffer_mut().put_slice(&[one, two | 126]);
+            dst.buffer_mut().put_u16(payload_len as u16);
         } else {
-            dst.reserve(p_len + 10);
-            dst.put_slice(&[one, two | 127]);
-            dst.put_u64(payload_len as u64);
+            dst.buffer_mut().reserve(10);
+            dst.buffer_mut().put_slice(&[one, two | 127]);
+            dst.buffer_mut().put_u64(payload_len as u64);
         };

         if mask {
             let mask = rand::random::<[u8; 4]>();
-            dst.put_slice(mask.as_ref());
-            dst.put_slice(payload.as_ref());
-            let pos = dst.len() - payload_len;
-            apply_mask(&mut dst[pos..], mask);
+            dst.buffer_mut().put_slice(mask.as_ref());
+
+            match pl.try_into_mut() {
+                // Avoid copying bytes by mutating in-place
+                Ok(mut pl_mut) => {
+                    apply_mask(&mut pl_mut, mask);
+                    dst.put_bytes(pl_mut.freeze());
+                }
+
+                // We need to copy the bytes anyway at this point, so put them in the buffer
+                // directly
+                Err(pl) => {
+                    dst.buffer_mut().reserve(pl.len());
+                    dst.buffer_mut().put_slice(pl.as_ref());
+                    let pos = dst.buffer_mut().len() - payload_len;
+                    apply_mask(&mut dst.buffer_mut()[pos..], mask);
+                }
+            }
         } else {
-            dst.put_slice(payload.as_ref());
+            dst.put_bytes(pl)
         }
     }

+    /// Generate binary representation
+    pub fn write_message(dst: &mut BytesMut, pl: Bytes, op: OpCode, fin: bool, mask: bool) {
+        let mut big_bytes = BigBytes::with_capacity(0);
+
+        Self::write_message_bigbytes(&mut big_bytes, pl, op, fin, mask);
+
+        big_bytes.write_to(dst);
+    }
+
     /// Create a new Close control frame.
     #[inline]
     pub fn write_close(dst: &mut BytesMut, reason: Option<CloseReason>, mask: bool) {
@@ -215,7 +234,7 @@ impl Parser {
             }
         };

-        Parser::write_message(dst, payload, OpCode::Close, true, mask)
+        Parser::write_message(dst, Bytes::from(payload), OpCode::Close, true, mask)
     }
 }

@@ -368,7 +387,13 @@ mod tests {
     #[test]
     fn test_ping_frame() {
         let mut buf = BytesMut::new();
-        Parser::write_message(&mut buf, Vec::from("data"), OpCode::Ping, true, false);
+        Parser::write_message(
+            &mut buf,
+            Bytes::from(Vec::from("data")),
+            OpCode::Ping,
+            true,
+            false,
+        );

         let mut v = vec![137u8, 4u8];
         v.extend(b"data");
@@ -378,7 +403,13 @@
     #[test]
     fn test_pong_frame() {
         let mut buf = BytesMut::new();
-        Parser::write_message(&mut buf, Vec::from("data"), OpCode::Pong, true, false);
+        Parser::write_message(
+            &mut buf,
+            Bytes::from(Vec::from("data")),
+            OpCode::Pong,
+            true,
+            false,
+        );

         let mut v = vec![138u8, 4u8];
         v.extend(b"data");

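The masking branch above relies on `Bytes::try_into_mut`, which only succeeds when the handle is the unique owner of its storage — presumably the reason the actix-http manifest earlier in this diff bumps `bytes` to "1.7". A small sketch of the assumed behaviour, not code from the PR:

```rust
// Assumed behaviour of Bytes::try_into_mut (bytes >= 1.7 per the manifest bump
// above): in-place access is only granted when no other handle shares the buffer.
use bytes::Bytes;

fn try_into_mut_example() {
    let unique = Bytes::from(vec![0u8; 16]);
    // Sole owner: we get a BytesMut back and could apply the mask in place.
    assert!(unique.try_into_mut().is_ok());

    let shared = Bytes::from(vec![0u8; 16]);
    let _other = shared.clone();
    // Shared: the Bytes is handed back unchanged, which is the Err(pl)
    // copy fallback taken in write_message_bigbytes above.
    assert!(shared.try_into_mut().is_err());
}
```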
@@ -18,8 +18,8 @@ all-features = true
 proc-macro = true

 [dependencies]
+bytesize = "2"
 darling = "0.20"
-parse-size = "1"
 proc-macro2 = "1"
 quote = "1"
 syn = "2"

@@ -27,7 +27,7 @@ syn = "2"
 [dev-dependencies]
 actix-multipart = "0.7"
 actix-web = "4"
-rustversion = "1"
+rustversion-msrv = "0.100"
 trybuild = "1"

 [lints]

@@ -9,8 +9,8 @@

 use std::collections::HashSet;

+use bytesize::ByteSize;
 use darling::{FromDeriveInput, FromField, FromMeta};
-use parse_size::parse_size;
 use proc_macro::TokenStream;
 use proc_macro2::Ident;
 use quote::quote;

@@ -103,7 +103,7 @@ struct ParsedField<'t> {
 /// # Field Limits
 ///
 /// You can use the `#[multipart(limit = "<size>")]` attribute to set field level limits. The limit
-/// string is parsed using [parse_size].
+/// string is parsed using [`bytesize`].
 ///
 /// Note: the form is also subject to the global limits configured using `MultipartFormConfig`.
 ///

@@ -150,7 +150,7 @@ struct ParsedField<'t> {
 /// struct Form { }
 /// ```
 ///
-/// [parse_size]: https://docs.rs/parse-size/1/parse_size
+/// [`bytesize`]: https://docs.rs/bytesize/2
 #[proc_macro_derive(MultipartForm, attributes(multipart))]
 pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
     let input: syn::DeriveInput = parse_macro_input!(input);

@@ -191,8 +191,8 @@ pub fn impl_multipart_form(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
     let attrs = FieldAttrs::from_field(field).map_err(|err| err.write_errors())?;
     let serialization_name = attrs.rename.unwrap_or_else(|| rust_name.to_string());

-    let limit = match attrs.limit.map(|limit| match parse_size(&limit) {
-        Ok(size) => Ok(usize::try_from(size).unwrap()),
+    let limit = match attrs.limit.map(|limit| match limit.parse::<ByteSize>() {
+        Ok(ByteSize(size)) => Ok(usize::try_from(size).unwrap()),
         Err(err) => Err(syn::Error::new(
             field.ident.as_ref().unwrap().span(),
             format!("Could not parse size limit `{}`: {}", limit, err),

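With the switch from `parse-size` to `bytesize`, limit strings are now parsed through `ByteSize`'s `FromStr` implementation, which is what produces the new error messages in the trybuild stderr snapshot further down. A hedged sanity check of the assumed parsing behaviour (not part of the PR):

```rust
// Assumed bytesize v2 behaviour; mirrors how the derive macro above turns the
// limit attribute into a usize.
use bytesize::ByteSize;

fn parse_limit_example() {
    let limit: ByteSize = "2 MiB".parse().expect("unit suffixes like MiB should parse");
    assert_eq!(limit, ByteSize::mib(2));

    // Same extraction as the generated code: pull out the u64, convert to usize.
    let ByteSize(bytes) = limit;
    assert_eq!(usize::try_from(bytes).unwrap(), 2 * 1024 * 1024);

    // Spelled-out units are rejected, which the updated trybuild stderr below reflects.
    assert!("2 bytes".parse::<ByteSize>().is_err());
}
```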
@@ -1,4 +1,4 @@
-#[rustversion::stable(1.72)] // MSRV
+#[rustversion_msrv::msrv]
 #[test]
 fn compile_macros() {
     let t = trybuild::TestCases::new();

@@ -1,16 +1,16 @@
-error: Could not parse size limit `2 bytes`: invalid digit found in string
+error: Could not parse size limit `2 bytes`: couldn't parse "bytes" into a known SI unit, couldn't parse unit of "bytes"
   --> tests/trybuild/size-limit-parse-fail.rs:6:5
    |
 6  |     description: Text<String>,
    |     ^^^^^^^^^^^
 
-error: Could not parse size limit `2 megabytes`: invalid digit found in string
+error: Could not parse size limit `2 megabytes`: couldn't parse "megabytes" into a known SI unit, couldn't parse unit of "megabytes"
   --> tests/trybuild/size-limit-parse-fail.rs:12:5
    |
 12 |     description: Text<String>,
    |     ^^^^^^^^^^^
 
-error: Could not parse size limit `four meters`: invalid digit found in string
+error: Could not parse size limit `four meters`: couldn't parse "four meters" into a ByteSize, cannot parse float from empty string
   --> tests/trybuild/size-limit-parse-fail.rs:18:5
    |
 18 |     description: Text<String>,

@@ -34,7 +34,7 @@ actix-web = "4"

 futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
 trybuild = "1"
-rustversion = "1"
+rustversion-msrv = "0.100"

 [lints]
 workspace = true

@@ -1,4 +1,4 @@
-#[rustversion::stable(1.72)] // MSRV
+#[rustversion_msrv::msrv]
 #[test]
 fn compile_macros() {
     let t = trybuild::TestCases::new();

justfile (12 lines changed)
@@ -7,14 +7,14 @@ fmt:
     cargo +nightly fmt
     fd --hidden --type=file --extension=md --extension=yml --exec-batch npx -y prettier --write
 
-# Downgrade dev-dependencies necessary to run MSRV checks/tests.
+# Downgrade dependencies necessary to run MSRV checks/tests.
 [private]
 downgrade-for-msrv:
-    cargo update -p=parse-size --precise=1.0.0
-    cargo update -p=clap --precise=4.4.18
-    cargo update -p=divan --precise=0.1.15
-    cargo update -p=litemap --precise=0.7.4
-    cargo update -p=zerofrom --precise=0.1.5
+    cargo update -p=clap --precise=4.4.18 # next ver: 1.74.0
+    cargo update -p=divan --precise=0.1.15 # next ver: 1.80.0
+    cargo update -p=litemap --precise=0.7.4 # next ver: 1.81.0
+    cargo update -p=zerofrom --precise=0.1.5 # next ver: 1.81.0
+    cargo update -p=half --precise=2.4.1 # next ver: 1.81.0
 
 msrv := ```
 cargo metadata --format-version=1 \