mirror of https://github.com/fafhrd91/actix-web
Merge branch 'master' into master
Commit 18ce0b644e
@@ -40,8 +40,9 @@ percent-encoding = "2.1"
 pin-project-lite = "0.2.7"
 
 # experimental-io-uring
-tokio-uring = { version = "0.2", optional = true, features = ["bytes"] }
-actix-server = "2.1" # ensure matching tokio-uring versions
+[target.'cfg(target_os = "linux")'.dependencies]
+tokio-uring = { version = "0.3", optional = true, features = ["bytes"] }
+actix-server = { version = "2.1", optional = true } # ensure matching tokio-uring versions
 
 [dev-dependencies]
 actix-rt = "2.7"
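Context for the manifest change above (not part of the diff): the io-uring dependencies now live in a Linux-only target table and are optional, so code that uses them is normally gated the same way. A minimal sketch, assuming an `experimental-io-uring` Cargo feature that enables `tokio-uring`; the function below is illustrative, not actix-web API.

```rust
// Sketch only: gate io-uring usage on the same conditions as the dependency
// table (Linux target + the optional feature being enabled).
#[cfg(all(target_os = "linux", feature = "experimental-io-uring"))]
fn run_on_uring() {
    // tokio_uring::start drives a future to completion on a per-thread,
    // io_uring-backed runtime.
    tokio_uring::start(async {
        let file = tokio_uring::fs::File::open("Cargo.toml").await.unwrap();
        // Owned-buffer API: the buffer is handed to the kernel and returned
        // together with the result.
        let (res, _buf) = file.read_at(vec![0u8; 4096], 0).await;
        println!("read {} bytes", res.unwrap());
    });
}

#[cfg(not(all(target_os = "linux", feature = "experimental-io-uring")))]
fn run_on_uring() {
    // Stub keeps the example compiling when the feature or target is absent.
}

fn main() {
    run_on_uring();
}
```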
@@ -114,7 +114,7 @@ mod _original {
     use std::mem::MaybeUninit;
 
     pub fn parse_headers(src: &mut BytesMut) -> usize {
-        #![allow(clippy::uninit_assumed_init)]
+        #![allow(invalid_value, clippy::uninit_assumed_init)]
 
         let mut headers: [HeaderIndex; MAX_HEADERS] =
             unsafe { MaybeUninit::uninit().assume_init() };
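A note on the widened allow above: `MaybeUninit::uninit().assume_init()` on a plain `[HeaderIndex; MAX_HEADERS]` is exactly what rustc's `invalid_value` lint flags, hence the benchmark silences it alongside the clippy lint. A minimal sketch of the usual sound alternative, using an array of `MaybeUninit` elements (types here are illustrative, not the benchmark's `HeaderIndex`):

```rust
use std::mem::{transmute, MaybeUninit};

// Sketch only: an array of MaybeUninit<T> may itself be "assumed init" because
// MaybeUninit needs no initialization; each element is written before use.
fn filled() -> [u32; 4] {
    let mut buf: [MaybeUninit<u32>; 4] = unsafe { MaybeUninit::uninit().assume_init() };
    for (i, slot) in buf.iter_mut().enumerate() {
        slot.write(i as u32);
    }
    // SAFETY: every element was initialized in the loop above, and
    // [MaybeUninit<u32>; 4] has the same layout as [u32; 4].
    unsafe { transmute::<[MaybeUninit<u32>; 4], [u32; 4]>(buf) }
}

fn main() {
    assert_eq!(filled(), [0, 1, 2, 3]);
}
```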
@@ -263,10 +263,8 @@ mod tests {
     assert_not_impl_any!(Payload: Send, Sync, UnwindSafe, RefUnwindSafe);
 
     assert_impl_all!(Inner: Unpin, Send, Sync);
-    #[rustversion::before(1.60)]
-    assert_not_impl_any!(Inner: UnwindSafe, RefUnwindSafe);
-    #[rustversion::since(1.60)]
-    assert_impl_all!(Inner: UnwindSafe, RefUnwindSafe);
+    // assertion not stable wrt rustc versions yet
+    // assert_impl_all!(Inner: UnwindSafe, RefUnwindSafe);
 
     #[actix_rt::test]
     async fn test_unread_data() {
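For reference, `assert_impl_all!` and `assert_not_impl_any!` are compile-time checks from the `static_assertions` crate, which is why an assertion whose outcome depends on the rustc version is easier to comment out than to keep gated. A standalone sketch with illustrative types:

```rust
use static_assertions::{assert_impl_all, assert_not_impl_any};

struct PlainData(u32); // Send + Sync automatically
struct NotThreadSafe(std::rc::Rc<()>); // Rc is neither Send nor Sync

// These expand to items that only compile if the stated bounds hold (or fail to
// hold), so a violation is caught at build time rather than when tests run.
assert_impl_all!(PlainData: Send, Sync);
assert_not_impl_any!(NotThreadSafe: Send, Sync);

fn main() {}
```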
@@ -1,9 +1,14 @@
 # Changelog
 
 ## Unreleased - 2021-xx-xx
+### Added
 - Add `ServiceRequest::extract` to make it easier to use extractors when writing middlewares. [#2647]
 
+### Fixed
+- Clear connection-level data on `HttpRequest` drop. [#2742]
+
 [#2647]: https://github.com/actix/actix-web/pull/2647
+[#2742]: https://github.com/actix/actix-web/pull/2742
 
 
 ## 4.0.1 - 2022-02-25
@@ -381,12 +381,16 @@ impl Drop for HttpRequest {
                 inner.app_data.truncate(1);
 
                 // Inner is borrowed mut here and; get req data mutably to reduce borrow check. Also
-                // we know the req_data Rc will not have any cloned at this point to unwrap is okay.
+                // we know the req_data Rc will not have any clones at this point to unwrap is okay.
                 Rc::get_mut(&mut inner.extensions)
                     .unwrap()
                     .get_mut()
                     .clear();
 
+                // We can't use the same trick as req data because the conn_data is held by the
+                // dispatcher, too.
+                inner.conn_data = None;
+
                 // a re-borrow of pool is necessary here.
                 let req = Rc::clone(&self.inner);
                 self.app_state().pool().push(req);
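The added lines above follow a reset-before-pooling pattern: request-level extensions are cleared in place, while `conn_data` is only released, because the dispatcher still holds its own handle to it. A minimal sketch of that pattern with illustrative types (not actix-web's real `Inner` or pool):

```rust
use std::cell::RefCell;
use std::rc::Rc;

// Illustrative stand-ins for the pooled request object and its pool.
struct PooledRequest {
    conn_data: Option<Rc<()>>, // connection-level data, also held by the "dispatcher"
    extensions: Vec<String>,   // request-level data owned solely by this request
}

struct Pool {
    slots: RefCell<Vec<PooledRequest>>,
}

impl Pool {
    fn push(&self, mut req: PooledRequest) {
        // Request-level data is cleared in place so the allocation can be reused...
        req.extensions.clear();
        // ...while connection-level data is merely released; the dispatcher's
        // clone keeps it alive for the rest of the connection.
        req.conn_data = None;
        self.slots.borrow_mut().push(req);
    }
}

fn main() {
    let dispatcher_handle = Rc::new(());
    let pool = Pool { slots: RefCell::new(Vec::new()) };
    pool.push(PooledRequest {
        conn_data: Some(Rc::clone(&dispatcher_handle)),
        extensions: vec!["req-scoped".to_owned()],
    });
    assert!(pool.slots.borrow()[0].conn_data.is_none());
    assert_eq!(Rc::strong_count(&dispatcher_handle), 1); // only the dispatcher's handle remains
}
```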
@@ -761,10 +765,8 @@ mod tests {
         assert_eq!(body, Bytes::from_static(b"1"));
     }
 
-    // allow deprecated App::data
-    #[allow(deprecated)]
     #[actix_rt::test]
-    async fn test_extensions_dropped() {
+    async fn test_app_data_dropped() {
        struct Tracker {
            pub dropped: bool,
        }
@@ -780,7 +782,7 @@ mod tests {
        let tracker = Rc::new(RefCell::new(Tracker { dropped: false }));
        {
            let tracker2 = Rc::clone(&tracker);
-            let srv = init_service(App::new().data(10u32).service(web::resource("/").to(
+            let srv = init_service(App::new().service(web::resource("/").to(
                move |req: HttpRequest| {
                    req.extensions_mut().insert(Foo {
                        tracker: Rc::clone(&tracker2),
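The test touched by the two hunks above relies on a drop-tracking pattern: `Foo` keeps an `Rc<RefCell<Tracker>>` and flips `dropped` in its `Drop` impl, so the test can assert that the value stored in the request was actually destroyed. A self-contained sketch of just that pattern; the `Drop for Foo` impl is not shown in the diff and is reconstructed here as an assumption:

```rust
use std::cell::RefCell;
use std::rc::Rc;

struct Tracker {
    pub dropped: bool,
}

struct Foo {
    tracker: Rc<RefCell<Tracker>>,
}

impl Drop for Foo {
    fn drop(&mut self) {
        // Record that the value was destroyed so the outer scope can assert on it.
        self.tracker.borrow_mut().dropped = true;
    }
}

fn main() {
    let tracker = Rc::new(RefCell::new(Tracker { dropped: false }));
    {
        let tracker2 = Rc::clone(&tracker);
        let _foo = Foo { tracker: tracker2 };
        // _foo is dropped at the end of this block.
    }
    assert!(tracker.borrow().dropped);
}
```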
@@ -53,7 +53,7 @@ use crate::cookie::{Cookie, CookieJar};
///     assert_eq!(resp.status(), StatusCode::OK);
///
///     let req = test::TestRequest::default().to_http_request();
-///     let resp = index(req).await;
+///     let resp = handler(req).await;
///     assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
/// }
/// ```
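The doc example touched above follows the usual `actix_web::test` pattern: build a request with `TestRequest`, call the handler directly, and assert on the response. A compilable sketch along the same lines; the handler body is illustrative rather than the exact one from the doc comment:

```rust
use actix_web::{http::StatusCode, test, HttpRequest, HttpResponse};

// Illustrative handler: 200 if a content-type header is present, 400 otherwise.
async fn handler(req: HttpRequest) -> HttpResponse {
    if req.headers().contains_key("content-type") {
        HttpResponse::Ok().finish()
    } else {
        HttpResponse::BadRequest().finish()
    }
}

#[actix_web::main]
async fn main() {
    // A default TestRequest carries no headers, so the handler rejects it.
    let req = test::TestRequest::default().to_http_request();
    let resp = handler(req).await;
    assert_eq!(resp.status(), StatusCode::BAD_REQUEST);

    // Adding the header flips the outcome.
    let req = test::TestRequest::default()
        .insert_header(("content-type", "text/plain"))
        .to_http_request();
    let resp = handler(req).await;
    assert_eq!(resp.status(), StatusCode::OK);
}
```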
@@ -20,7 +20,7 @@ use crate::{
 /// Stream that reads request line by line.
 pub struct Readlines<T: HttpMessage> {
     stream: Payload<T::Stream>,
-    buff: BytesMut,
+    buf: BytesMut,
     limit: usize,
     checked_buff: bool,
     encoding: &'static Encoding,
@@ -41,7 +41,7 @@ where
 
         Readlines {
             stream: req.take_payload(),
-            buff: BytesMut::with_capacity(262_144),
+            buf: BytesMut::with_capacity(262_144),
             limit: 262_144,
             checked_buff: true,
             err: None,
@@ -58,7 +58,7 @@ where
     fn err(err: ReadlinesError) -> Self {
         Readlines {
             stream: Payload::None,
-            buff: BytesMut::new(),
+            buf: BytesMut::new(),
             limit: 262_144,
             checked_buff: true,
             encoding: UTF_8,
@@ -84,7 +84,7 @@ where
        // check if there is a newline in the buffer
        if !this.checked_buff {
            let mut found: Option<usize> = None;
-            for (ind, b) in this.buff.iter().enumerate() {
+            for (ind, b) in this.buf.iter().enumerate() {
                if *b == b'\n' {
                    found = Some(ind);
                    break;
@@ -96,13 +96,13 @@ where
                    return Poll::Ready(Some(Err(ReadlinesError::LimitOverflow)));
                }
                let line = if this.encoding == UTF_8 {
-                    str::from_utf8(&this.buff.split_to(ind + 1))
+                    str::from_utf8(&this.buf.split_to(ind + 1))
                        .map_err(|_| ReadlinesError::EncodingError)?
                        .to_owned()
                } else {
                    this.encoding
                        .decode_without_bom_handling_and_without_replacement(
-                            &this.buff.split_to(ind + 1),
+                            &this.buf.split_to(ind + 1),
                        )
                        .map(Cow::into_owned)
                        .ok_or(ReadlinesError::EncodingError)?
@@ -141,32 +141,32 @@ where
                            .ok_or(ReadlinesError::EncodingError)?
                    };
                    // extend buffer with rest of the bytes;
-                    this.buff.extend_from_slice(&bytes);
+                    this.buf.extend_from_slice(&bytes);
                    this.checked_buff = false;
                    return Poll::Ready(Some(Ok(line)));
                }
-                this.buff.extend_from_slice(&bytes);
+                this.buf.extend_from_slice(&bytes);
                Poll::Pending
            }
 
            None => {
-                if this.buff.is_empty() {
+                if this.buf.is_empty() {
                    return Poll::Ready(None);
                }
-                if this.buff.len() > this.limit {
+                if this.buf.len() > this.limit {
                    return Poll::Ready(Some(Err(ReadlinesError::LimitOverflow)));
                }
                let line = if this.encoding == UTF_8 {
-                    str::from_utf8(&this.buff)
+                    str::from_utf8(&this.buf)
                        .map_err(|_| ReadlinesError::EncodingError)?
                        .to_owned()
                } else {
                    this.encoding
-                        .decode_without_bom_handling_and_without_replacement(&this.buff)
+                        .decode_without_bom_handling_and_without_replacement(&this.buf)
                        .map(Cow::into_owned)
                        .ok_or(ReadlinesError::EncodingError)?
                };
-                this.buff.clear();
+                this.buf.clear();
                Poll::Ready(Some(Ok(line)))
            }
 
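All of the `Readlines` hunks above are a mechanical rename of the `buff` field to `buf`; the buffering logic itself is unchanged. For reference, a minimal sketch of that logic outside the `Stream` machinery: scan for a newline, `split_to` everything up to and including it as one line, and keep the remainder for the next poll (encoding and limit handling omitted):

```rust
use bytes::BytesMut;

// Sketch only: extract the next complete line (including the trailing '\n')
// from the buffer, leaving any partial line in place.
fn next_line(buf: &mut BytesMut) -> Option<String> {
    let ind = buf.iter().position(|b| *b == b'\n')?;
    let line = buf.split_to(ind + 1);
    Some(String::from_utf8_lossy(&line).into_owned())
}

fn main() {
    let mut buf = BytesMut::from(&b"first\nsecond\npartial"[..]);
    assert_eq!(next_line(&mut buf).as_deref(), Some("first\n"));
    assert_eq!(next_line(&mut buf).as_deref(), Some("second\n"));
    assert_eq!(next_line(&mut buf), None); // no newline yet; wait for more bytes
    assert_eq!(&buf[..], &b"partial"[..]);
}
```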