mirror of https://github.com/fafhrd91/actix-web
Compare commits

5 commits: 1e38ce07f1 ... 0b4ef0ffd2

| SHA1 |
| --- |
| 0b4ef0ffd2 |
| 8996198f2c |
| 68624ec63b |
| bcd0ffb016 |
| f7778df597 |
@ -2,6 +2,7 @@

 ## Unreleased

+- Opt-in file size threshold for faster synchronous reads, allowing up to 20x better performance.
 - Minimum supported Rust version (MSRV) is now 1.75.

 ## 0.6.6
@ -24,6 +24,7 @@ pin_project! {
         state: ChunkedReadFileState<Fut>,
         counter: u64,
         callback: F,
+        read_sync: bool,
     }
 }
@ -57,6 +58,7 @@ pub(crate) fn new_chunked_read(
     size: u64,
     offset: u64,
     file: File,
+    size_threshold: u64,
 ) -> impl Stream<Item = Result<Bytes, Error>> {
     ChunkedReadFile {
         size,
@ -69,31 +71,45 @@ pub(crate) fn new_chunked_read(
         },
         counter: 0,
         callback: chunked_read_file_callback,
+        read_sync: size < size_threshold,
     }
 }

 #[cfg(not(feature = "experimental-io-uring"))]
-async fn chunked_read_file_callback(
+fn chunked_read_file_callback_sync(
     mut file: File,
     offset: u64,
     max_bytes: usize,
-) -> Result<(File, Bytes), Error> {
+) -> Result<(File, Bytes), io::Error> {
     use io::{Read as _, Seek as _};

-    let res = actix_web::web::block(move || {
-        let mut buf = Vec::with_capacity(max_bytes);
+    let mut buf = Vec::with_capacity(max_bytes);

-        file.seek(io::SeekFrom::Start(offset))?;
+    file.seek(io::SeekFrom::Start(offset))?;

-        let n_bytes = file.by_ref().take(max_bytes as u64).read_to_end(&mut buf)?;
+    let n_bytes = file.by_ref().take(max_bytes as u64).read_to_end(&mut buf)?;

-        if n_bytes == 0 {
-            Err(io::Error::from(io::ErrorKind::UnexpectedEof))
-        } else {
-            Ok((file, Bytes::from(buf)))
-        }
-    })
-    .await??;
+    if n_bytes == 0 {
+        Err(io::Error::from(io::ErrorKind::UnexpectedEof))
+    } else {
+        Ok((file, Bytes::from(buf)))
+    }
+}
+
+#[cfg(not(feature = "experimental-io-uring"))]
+#[inline]
+async fn chunked_read_file_callback(
+    file: File,
+    offset: u64,
+    max_bytes: usize,
+    read_sync: bool,
+) -> Result<(File, Bytes), Error> {
+    let res = if read_sync {
+        chunked_read_file_callback_sync(file, offset, max_bytes)?
+    } else {
+        actix_web::web::block(move || chunked_read_file_callback_sync(file, offset, max_bytes))
+            .await??
+    };

     Ok(res)
 }
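For context on why the split above helps: offloading a read to `actix_web::web::block` costs a hop to the blocking thread pool plus a channel round-trip, which can dominate the cost of reading a tiny file. The snippet below is a rough, standalone illustration of that overhead, not part of this diff; the file path and printed labels are placeholders.

```rust
use std::{fs, time::Instant};

#[actix_web::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let path = "Cargo.toml"; // any small file works for the comparison

    // read directly on the current thread (what the new sync path does)
    let t = Instant::now();
    let inline = fs::read(path)?;
    println!("inline read:     {:?} ({} bytes)", t.elapsed(), inline.len());

    // the same read routed through the blocking thread pool (the old path)
    let t = Instant::now();
    let pooled = actix_web::web::block(move || fs::read(path)).await??;
    println!("web::block read: {:?} ({} bytes)", t.elapsed(), pooled.len());

    Ok(())
}
```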
@ -171,7 +187,7 @@ where
 #[cfg(not(feature = "experimental-io-uring"))]
 impl<F, Fut> Stream for ChunkedReadFile<F, Fut>
 where
-    F: Fn(File, u64, usize) -> Fut,
+    F: Fn(File, u64, usize, bool) -> Fut,
     Fut: Future<Output = Result<(File, Bytes), Error>>,
 {
     type Item = Result<Bytes, Error>;
@ -193,7 +209,7 @@ where
                     .take()
                     .expect("ChunkedReadFile polled after completion");

-                let fut = (this.callback)(file, offset, max_bytes);
+                let fut = (this.callback)(file, offset, max_bytes, *this.read_sync);

                 this.state
                     .project_replace(ChunkedReadFileState::Future { fut });
@ -49,6 +49,7 @@ pub struct Files {
     use_guards: Option<Rc<dyn Guard>>,
     guards: Vec<Rc<dyn Guard>>,
     hidden_files: bool,
+    size_threshold: u64,
 }

 impl fmt::Debug for Files {
@ -73,6 +74,7 @@ impl Clone for Files {
             use_guards: self.use_guards.clone(),
             guards: self.guards.clone(),
             hidden_files: self.hidden_files,
+            size_threshold: self.size_threshold,
         }
     }
 }
@ -119,6 +121,7 @@ impl Files {
             use_guards: None,
             guards: Vec::new(),
             hidden_files: false,
+            size_threshold: 0,
         }
     }
@ -204,6 +207,18 @@ impl Files {
         self
     }

+    /// Sets the async file-size threshold.
+    ///
+    /// When a file is larger than the threshold, the reader switches from faster
+    /// blocking file reads to slower async reads to avoid blocking the worker
+    /// thread while processing large files.
+    ///
+    /// Default is 0, meaning all files are read asynchronously.
+    pub fn set_size_threshold(mut self, size: u64) -> Self {
+        self.size_threshold = size;
+        self
+    }
+
     /// Specifies whether to use ETag or not.
     ///
     /// Default is true.
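A minimal usage sketch (not part of this diff) of the new builder method on `Files`; the mount path, directory, bind address, and the 64 KiB threshold are illustrative values only:

```rust
use actix_files::Files;
use actix_web::{App, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new().service(
            Files::new("/static", "./static")
                // files smaller than 64 KiB take the faster synchronous read path;
                // larger files keep using the blocking thread pool
                .set_size_threshold(64 * 1024),
        )
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```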
@ -367,6 +382,7 @@ impl ServiceFactory<ServiceRequest> for Files {
             file_flags: self.file_flags,
             guards: self.use_guards.clone(),
             hidden_files: self.hidden_files,
+            size_threshold: self.size_threshold,
         };

         if let Some(ref default) = *self.default.borrow() {
@ -80,6 +80,7 @@ pub struct NamedFile {
     pub(crate) content_type: Mime,
     pub(crate) content_disposition: ContentDisposition,
     pub(crate) encoding: Option<ContentEncoding>,
+    pub(crate) size_threshold: u64,
 }

 #[cfg(not(feature = "experimental-io-uring"))]
@ -200,6 +201,7 @@ impl NamedFile {
             encoding,
             status_code: StatusCode::OK,
             flags: Flags::default(),
+            size_threshold: 0,
         })
     }
@ -353,6 +355,18 @@ impl NamedFile {
         self
     }

+    /// Sets the async file-size threshold.
+    ///
+    /// When a file is larger than the threshold, the reader switches from faster
+    /// blocking file reads to slower async reads to avoid blocking the worker
+    /// thread while processing large files.
+    ///
+    /// Default is 0, meaning all files are read asynchronously.
+    pub fn set_size_threshold(mut self, size: u64) -> Self {
+        self.size_threshold = size;
+        self
+    }
+
     /// Specifies whether to return `ETag` header in response.
     ///
     /// Default is true.
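And the equivalent sketch for a single `NamedFile` response (again not from the diff; the handler name, path, and 16 KiB threshold are made up for illustration):

```rust
use actix_files::NamedFile;
use actix_web::{Error, HttpRequest, HttpResponse};

// Hypothetical handler: a favicon is tiny, so let it use the synchronous fast path.
async fn favicon(req: HttpRequest) -> Result<HttpResponse, Error> {
    let file = NamedFile::open_async("./static/favicon.ico").await?;
    Ok(file.set_size_threshold(16 * 1024).into_response(&req))
}
```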
@ -440,7 +454,8 @@ impl NamedFile {
                 res.insert_header((header::CONTENT_ENCODING, current_encoding.as_str()));
             }

-            let reader = chunked::new_chunked_read(self.md.len(), 0, self.file);
+            let reader =
+                chunked::new_chunked_read(self.md.len(), 0, self.file, self.size_threshold);

             return res.streaming(reader);
         }
@ -577,7 +592,7 @@ impl NamedFile {
                 .map_into_boxed_body();
         }

-        let reader = chunked::new_chunked_read(length, offset, self.file);
+        let reader = chunked::new_chunked_read(length, offset, self.file, self.size_threshold);

         if offset != 0 || length != self.md.len() {
             res.status(StatusCode::PARTIAL_CONTENT);
@ -39,6 +39,7 @@ pub struct FilesServiceInner {
     pub(crate) file_flags: named::Flags,
     pub(crate) guards: Option<Rc<dyn Guard>>,
     pub(crate) hidden_files: bool,
+    pub(crate) size_threshold: u64,
 }

 impl fmt::Debug for FilesServiceInner {
@ -70,7 +71,9 @@ impl FilesService {
         named_file.flags = self.file_flags;

         let (req, _) = req.into_parts();
-        let res = named_file.into_response(&req);
+        let res = named_file
+            .set_size_threshold(self.size_threshold)
+            .into_response(&req);
         ServiceResponse::new(req, res)
     }
@ -169,17 +172,7 @@ impl Service<ServiceRequest> for FilesService {
                 }
             } else {
                 match NamedFile::open_async(&path).await {
-                    Ok(mut named_file) => {
-                        if let Some(ref mime_override) = this.mime_override {
-                            let new_disposition = mime_override(&named_file.content_type.type_());
-                            named_file.content_disposition.disposition = new_disposition;
-                        }
-                        named_file.flags = this.file_flags;
-
-                        let (req, _) = req.into_parts();
-                        let res = named_file.into_response(&req);
-                        Ok(ServiceResponse::new(req, res))
-                    }
+                    Ok(named_file) => Ok(this.serve_named_file(req, named_file)),
                     Err(err) => this.handle_err(err, req).await,
                 }
             }
@ -17,7 +17,6 @@ edition.workspace = true
 rust-version.workspace = true

 [package.metadata.docs.rs]
-rustdoc-args = ["--cfg", "docsrs"]
 features = [
     "http2",
     "ws",
@ -119,7 +118,7 @@ tokio-util = { version = "0.7", features = ["io", "codec"] }
 tracing = { version = "0.1.30", default-features = false, features = ["log"] }

 # http2
-h2 = { version = "0.3.26", optional = true }
+h2 = { version = "0.3.27", optional = true }

 # websockets
 base64 = { version = "0.22", optional = true }
@ -11,7 +11,6 @@ edition.workspace = true
 rust-version.workspace = true

 [package.metadata.docs.rs]
-rustdoc-args = ["--cfg", "docsrs"]
 all-features = true

 [lib]
@ -14,7 +14,6 @@ license.workspace = true
 edition.workspace = true

 [package.metadata.docs.rs]
-rustdoc-args = ["--cfg", "docsrs"]
 all-features = true

 [package.metadata.cargo_check_external_types]
@ -17,7 +17,6 @@ edition.workspace = true
 rust-version.workspace = true

 [package.metadata.docs.rs]
-rustdoc-args = ["--cfg", "docsrs"]
 features = [
     "macros",
     "openssl",
@ -16,7 +16,6 @@ license = "MIT OR Apache-2.0"
 edition = "2021"

 [package.metadata.docs.rs]
-rustdoc-args = ["--cfg", "docsrs"]
 features = [
     "cookies",
     "openssl",
@ -109,7 +108,7 @@ cfg-if = "1"
 derive_more = { version = "2", features = ["display", "error", "from"] }
 futures-core = { version = "0.3.17", default-features = false, features = ["alloc"] }
 futures-util = { version = "0.3.17", default-features = false, features = ["alloc", "sink"] }
-h2 = "0.3.26"
+h2 = "0.3.27"
 http = "0.2.7"
 itoa = "1"
 log = "0.4"
@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash

 # developed on macOS and probably doesn't work on Linux yet due to minor
 # differences in flags on sed
@ -1,38 +0,0 @@
-#!/bin/sh
-
-# run tests matching what CI does for non-linux feature sets
-
-set -x
-
-EXIT=0
-
-save_exit_code() {
-    eval $@
-    local CMD_EXIT=$?
-    [ "$CMD_EXIT" = "0" ] || EXIT=$CMD_EXIT
-}
-
-save_exit_code cargo test --lib --tests -p=actix-router --all-features -- --nocapture
-save_exit_code cargo test --lib --tests -p=actix-http --all-features -- --nocapture
-save_exit_code cargo test --lib --tests -p=actix-web --features=rustls,openssl -- --nocapture
-save_exit_code cargo test --lib --tests -p=actix-web-codegen --all-features -- --nocapture
-save_exit_code cargo test --lib --tests -p=awc --all-features -- --nocapture
-save_exit_code cargo test --lib --tests -p=actix-http-test --all-features -- --nocapture
-save_exit_code cargo test --lib --tests -p=actix-test --all-features -- --nocapture
-save_exit_code cargo test --lib --tests -p=actix-files -- --nocapture
-save_exit_code cargo test --lib --tests -p=actix-multipart --all-features -- --nocapture
-save_exit_code cargo test --lib --tests -p=actix-web-actors --all-features -- --nocapture
-
-save_exit_code cargo test --workspace --doc
-
-if [ "$EXIT" = "0" ]; then
-    PASSED="All tests passed!"
-
-    if [ "$(command -v figlet)" ]; then
-        figlet "$PASSED"
-    else
-        echo "$PASSED"
-    fi
-fi
-
-exit $EXIT
@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+set -Euo pipefail
+
+for dir in $@; do
+    cd "$dir"
+
+    cargo publish --dry-run
+
+    read -p "Look okay? "
+    read -p "Sure? "
+
+    cargo publish
+
+    if [ $? -ne 0 ]; then
+        echo
+        read -p "Was the above error caused by cyclic dev-deps? Choosing yes will publish without a git backreference. (y/N) " publish_no_dev_deps
+
+        if [[ "$publish_no_dev_deps" == "y" || "$publish_no_dev_deps" == "Y" ]]; then
+            cargo hack --no-dev-deps publish --allow-dirty
+        fi
+    fi
+
+    cd ..
+done