Mirror of https://github.com/DefectingCat/candy (synced 2025-07-15 16:51:34 +00:00)
implement zstd compress
@@ -1,4 +1,10 @@
use hyper::body::Bytes;
use std::{collections::BTreeMap, sync::OnceLock, sync::RwLock};

static CACHE: OnceLock<RwLock<BTreeMap<String, Cache>>> = OnceLock::new();
pub fn get_cache() -> &'static RwLock<BTreeMap<String, Cache>> {
    CACHE.get_or_init(|| RwLock::new(BTreeMap::new()))
}

pub struct Cache {
    last_modified: u64,
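
The hunk above introduces a process-wide cache keyed by path. As a reference for how this `OnceLock`-plus-`RwLock` pattern is typically consumed, here is a minimal sketch; the `record_entry` helper is illustrative only and not part of the commit:

```rust
use std::{collections::BTreeMap, sync::OnceLock, sync::RwLock};

// Mirrors the struct in the hunk above; any other fields are omitted here.
pub struct Cache {
    last_modified: u64,
}

static CACHE: OnceLock<RwLock<BTreeMap<String, Cache>>> = OnceLock::new();

pub fn get_cache() -> &'static RwLock<BTreeMap<String, Cache>> {
    // Lazily create the map on first access; later calls return the same map.
    CACHE.get_or_init(|| RwLock::new(BTreeMap::new()))
}

// Hypothetical caller: take the write lock briefly to record an entry,
// then the read lock to look one up.
fn record_entry(path: &str, last_modified: u64) -> Option<u64> {
    get_cache()
        .write()
        .expect("cache lock poisoned")
        .insert(path.to_string(), Cache { last_modified });
    get_cache()
        .read()
        .expect("cache lock poisoned")
        .get(path)
        .map(|cache| cache.last_modified)
}
```
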
@@ -1,9 +1,6 @@
use std::time::UNIX_EPOCH;

use crate::{
    error::{Error, Result},
    get_cache,
};
use crate::error::{Error, Result};

use futures_util::TryStreamExt;
use http_body_util::{combinators::BoxBody, BodyExt, Full, StreamBody};
@@ -13,7 +10,7 @@ use hyper::{
};

use tokio::{fs::File, io::AsyncReadExt};
use tokio_util::{bytes::BytesMut, io::ReaderStream};
use tokio_util::io::ReaderStream;
use tracing::error;

pub type CandyBody<T, E = Error> = BoxBody<T, E>;
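
`CandyBody` is just a boxed `http_body_util` body. A minimal sketch of the idiom this commit uses to coerce an infallible `Full` body into the boxed alias, with `std::io::Error` standing in for the crate's own `Error` type:

```rust
use http_body_util::{combinators::BoxBody, BodyExt, Full};
use hyper::body::Bytes;

// Stand-in alias; the crate defaults E to its own `Error` type instead.
type CandyBody<T, E = std::io::Error> = BoxBody<T, E>;

fn full_body(bytes: Vec<u8>) -> CandyBody<Bytes> {
    // Full never fails, so its `Infallible` error can be mapped into any
    // error type with the empty-match trick before boxing.
    Full::new(Bytes::from(bytes))
        .map_err(|never| match never {})
        .boxed()
}
```
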
@@ -26,7 +23,7 @@ pub type CandyBody<T, E = Error> = BoxBody<T, E>;
/// ## Arguments
///
/// `path`: local file path
pub async fn handle_file(path: &str, headers: &mut HeaderMap) -> Result<CandyBody<Bytes>> {
pub async fn handle_file(path: &str, headers: &mut HeaderMap) -> Result<Vec<u8>> {
    // Open file for reading
    let file = File::open(path).await;
    let mut file = match file {
@@ -56,11 +53,11 @@ pub async fn handle_file(path: &str, headers: &mut HeaderMap) -> Result<CandyBod
        }
    } */

    read_file(&mut file, size).await
    read_file_bytes(&mut file, size).await
}

/// Open then use `ReaderStream` to stream to client.
/// Stream a file more suit large file, but its slower than read file to memory.
/// Stream a file more suitable for large file, but its slower than read file to memory.
pub async fn stream_file(file: File) -> Result<CandyBody<Bytes>> {
    // Wrap to a tokio_util::io::ReaderStream
    let reader_stream = ReaderStream::new(file);
@@ -71,12 +68,16 @@ pub async fn stream_file(file: File) -> Result<CandyBody<Bytes>> {
    Ok(boxed_body)
}

/// Open local file to memory
pub async fn read_file(file: &mut File, size: u64) -> Result<CandyBody<Bytes>> {
pub async fn read_file_bytes(file: &mut File, size: u64) -> Result<Vec<u8>> {
    let mut buffer = vec![0u8; size.try_into()?];
    file.read_exact(&mut buffer[..]).await?;
    let bytes = Bytes::from_iter(buffer);
    let body = Full::new(bytes).map_err(|e| match e {}).boxed();
    Ok(buffer)
}

/// Open local file to memory
pub async fn read_file(file: &mut File, size: u64) -> Result<CandyBody<Bytes>> {
    let bytes = read_file_bytes(file, size).await?;
    let body = Full::new(bytes.into()).map_err(|e| match e {}).boxed();
    Ok(body)
}
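
For contrast with the in-memory `read_file`/`read_file_bytes` split above, the streaming path (whose body sits mostly outside this diff) boils down to wrapping the file in a `ReaderStream` and boxing a `StreamBody`. A hedged sketch of that wiring, simplified to use `std::io::Error` as the body error type rather than the crate's `Error`:

```rust
use futures_util::TryStreamExt;
use http_body_util::{combinators::BoxBody, BodyExt, StreamBody};
use hyper::body::{Bytes, Frame};
use tokio::fs::File;
use tokio_util::io::ReaderStream;

// Simplified sketch of the `stream_file` shown above.
fn stream_file(file: File) -> BoxBody<Bytes, std::io::Error> {
    // ReaderStream yields io::Result<Bytes> chunks as the file is read.
    let reader_stream = ReaderStream::new(file);
    // Each chunk becomes a body data frame; StreamBody drives the stream.
    let stream_body = StreamBody::new(reader_stream.map_ok(Frame::data));
    stream_body.boxed()
}
```
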
@@ -1,5 +1,3 @@
use std::{collections::BTreeMap, sync::OnceLock, sync::RwLock};

use anyhow::{Context, Result};

use tokio::task::JoinSet;
@@ -18,11 +16,6 @@ mod http;
mod service;
mod utils;

static CACHE: OnceLock<RwLock<BTreeMap<String, u64>>> = OnceLock::new();
pub fn get_cache() -> &'static RwLock<BTreeMap<String, u64>> {
    CACHE.get_or_init(|| RwLock::new(BTreeMap::new()))
}

#[tokio::main]
async fn main() -> Result<()> {
    init_logger();
@@ -1,4 +1,5 @@
use std::{
    borrow::Borrow,
    path::Path,
    pin::pin,
    time::{self, Duration, Instant},
@@ -7,12 +8,13 @@ use std::{
use crate::{
    error::{Error, Result},
    http::{handle_file, internal_server_error, not_found, CandyBody},
    utils::{find_route, parse_assets_path},
    utils::{find_route, parse_assets_path, zstd::compress},
};

use anyhow::anyhow;
use futures_util::Future;

use http_body_util::{BodyExt, Full};
use hyper::{
    body::{Bytes, Incoming as IncomingBody},
    server::conn::http1,
@@ -120,20 +122,6 @@ async fn handle_connection(
        }
    };

    // prepare compress
    let accept_encoding = req.headers().get("Accept-Encoding");
    match accept_encoding {
        Some(accept) => {
            let accept = accept.to_str()?;
            dbg!(accept);
            match accept {
                str if str.contains("zstd") => {}
                _ => todo!(),
            }
        }
        None => todo!(),
    }

    // build the response for client
    let mut response = Response::builder();
    let headers = response.headers_mut().ok_or(InternalServerError(anyhow!(
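
The early `Accept-Encoding` probe above, with its `dbg!` and `todo!()` placeholders, is removed in favour of the negotiation block in the next hunk. As a side note, a plain `contains("zstd")` also matches the token anywhere inside the header value; a slightly stricter check (illustrative only, not what the commit does) splits the header on commas first:

```rust
use hyper::header::HeaderValue;

// Illustrative helper: true if `encoding` appears as its own entry in an
// Accept-Encoding header such as "gzip, zstd;q=0.9, br".
fn accepts_encoding(header: Option<&HeaderValue>, encoding: &str) -> bool {
    header
        .and_then(|value| value.to_str().ok())
        .map(|value| {
            value
                .split(',')
                .any(|token| token.split(';').next().unwrap_or("").trim() == encoding)
        })
        .unwrap_or(false)
}
```
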
@@ -141,7 +129,27 @@ async fn handle_connection(
    )))?;
    headers.insert("Content-Type", "text/html".parse()?);

    let body = handle_file(&path, headers).await?;
    // file bytes
    let bytes = handle_file(&path, headers).await?;

    // prepare compress
    let accept_encoding = req.headers().get("Accept-Encoding");
    let bytes = match accept_encoding {
        Some(accept) => {
            let accept = accept.to_str()?;
            debug!(accept);
            match accept {
                str if str.contains("zstd") => {
                    headers.insert("Content-Encoding", "zstd".parse()?);
                    compress(&bytes).await?
                }
                _ => bytes,
            }
        }
        None => bytes,
    };

    let body = Full::new(bytes.into()).map_err(|e| match e {}).boxed();
    // http method handle
    let res = match *req_method {
        Method::GET => response.body(body)?,
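
The `utils::zstd::compress` helper imported and called above is not shown in this diff. One way such a function is commonly written, assuming the blocking `zstd` crate as the backend and `tokio::task::spawn_blocking` to keep the work off the async executor (an assumption for illustration, not a confirmed reading of the commit):

```rust
use std::io;

/// Compress a byte slice with zstd at level 3 (the crate's default level).
/// Hypothetical stand-in for the project's `utils::zstd::compress`.
pub async fn compress(bytes: &[u8]) -> io::Result<Vec<u8>> {
    let input = bytes.to_vec();
    // zstd::encode_all is synchronous, so run it on the blocking pool.
    tokio::task::spawn_blocking(move || zstd::encode_all(input.as_slice(), 3))
        .await
        .expect("zstd compression task panicked")
}
```

In the handler above, the compressed bytes then flow into the same `Full::new(bytes.into())` body construction, paired with the `Content-Encoding: zstd` header that was already inserted.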