fix: read full request before replying

This commit is contained in:
neri 2025-11-08 23:22:31 +01:00
commit 34dea1a94e
2 changed files with 20 additions and 10 deletions

View file

@@ -25,13 +25,16 @@ pub(crate) struct UploadConfig {
}
pub(crate) async fn parse_multipart(
payload: Multipart,
mut payload: Multipart,
file_path: &Path,
config: &config::Config,
) -> Result<UploadConfig, error::Error> {
match parse_multipart_inner(payload, file_path, config).await {
match parse_multipart_inner(&mut payload, file_path, config).await {
Ok(data) => Ok(data),
Err(err) => {
// read stream to end before replying
while let Ok(Some(_)) = payload.try_next().await {}
match fs::remove_file(file_path).await {
Err(err) if err.kind() != ErrorKind::NotFound => {
log::error!("could not remove file {:?}", err);
@@ -44,7 +47,7 @@ pub(crate) async fn parse_multipart(
}
pub(crate) async fn parse_multipart_inner(
mut payload: Multipart,
payload: &mut Multipart,
file_path: &Path,
config: &config::Config,
) -> Result<UploadConfig, error::Error> {
@@ -207,7 +210,7 @@ async fn write_to_file(
Ok((written_bytes, first_bytes))
}
fn validate_max_size(written_bytes: u64, max_size: Option<u64>) -> Result<(), Error> {
pub fn validate_max_size(written_bytes: u64, max_size: Option<u64>) -> Result<(), Error> {
if let Some(max_size) = max_size {
if written_bytes > max_size {
return Err(error::ErrorPayloadTooLarge(format!(

View file

@@ -4,8 +4,10 @@ use crate::config::Config;
use crate::file_info::FileInfo;
use crate::{file_info, multipart, template};
use actix_multipart::Multipart;
use actix_web::http::header::LOCATION;
use actix_web::http::header::{ContentLength, LOCATION};
use actix_web::web::Header;
use actix_web::{error, web, Error, HttpRequest, HttpResponse};
use futures_util::TryStreamExt;
use rand::{distr::slice::Choose, Rng};
use sqlx::postgres::PgPool;
use std::path::{Path, PathBuf};
@@ -26,11 +28,19 @@ pub async fn index(req: HttpRequest, config: web::Data<Config>) -> HttpResponse
pub async fn upload(
req: HttpRequest,
payload: Multipart,
mut payload: Multipart,
db: web::Data<PgPool>,
expiry_watch_sender: web::Data<Sender<()>>,
content_length: Option<Header<ContentLength>>,
config: web::Data<Config>,
) -> Result<HttpResponse, Error> {
if let Some(content_length) = content_length {
if let Err(err) = multipart::validate_max_size(**content_length as u64, config.max_file_size) {
// read stream to end before replying
while let Ok(Some(_)) = payload.try_next().await {}
return Err(err);
}
}
let (file_id, file_path) = create_unique_file_id(&config).await.map_err(|file_err| {
log::error!("could not create file {:?}", file_err);
error::ErrorInternalServerError("could not create file")
@@ -95,10 +105,7 @@ async fn create_unique_file_id(
fn gen_file_id() -> String {
let distribution = Choose::new(ID_CHARS).expect("ID_CHARS is not empty");
rand::rng()
.sample_iter(distribution)
.take(5)
.collect()
rand::rng().sample_iter(distribution).take(5).collect()
}
fn get_redirect_url(id: &str, name: Option<&str>) -> String {