Compare commits

..

3 Commits

8 changed files with 585 additions and 408 deletions

Cargo.lock (generated): 779 lines changed

File diff suppressed because it is too large.


@@ -1,34 +1,34 @@
 [package]
 name = "datatrash"
-version = "1.1.6"
+version = "1.1.2"
 authors = ["neri"]
 edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-actix-web = { version = "4.2.1", default-features = false, features = [
+actix-web = { version = "4.0.1", default-features = false, features = [
     "macros",
     "compress-gzip",
     "compress-zstd",
 ] }
-sqlx = { version = "0.6.2", default-features = false, features = [
+sqlx = { version = "0.5.13", default-features = false, features = [
     "runtime-tokio-rustls",
     "postgres",
     "time",
 ] }
-env_logger = "0.9.1"
-log = "0.4.17"
-actix-files = "0.6.2"
-tokio = { version = "1.21.2", features = ["rt", "macros", "sync"] }
+env_logger = "0.9.0"
+log = "0.4.16"
+actix-files = "0.6.0"
+tokio = { version = "1.17.0", features = ["rt", "macros", "sync"] }
 actix-multipart = "0.4.0"
-futures-util = "0.3.24"
+futures-util = "0.3.21"
 rand = "0.8.5"
-time = "0.3.14"
+time = "0.2.27"
 htmlescape = "0.3.1"
-urlencoding = "2.1.2"
+urlencoding = "2.1.0"
 tree_magic_mini = { version = "3.0.3", features = ["with-gpl-data"] }
 mime = "0.3.16"
-url = "2.3.1"
-actix-governor = "0.3.2"
+url = "2.2.2"
+actix-governor = "0.3.1"
 governor = "0.4.2"
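The dependency that matters most below is time, which crosses the 0.2/0.3 major boundary; several of the source hunks that follow exist only to track that API difference. Both spellings of a Duration appear in this diff: the Duration::seconds constructor and the time::ext extension methods. A minimal sketch, compiling against time 0.3, where both forms are available:

    use time::ext::NumericalDuration;
    use time::Duration;

    fn main() {
        // Constructor form, as in `Duration::seconds(max_time as i64)` below:
        let a = Duration::seconds(90);
        // Extension-trait form, as in `(max_time as i64).seconds()` below:
        let b = 90.seconds();
        assert_eq!(a, b);
    }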


@@ -1,7 +1,6 @@
 use std::env;
 use std::path::PathBuf;
-use time::ext::NumericalDuration;
 use time::Duration;
 use tokio::fs;

@@ -29,8 +28,8 @@ pub async fn get_config() -> Config {
     let max_file_size = env::var("UPLOAD_MAX_BYTES")
         .ok()
         .and_then(|variable| variable.parse().ok())
-        .or(Some(8 * 1024 * 1024))
-        .filter(|&max_file_size| max_file_size != 0);
+        .unwrap_or(8 * 1024 * 1024);
+    let max_file_size = (max_file_size != 0).then(|| max_file_size);

     let static_dir =
         PathBuf::from(env::var("STATIC_DIR").unwrap_or_else(|_| "./static".to_owned()));
@@ -74,8 +73,8 @@ fn get_no_auth_limits() -> Option<NoAuthLimits> {
         (Some(auth_password), Some(max_time), Some(large_file_max_time), Some(large_file_size)) => {
             Some(NoAuthLimits {
                 auth_password,
-                max_time: (max_time as i64).seconds(),
-                large_file_max_time: (large_file_max_time as i64).seconds(),
+                max_time: Duration::seconds(max_time as i64),
+                large_file_max_time: Duration::seconds(large_file_max_time as i64),
                 large_file_size,
             })
         }
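The UPLOAD_MAX_BYTES hunk swaps one Option pipeline for another; both treat an unset or unparsable variable as an 8 MiB default and a value of 0 as "no limit" (None). A quick equivalence check; the helper names are hypothetical and the value is assumed to parse to u64:

    /// Pipeline from one side of the hunk: keep the Option, filter out zero.
    fn via_or_filter(parsed: Option<u64>) -> Option<u64> {
        parsed
            .or(Some(8 * 1024 * 1024))
            .filter(|&size| size != 0)
    }

    /// Pipeline from the other side: unwrap to the default, re-wrap unless zero.
    fn via_unwrap_then(parsed: Option<u64>) -> Option<u64> {
        let size = parsed.unwrap_or(8 * 1024 * 1024);
        (size != 0).then(|| size)
    }

    fn main() {
        for input in [None, Some(0), Some(42)] {
            assert_eq!(via_or_filter(input), via_unwrap_then(input));
        }
    }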


@@ -1,6 +1,5 @@
 use sqlx::postgres::{PgPool, PgPoolOptions};
 use std::env;
-use time::ext::NumericalStdDuration;

 pub async fn setup_db() -> PgPool {
     let conn_url = &get_db_url();
@@ -8,7 +7,7 @@ pub async fn setup_db() -> PgPool {
     let pool = PgPoolOptions::new()
         .max_connections(5)
-        .acquire_timeout(5.std_seconds())
+        .connect_timeout(std::time::Duration::from_secs(5))
         .connect(conn_url)
         .await
         .expect("could not create db pool");



@@ -1,19 +1,17 @@
-use std::{path::PathBuf, str::FromStr, time::SystemTime};
+use std::{path::PathBuf, str::FromStr};

 use actix_files::NamedFile;
 use actix_web::{
     error,
     http::header::{
-        Accept, CacheControl, CacheDirective, Charset, ContentDisposition, DispositionParam,
-        DispositionType, Expires, ExtendedValue, Header, HeaderValue, HttpDate, TryIntoHeaderValue,
-        ACCEPT, CACHE_CONTROL, CONTENT_TYPE, EXPIRES, VARY,
+        Accept, Charset, ContentDisposition, DispositionParam, DispositionType, ExtendedValue,
+        Header, HeaderValue, CONTENT_TYPE, VARY,
     },
     web, Error, HttpRequest, HttpResponse,
 };
 use mime::{Mime, APPLICATION_OCTET_STREAM, TEXT_HTML};
 use sqlx::postgres::PgPool;
 use std::path::Path;
-use time::OffsetDateTime;
 use tokio::fs;
 use url::Url;

@@ -37,19 +35,16 @@ pub async fn download(
     config: web::Data<Config>,
 ) -> Result<HttpResponse, Error> {
     let id = req.match_info().query("id");
-    let (file_id, file_name, valid_till, content_type, delete) = load_file_info(id, &db).await?;
+    let (file_id, file_name, content_type, delete) = load_file_info(id, &db).await?;

     let mut path = config.files_dir.clone();
     path.push(&file_id);
-
     let mime = Mime::from_str(&content_type).unwrap_or(APPLICATION_OCTET_STREAM);
-
-    let mut response = match get_view_type(&req, &mime, &path, delete).await {
-        ViewType::Raw => build_file_response(false, &file_name, path, mime, &req).await,
-        ViewType::Download => build_file_response(true, &file_name, path, mime, &req).await,
+    let response = match get_view_type(&req, &mime, &path, delete).await {
+        ViewType::Raw => build_file_response(false, &file_name, path, mime, &req),
+        ViewType::Download => build_file_response(true, &file_name, path, mime, &req),
         ViewType::Html => build_text_response(&path).await,
-    }?;
-    insert_cache_headers(&mut response, valid_till);
+    };

     if delete {
         deleter::delete_by_id(&db, &file_id, &config.files_dir)
@@ -58,16 +53,15 @@ pub async fn download(
             error::ErrorInternalServerError("could not delete file")
         })?;
     }
-
-    Ok(response)
+    response
 }

 async fn load_file_info(
     id: &str,
     db: &web::Data<sqlx::Pool<sqlx::Postgres>>,
-) -> Result<(String, String, OffsetDateTime, String, bool), Error> {
+) -> Result<(String, String, String, bool), Error> {
     sqlx::query_as(
-        "SELECT file_id, file_name, valid_till, content_type, delete_on_download from files WHERE file_id = $1",
+        "SELECT file_id, file_name, content_type, delete_on_download from files WHERE file_id = $1",
     )
     .bind(id)
     .fetch_optional(db.as_ref())
@@ -142,7 +136,7 @@ async fn build_text_response(path: &Path) -> Result<HttpResponse, Error> {
         .body(html))
 }

-async fn build_file_response(
+fn build_file_response(
     download: bool,
     file_name: &str,
     path: PathBuf,
@@ -164,7 +158,6 @@ async fn build_file_response(
         })?
         .set_content_type(mime)
         .set_content_disposition(content_disposition);
-
     let mut response = file.into_response(req);
     add_headers(req, download, &mut response);
     Ok(response)
@@ -201,33 +194,3 @@ fn get_disposition_params(filename: &str) -> Vec<DispositionParam> {
     }
     parameters
 }
-
-fn insert_cache_headers(response: &mut HttpResponse, valid_till: OffsetDateTime) {
-    if response.status().is_success() {
-        let valid_duration = valid_till - OffsetDateTime::now_utc();
-        let valid_cache_seconds = valid_duration.whole_seconds().clamp(0, u32::MAX as i64) as u32;
-        response.headers_mut().insert(
-            CACHE_CONTROL,
-            CacheControl(vec![
-                CacheDirective::Public,
-                CacheDirective::MustRevalidate,
-                CacheDirective::MaxAge(valid_cache_seconds), // todo: expiry in seconds
-                CacheDirective::NoTransform,
-                CacheDirective::Extension("immutable".to_owned(), None),
-            ])
-            .try_into_value()
-            .unwrap(),
-        );
-        response.headers_mut().insert(
-            EXPIRES,
-            Expires(HttpDate::from(
-                SystemTime::now() + std::time::Duration::from_secs(valid_cache_seconds.into()),
-            ))
-            .try_into_value()
-            .unwrap(),
-        );
-    }
-    response
-        .headers_mut()
-        .insert(VARY, HeaderValue::from_name(ACCEPT));
-}
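For reference, the removed insert_cache_headers helper emitted a Cache-Control value like the one below, plus a matching Expires date and a Vary: accept header. A standalone sketch against actix-web 4, using an example remaining validity of one hour:

    use actix_web::http::header::{CacheControl, CacheDirective, TryIntoHeaderValue};

    fn main() {
        // Directive list from the removed helper; max-age was set to the
        // file's remaining validity, hardcoded here to 3600 as an example.
        let value = CacheControl(vec![
            CacheDirective::Public,
            CacheDirective::MustRevalidate,
            CacheDirective::MaxAge(3600),
            CacheDirective::NoTransform,
            CacheDirective::Extension("immutable".to_owned(), None),
        ])
        .try_into_value()
        .unwrap();
        // Prints: public, must-revalidate, max-age=3600, no-transform, immutable
        println!("Cache-Control: {}", value.to_str().unwrap());
    }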


@@ -4,11 +4,12 @@ use actix_web::{error, http::header::DispositionParam, Error};
 use futures_util::{StreamExt, TryStreamExt};
 use mime::{Mime, TEXT_PLAIN};
 use std::path::Path;
-use time::{Duration, OffsetDateTime};
+use time::OffsetDateTime;
+use time::{ext::NumericalDuration, Duration};
 use tokio::{fs::File, io::AsyncWriteExt};

-const MAX_UPLOAD_DURATION: Duration = Duration::days(31);
-const DEFAULT_UPLOAD_DURATION: Duration = Duration::minutes(30);
+const MAX_UPLOAD_SECONDS: i64 = 31 * 24 * 60 * 60;
+const DEFAULT_UPLOAD_SECONDS: u32 = 30 * 60;

 pub(crate) struct UploadConfig {
     pub original_name: Option<String>,
@@ -24,7 +25,7 @@ pub(crate) async fn parse_multipart(
 ) -> Result<UploadConfig, error::Error> {
     let mut original_name: Option<String> = None;
     let mut content_type: Option<Mime> = None;
-    let mut keep_for_seconds: Option<String> = None;
+    let mut keep_for: Option<String> = None;
     let mut delete_on_download = false;
     let mut password = None;
     let mut size = 0;
@@ -34,7 +35,7 @@ pub(crate) async fn parse_multipart(
         let name = name.as_str();
         match name {
             "keep_for" => {
-                keep_for_seconds = Some(parse_string(name, field).await?);
+                keep_for = Some(parse_string(name, field).await?);
             }
             "file" => {
                 let (mime, uploaded_name) = get_file_metadata(&field);
@@ -64,13 +65,13 @@ pub(crate) async fn parse_multipart(
     let content_type =
         content_type.ok_or_else(|| error::ErrorBadRequest("no content type found"))?;

-    let keep_for = keep_for_seconds
+    let keep_for: u32 = keep_for
         .map(|k| k.parse())
         .transpose()
        .map_err(|e| error::ErrorBadRequest(format!("field keep_for is not a number: {}", e)))?
-        .map(Duration::seconds)
-        .unwrap_or(DEFAULT_UPLOAD_DURATION);
-    let valid_till = OffsetDateTime::now_utc() + keep_for;
+        .unwrap_or(DEFAULT_UPLOAD_SECONDS);
+    let valid_duration = keep_for.seconds();
+    let valid_till = OffsetDateTime::now_utc() + valid_duration;

     let upload_config = UploadConfig {
         original_name,
@@ -79,7 +80,7 @@ pub(crate) async fn parse_multipart(
         delete_on_download,
     };

-    check_requirements(&upload_config, size, password, &keep_for, config)?;
+    check_requirements(&upload_config, size, password, &valid_duration, config)?;

     Ok(upload_config)
 }
@@ -88,7 +89,7 @@ fn check_requirements(
     upload_config: &UploadConfig,
     size: u64,
     password: Option<String>,
-    keep_for: &Duration,
+    valid_duration: &Duration,
     config: &config::Config,
 ) -> Result<(), error::Error> {
     if let Some(original_name) = upload_config.original_name.as_ref() {
@@ -97,16 +98,17 @@ fn check_requirements(
         }
     }

-    if *keep_for > MAX_UPLOAD_DURATION {
+    let valid_seconds = valid_duration.whole_seconds();
+    if valid_seconds > MAX_UPLOAD_SECONDS {
         return Err(error::ErrorBadRequest(format!(
-            "maximum allowed validity is {}, but you specified {}",
-            MAX_UPLOAD_DURATION, keep_for
+            "maximum allowed validity is {} seconds, but you specified {} seconds",
+            MAX_UPLOAD_SECONDS, valid_seconds
         )));
     }

     if let Some(no_auth_limits) = &config.no_auth_limits {
-        let requires_auth = *keep_for > no_auth_limits.max_time
-            || *keep_for > no_auth_limits.large_file_max_time
+        let requires_auth = valid_seconds > no_auth_limits.max_time.whole_seconds()
+            || valid_seconds > no_auth_limits.large_file_max_time.whole_seconds()
             && size > no_auth_limits.large_file_size;
         // hIGh sECUriTy paSsWoRD CHEck
         if requires_auth && password.as_ref() != Some(&no_auth_limits.auth_password) {
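The validity check is unchanged in effect: for whole-second values, comparing two Durations directly (one side of the hunk) and comparing their whole_seconds() counts (the other side) order the same way. A small sanity check against time 0.3:

    use time::Duration;

    fn main() {
        let max = Duration::days(31);
        assert_eq!(max.whole_seconds(), 31 * 24 * 60 * 60);

        let keep_for = Duration::minutes(30);
        assert_eq!(
            keep_for > max,
            keep_for.whole_seconds() > max.whole_seconds()
        );
    }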


@@ -14,6 +14,11 @@ impl KeyExtractor for ForwardedPeerIpKeyExtractor {
     type Key = IpAddr;
     type KeyExtractionError = &'static str;

+    #[cfg(feature = "log")]
+    fn name(&self) -> &'static str {
+        "Forwarded peer IP"
+    }
+
     fn extract(&self, req: &ServiceRequest) -> Result<Self::Key, Self::KeyExtractionError> {
         let forwarded_for = req.headers().get("x-forwarded-for");
         if !self.proxied && forwarded_for.is_some() {
@@ -39,7 +44,8 @@ impl KeyExtractor for ForwardedPeerIpKeyExtractor {
         )
     }

-    fn response_error(&self, err: Self::KeyExtractionError) -> actix_web::Error {
-        actix_web::error::ErrorUnauthorized(err.to_string())
+    #[cfg(feature = "log")]
+    fn key_name(&self, key: &Self::Key) -> Option<String> {
+        Some(key.to_string())
     }
 }
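Only the trait plumbing changes here; the extraction itself, reading the peer address from x-forwarded-for when running behind a proxy, stays as it was. For illustration only (a hypothetical helper, not this crate's exact code), a common way to take the right-most, proxy-appended entry of that header:

    use std::net::IpAddr;

    // Hypothetical sketch: pick the last comma-separated entry of an
    // X-Forwarded-For value, i.e. the address the nearest proxy appended.
    fn forwarded_peer_ip(header: &str) -> Option<IpAddr> {
        header
            .split(',')
            .map(str::trim)
            .last()
            .and_then(|ip| ip.parse().ok())
    }

    fn main() {
        let ip = forwarded_peer_ip("203.0.113.7, 10.0.0.1");
        assert_eq!(ip, Some("10.0.0.1".parse().unwrap()));
    }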


@@ -12,13 +12,15 @@ const AUTH_SNIPPET_HTML: &str = include_str!("../snippet/auth.html.snippet");
 const MAX_SIZE_SNIPPET_HTML: &str = include_str!("../snippet/max_size.html.snippet");

 pub async fn write_prefillable_templates(config: &Config) {
+    let index_html = build_index_html(config);
+    let auth_hide_js = build_auth_hide_js(config);
+
     let index_path = config.static_dir.join("index.html");
-    fs::write(index_path, build_index_html(config))
+    let auth_hide_path = config.static_dir.join("auth-hide.js");
+    fs::write(index_path, index_html)
         .await
         .expect("could not write index.html to static folder");
-
-    if let Some(auth_hide_js) = build_auth_hide_js(config) {
-        let auth_hide_path = config.static_dir.join("auth-hide.js");
+    if let Some(auth_hide_js) = auth_hide_js {
         fs::write(auth_hide_path, auth_hide_js)
             .await
             .expect("could not write auth-hide.js to static folder");
@@ -31,30 +33,45 @@ pub async fn write_prefillable_templates(config: &Config) {
 }

 fn build_index_html(config: &Config) -> String {
-    let mut html = INDEX_HTML.to_owned();
-    if let Some(limit) = config.no_auth_limits.as_ref() {
-        html = html
-            .replace("{auth_snippet}", AUTH_SNIPPET_HTML.trim_end())
-            .replace("{auth_time}", &render_duration(limit.max_time))
-            .replace(
-                "{auth_large_time}",
-                &render_duration(limit.large_file_max_time),
-            )
-            .replace(
-                "{auth_large_size}",
-                &render_file_size(limit.large_file_size),
-            );
-    } else {
-        html = html.replace("{auth_snippet}", "");
-    }
-    if let Some(max_file_size) = config.max_file_size {
-        html = html
-            .replace("{max_size_snippet}", MAX_SIZE_SNIPPET_HTML.trim_end())
-            .replace("{max_size}", &render_file_size(max_file_size));
-    } else {
-        html = html.replace("{max_size_snippet}", "")
-    };
-    html
+    let auth_snippet = config
+        .no_auth_limits
+        .as_ref()
+        .map_or("", |_| AUTH_SNIPPET_HTML);
+    let max_size_snippet = config
+        .max_file_size
+        .as_ref()
+        .map_or("", |_| MAX_SIZE_SNIPPET_HTML);
+    INDEX_HTML
+        .replace("{max_size_snippet}", max_size_snippet)
+        .replace(
+            "{max_size}",
+            &render_file_size(config.max_file_size.unwrap_or(0)),
+        )
+        .replace("{auth_snippet}", auth_snippet)
+        .replace(
+            "{auth_time}",
+            &config
+                .no_auth_limits
+                .as_ref()
+                .map(|limit| limit.max_time)
+                .map_or("".into(), render_duration),
+        )
+        .replace(
+            "{auth_large_time}",
+            &config
+                .no_auth_limits
+                .as_ref()
+                .map(|limit| limit.large_file_max_time)
+                .map_or("".into(), render_duration),
+        )
+        .replace(
+            "{auth_large_size}",
+            &config
+                .no_auth_limits
+                .as_ref()
+                .map(|limit| limit.large_file_size)
+                .map_or("".into(), render_file_size),
+        )
 }

 fn render_file_size(size: u64) -> String {
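Both versions of build_index_html implement the same mechanism: the static index.html ships with literal {name} placeholders that chained str::replace calls either fill in or blank out. A self-contained sketch, with TEMPLATE standing in for the real INDEX_HTML constant (whose include_str! path is not shown in this diff):

    const TEMPLATE: &str = "<p>Max size: {max_size}</p>{max_size_snippet}";

    fn main() {
        let html = TEMPLATE
            .replace("{max_size}", "8 MiB")
            .replace("{max_size_snippet}", "");
        assert_eq!(html, "<p>Max size: 8 MiB</p>");
    }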