Compare commits

...

9 Commits

14 changed files with 719 additions and 652 deletions

Cargo.lock (generated): 877 lines changed; file diff suppressed because it is too large.

Cargo.toml

@@ -1,32 +1,34 @@
 [package]
 name = "datatrash"
-version = "1.1.2"
+version = "1.1.6"
 authors = ["neri"]
 edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-actix-web = { version = "4.0.1", default-features = false, features = [
+actix-web = { version = "4.2.1", default-features = false, features = [
     "macros",
     "compress-gzip",
     "compress-zstd",
 ] }
-sqlx = { version = "0.5.13", default-features = false, features = [
+sqlx = { version = "0.6.2", default-features = false, features = [
     "runtime-tokio-rustls",
     "postgres",
     "time",
 ] }
-env_logger = "0.9.0"
-log = "0.4.16"
-actix-files = "0.6.0"
-tokio = { version = "1.17.0", features = ["rt", "macros", "sync"] }
+env_logger = "0.9.1"
+log = "0.4.17"
+actix-files = "0.6.2"
+tokio = { version = "1.21.2", features = ["rt", "macros", "sync"] }
 actix-multipart = "0.4.0"
-futures-util = "0.3.21"
+futures-util = "0.3.24"
 rand = "0.8.5"
-time = "0.2.27"
+time = "0.3.14"
 htmlescape = "0.3.1"
-urlencoding = "2.1.0"
+urlencoding = "2.1.2"
 tree_magic_mini = { version = "3.0.3", features = ["with-gpl-data"] }
 mime = "0.3.16"
-url = "2.2.2"
+url = "2.3.1"
+actix-governor = "0.3.2"
+governor = "0.4.2"
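Two of these bumps are breaking upgrades that drive most of the code changes below: sqlx 0.5 to 0.6 and time 0.2 to 0.3. The time 0.3 extension traits the new code leans on work roughly like this (an illustrative sketch, not code from the repository):

```rust
use time::ext::{NumericalDuration, NumericalStdDuration};

fn main() {
    // NumericalDuration builds time::Duration values from integer literals,
    let keep_for: time::Duration = 30.minutes();
    // while NumericalStdDuration builds std::time::Duration values.
    let timeout: std::time::Duration = 5.std_seconds();
    assert_eq!(keep_for.whole_seconds(), 30 * 60);
    assert_eq!(timeout.as_secs(), 5);
}
```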

README.md

@@ -18,12 +18,16 @@ To run the software directly, use the compiling instructions below.
 ### General configuration

-| environment variable | default value  |
-| -------------------- | -------------- |
-| STATIC_DIR           | ./static       |
-| FILES_DIR            | ./files        |
-| UPLOAD_MAX_BYTES     | 8388608 (8MiB) |
-| BIND_ADDRESS         | 0.0.0.0:8000   |
+| environment variable  | default value  | description                                    |
+| --------------------- | -------------- | ---------------------------------------------- |
+| STATIC_DIR            | ./static       | directory to generate "static" files into      |
+| FILES_DIR             | ./files        | directory to save uploaded files into          |
+| UPLOAD_MAX_BYTES      | 8388608 (8MiB) | maximum size for uploaded files                |
+| BIND_ADDRESS          | 0.0.0.0:8000   | address to bind the server to                  |
+| RATE_LIMIT            | true           | whether download rate should be limited        |
+| RATE_LIMIT_PROXIED    | false          | whether rate limit should read x-forwarded-for |
+| RATE_LIMIT_PER_SECOND | 60             | seconds to wait between requests               |
+| RATE_LIMIT_BURST      | 1440           | allowed request burst                          |

 ### Database configuration
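Taken together, the rate-limit defaults mean the token bucket refills one request every 60 seconds and holds at most 1440 requests, exactly one day's worth of quota. A quick sanity check (illustrative, not repository code):

```rust
fn main() {
    let seconds_per_request: u64 = 60; // RATE_LIMIT_PER_SECOND default
    let burst: u64 = 1440;             // RATE_LIMIT_BURST default
    // 1440 requests at one per minute is a full day's allowance
    assert_eq!(seconds_per_request * burst, 24 * 60 * 60);
}
```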

(SQL schema migration)

@@ -10,3 +10,5 @@ CREATE TABLE IF NOT EXISTS files (
 ALTER TABLE files ADD COLUMN IF NOT EXISTS delete_on_download boolean;
 ALTER TABLE files ALTER COLUMN delete_on_download set not null;
 ALTER TABLE files ALTER COLUMN valid_till TYPE timestamptz;
+ALTER TABLE files DROP COLUMN IF EXISTS kind;
+ALTER TABLE files ADD COLUMN IF NOT EXISTS content_type varchar(255);

src/config.rs

@@ -1,6 +1,7 @@
 use std::env;
 use std::path::PathBuf;
+use time::ext::NumericalDuration;
 use time::Duration;
 use tokio::fs;

@@ -10,6 +11,10 @@ pub struct Config {
     pub files_dir: PathBuf,
     pub max_file_size: Option<u64>,
     pub no_auth_limits: Option<NoAuthLimits>,
+    pub enable_rate_limit: bool,
+    pub proxied: bool,
+    pub rate_limit_per_second: u64,
+    pub rate_limit_burst: u32,
 }

 #[derive(Clone)]

@@ -24,8 +29,8 @@ pub async fn get_config() -> Config {
     let max_file_size = env::var("UPLOAD_MAX_BYTES")
         .ok()
         .and_then(|variable| variable.parse().ok())
-        .unwrap_or(8 * 1024 * 1024);
-    let max_file_size = (max_file_size != 0).then(|| max_file_size);
+        .or(Some(8 * 1024 * 1024))
+        .filter(|&max_file_size| max_file_size != 0);

     let static_dir =
         PathBuf::from(env::var("STATIC_DIR").unwrap_or_else(|_| "./static".to_owned()));

@@ -36,11 +41,26 @@ pub async fn get_config() -> Config {
     let no_auth_limits = get_no_auth_limits();

+    let enable_rate_limit = matches!(env::var("RATE_LIMIT").as_deref(), Ok("true") | Err(_));
+    let proxied = env::var("PROXIED").as_deref() == Ok("true");
+    let rate_limit_per_second = env::var("RATE_LIMIT_PER_SECOND")
+        .ok()
+        .and_then(|rate_limit| rate_limit.parse().ok())
+        .unwrap_or(60);
+    let rate_limit_burst = env::var("RATE_LIMIT_BURST")
+        .ok()
+        .and_then(|rate_limit| rate_limit.parse().ok())
+        .unwrap_or(1440);
+
     Config {
         static_dir,
         files_dir,
         max_file_size,
         no_auth_limits,
+        enable_rate_limit,
+        proxied,
+        rate_limit_per_second,
+        rate_limit_burst,
     }
 }

@@ -54,8 +74,8 @@ fn get_no_auth_limits() -> Option<NoAuthLimits> {
         (Some(auth_password), Some(max_time), Some(large_file_max_time), Some(large_file_size)) => {
             Some(NoAuthLimits {
                 auth_password,
-                max_time: Duration::seconds(max_time as i64),
-                large_file_max_time: Duration::seconds(large_file_max_time as i64),
+                max_time: (max_time as i64).seconds(),
+                large_file_max_time: (large_file_max_time as i64).seconds(),
                 large_file_size,
             })
         }
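The reworked UPLOAD_MAX_BYTES handling keeps the 8 MiB fallback but folds the two-step assignment into a single Option chain, where an explicit 0 now yields None, meaning no size limit. A standalone sketch of the same semantics (names are ad hoc, not from the repository):

```rust
use std::env;

// Unset or unparsable values fall back to 8 MiB; an explicit 0 disables
// the limit by producing None.
fn max_file_size() -> Option<u64> {
    env::var("UPLOAD_MAX_BYTES")
        .ok()
        .and_then(|v| v.parse().ok())
        .or(Some(8 * 1024 * 1024))
        .filter(|&size| size != 0)
}
```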

src/db.rs

@@ -1,5 +1,6 @@
 use sqlx::postgres::{PgPool, PgPoolOptions};
 use std::env;
+use time::ext::NumericalStdDuration;

 pub async fn setup_db() -> PgPool {
     let conn_url = &get_db_url();

@@ -7,7 +8,7 @@ pub async fn setup_db() -> PgPool {
     let pool = PgPoolOptions::new()
         .max_connections(5)
-        .connect_timeout(std::time::Duration::from_secs(5))
+        .acquire_timeout(5.std_seconds())
         .connect(conn_url)
         .await
         .expect("could not create db pool");
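sqlx 0.6 renamed the pool's connect_timeout to acquire_timeout, and 5.std_seconds() is just time 0.3 shorthand for the standard-library duration. The equivalence, as a sketch:

```rust
use time::ext::NumericalStdDuration;

fn main() {
    assert_eq!(5.std_seconds(), std::time::Duration::from_secs(5));
}
```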

src/deleter.rs

@@ -1,9 +1,9 @@
 use futures_util::TryStreamExt;
-use time::OffsetDateTime;
 use sqlx::{postgres::PgPool, Row};
 use std::cmp::max;
 use std::path::{Path, PathBuf};
 use time::ext::NumericalStdDuration;
+use time::OffsetDateTime;
 use tokio::fs;
 use tokio::sync::mpsc::Receiver;
 use tokio::time::timeout;

src/download.rs

@@ -1,22 +1,24 @@
-use std::{path::PathBuf, str::FromStr};
+use std::{path::PathBuf, str::FromStr, time::SystemTime};

 use actix_files::NamedFile;
 use actix_web::{
     error,
     http::header::{
-        Accept, Charset, ContentDisposition, DispositionParam, DispositionType, ExtendedValue,
-        Header,
+        Accept, CacheControl, CacheDirective, Charset, ContentDisposition, DispositionParam,
+        DispositionType, Expires, ExtendedValue, Header, HeaderValue, HttpDate, TryIntoHeaderValue,
+        ACCEPT, CACHE_CONTROL, CONTENT_TYPE, EXPIRES, VARY,
     },
     web, Error, HttpRequest, HttpResponse,
 };
-use mime::{Mime, TEXT_HTML};
+use mime::{Mime, APPLICATION_OCTET_STREAM, TEXT_HTML};
 use sqlx::postgres::PgPool;
 use std::path::Path;
+use time::OffsetDateTime;
 use tokio::fs;
 use url::Url;

+use crate::config::Config;
 use crate::deleter;
-use crate::{config::Config, file_kind::FileKind};

 const TEXT_VIEW_HTML: &str = include_str!("../template/text-view.html");
 const URL_VIEW_HTML: &str = include_str!("../template/url-view.html");
@@ -35,16 +37,19 @@ pub async fn download(
     config: web::Data<Config>,
 ) -> Result<HttpResponse, Error> {
     let id = req.match_info().query("id");
-    let (file_id, file_name, file_kind, delete) = load_file_info(id, &db).await?;
+    let (file_id, file_name, valid_till, content_type, delete) = load_file_info(id, &db).await?;

     let mut path = config.files_dir.clone();
     path.push(&file_id);

-    let file_mime = get_content_type(&path);
-    let response = match get_view_type(&req, &file_kind, &file_mime, &path, delete).await {
-        ViewType::Raw => build_file_response(false, &file_name, path, file_mime, &req),
-        ViewType::Download => build_file_response(true, &file_name, path, file_mime, &req),
+    let mime = Mime::from_str(&content_type).unwrap_or(APPLICATION_OCTET_STREAM);
+    let mut response = match get_view_type(&req, &mime, &path, delete).await {
+        ViewType::Raw => build_file_response(false, &file_name, path, mime, &req).await,
+        ViewType::Download => build_file_response(true, &file_name, path, mime, &req).await,
         ViewType::Html => build_text_response(&path).await,
-    };
+    }?;
+    insert_cache_headers(&mut response, valid_till);

     if delete {
         deleter::delete_by_id(&db, &file_id, &config.files_dir)
             .await

@@ -53,15 +58,16 @@ pub async fn download(
                 error::ErrorInternalServerError("could not delete file")
             })?;
     }
-    response
+
+    Ok(response)
 }

 async fn load_file_info(
     id: &str,
     db: &web::Data<sqlx::Pool<sqlx::Postgres>>,
-) -> Result<(String, String, String, bool), Error> {
+) -> Result<(String, String, OffsetDateTime, String, bool), Error> {
     sqlx::query_as(
-        "SELECT file_id, file_name, kind, delete_on_download from files WHERE file_id = $1",
+        "SELECT file_id, file_name, valid_till, content_type, delete_on_download from files WHERE file_id = $1",
     )
     .bind(id)
     .fetch_optional(db.as_ref())
@@ -73,18 +79,9 @@ async fn load_file_info(
     .ok_or_else(|| error::ErrorNotFound("file does not exist or has expired"))
 }

-fn get_content_type(path: &Path) -> Mime {
-    let std_path = std::path::Path::new(path.as_os_str());
-    tree_magic_mini::from_filepath(std_path)
-        .unwrap_or("application/octet-stream")
-        .parse::<Mime>()
-        .expect("tree_magic_mini should not produce invalid mime")
-}
-
 async fn get_view_type(
     req: &HttpRequest,
-    file_kind: &str,
-    file_mime: &Mime,
+    mime: &Mime,
     file_path: &Path,
     delete_on_download: bool,
 ) -> ViewType {

@@ -94,9 +91,7 @@ async fn get_view_type(
     if req.query_string().contains("raw") {
         return ViewType::Raw;
     }
-    let is_text =
-        FileKind::from_str(file_kind) == Ok(FileKind::Text) || file_mime.type_() == mime::TEXT;
-    if !is_text {
+    if mime.type_() != mime::TEXT {
         return ViewType::Raw;
     }
     if get_file_size(file_path).await >= TEXT_VIEW_SIZE_LIMIT {

@@ -107,7 +102,7 @@ async fn get_view_type(
         if accept_mime == TEXT_HTML {
             return ViewType::Html;
         }
-        if mime_matches(&accept_mime, file_mime) {
+        if mime_matches(&accept_mime, mime) {
             break;
         }
     }
@@ -147,11 +142,11 @@ async fn build_text_response(path: &Path) -> Result<HttpResponse, Error> {
         .body(html))
 }

-fn build_file_response(
+async fn build_file_response(
     download: bool,
     file_name: &str,
     path: PathBuf,
-    content_type: Mime,
+    mime: Mime,
     req: &HttpRequest,
 ) -> Result<HttpResponse, Error> {
     let content_disposition = ContentDisposition {

@@ -167,9 +162,32 @@ fn build_file_response(
             log::error!("file could not be read {:?}", file_err);
             error::ErrorInternalServerError("this file should be here but could not be found")
         })?
-        .set_content_type(content_type)
+        .set_content_type(mime)
         .set_content_disposition(content_disposition);
-    Ok(file.into_response(req))
+
+    let mut response = file.into_response(req);
+    add_headers(req, download, &mut response);
+    Ok(response)
+}
+
+fn add_headers(req: &HttpRequest, download: bool, response: &mut HttpResponse) {
+    // if the browser is trying to fetch this resource in a secure context, pretend the response is
+    // just binary data so it won't be executed
+    let sec_fetch_mode = req
+        .headers()
+        .get("sec-fetch-mode")
+        .and_then(|v| v.to_str().ok());
+    if !download && sec_fetch_mode.is_some() && sec_fetch_mode != Some("navigate") {
+        response.headers_mut().insert(
+            CONTENT_TYPE,
+            HeaderValue::from_str(APPLICATION_OCTET_STREAM.as_ref())
+                .expect("mime type can be encoded to header value"),
+        );
+    }
+    // the response varies based on these request headers
+    response
+        .headers_mut()
+        .append(VARY, HeaderValue::from_static("accept, sec-fetch-mode"));
 }

 fn get_disposition_params(filename: &str) -> Vec<DispositionParam> {
@@ -183,3 +201,33 @@ fn get_disposition_params(filename: &str) -> Vec<DispositionParam> {
     }
     parameters
 }
+
+fn insert_cache_headers(response: &mut HttpResponse, valid_till: OffsetDateTime) {
+    if response.status().is_success() {
+        let valid_duration = valid_till - OffsetDateTime::now_utc();
+        let valid_cache_seconds = valid_duration.whole_seconds().clamp(0, u32::MAX as i64) as u32;
+        response.headers_mut().insert(
+            CACHE_CONTROL,
+            CacheControl(vec![
+                CacheDirective::Public,
+                CacheDirective::MustRevalidate,
+                CacheDirective::MaxAge(valid_cache_seconds),
+                CacheDirective::NoTransform,
+                CacheDirective::Extension("immutable".to_owned(), None),
+            ])
+            .try_into_value()
+            .unwrap(),
+        );
+        response.headers_mut().insert(
+            EXPIRES,
+            Expires(HttpDate::from(
+                SystemTime::now() + std::time::Duration::from_secs(valid_cache_seconds.into()),
+            ))
+            .try_into_value()
+            .unwrap(),
+        );
+    }
+    response
+        .headers_mut()
+        .insert(VARY, HeaderValue::from_name(ACCEPT));
+}
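The max-age is whatever validity remains at download time, clamped to the u32 range so an already expired file cannot yield a negative age. A worked example with hypothetical values (not repository code), for a file that stays valid another ten minutes:

```rust
use time::{Duration, OffsetDateTime};

fn main() {
    let valid_till = OffsetDateTime::now_utc() + Duration::minutes(10);
    let max_age = (valid_till - OffsetDateTime::now_utc())
        .whole_seconds()
        .clamp(0, u32::MAX as i64) as u32; // expired files clamp to 0
    assert!(max_age <= 600);
}
```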

src/file_kind.rs (deleted)

@@ -1,27 +0,0 @@
-use std::{fmt::Display, str::FromStr};
-
-#[derive(Debug, PartialEq)]
-pub(crate) enum FileKind {
-    Text,
-    Binary,
-}
-
-impl Display for FileKind {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            FileKind::Text => write!(f, "text"),
-            FileKind::Binary => write!(f, "binary"),
-        }
-    }
-}
-
-impl FromStr for FileKind {
-    type Err = String;
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        match s.to_lowercase().as_str() {
-            "text" => Ok(FileKind::Text),
-            "binary" => Ok(FileKind::Binary),
-            _ => Err(format!("unknown kind {}", s)),
-        }
-    }
-}

src/main.rs

@@ -2,14 +2,17 @@ mod config;
 mod db;
 mod deleter;
 mod download;
-mod file_kind;
 mod multipart;
+mod rate_limit;
 mod template;
 mod upload;

+use crate::rate_limit::ForwardedPeerIpKeyExtractor;
 use actix_files::Files;
+use actix_governor::{Governor, GovernorConfigBuilder};
 use actix_web::{
-    middleware::{self, Logger},
+    http::header::{HeaderName, HeaderValue, CONTENT_SECURITY_POLICY, X_CONTENT_TYPE_OPTIONS},
+    middleware::{self, DefaultHeaders, Logger},
     web::{self, Data},
     App, Error, HttpResponse, HttpServer,
 };

@@ -18,6 +21,11 @@ use sqlx::postgres::PgPool;
 use std::env;
 use tokio::{sync::mpsc::channel, task};

+const DEFAULT_CSP: (HeaderName, &str) = (
+    CONTENT_SECURITY_POLICY,
+    "default-src 'none'; connect-src 'self'; img-src 'self'; media-src 'self'; font-src 'self'; script-src 'self'; style-src 'self'; object-src 'none'; base-uri 'self'; frame-src 'none'; frame-ancestors 'none'; form-action 'self';"
+);
+
 async fn not_found() -> Result<HttpResponse, Error> {
     Ok(HttpResponse::NotFound()
         .content_type("text/plain")

@@ -47,10 +55,25 @@ async fn main() -> std::io::Result<()> {
     template::write_prefillable_templates(&config).await;
     let config = Data::new(config);

+    let governor_conf = GovernorConfigBuilder::default()
+        .per_second(config.rate_limit_per_second)
+        .burst_size(config.rate_limit_burst)
+        .key_extractor(ForwardedPeerIpKeyExtractor {
+            proxied: config.proxied,
+        })
+        .use_headers()
+        .finish()
+        .unwrap();
+
     HttpServer::new({
         move || {
-            App::new()
+            let app = App::new()
                 .wrap(Logger::new(r#"%{r}a "%r" =%s %bbytes %Tsec"#))
+                .wrap(
+                    DefaultHeaders::new()
+                        .add(DEFAULT_CSP)
+                        .add((X_CONTENT_TYPE_OPTIONS, HeaderValue::from_static("nosniff"))),
+                )
                 .wrap(middleware::Compress::default())
                 .app_data(db.clone())
                 .app_data(expiry_watch_sender.clone())

@@ -62,7 +85,19 @@ async fn main() -> std::io::Result<()> {
                         .route(web::get().to(upload::uploaded)),
                 )
                 .service(Files::new("/static", "static").disable_content_disposition())
-                .service(
+                .default_service(web::route().to(not_found));
+
+            if config.enable_rate_limit {
+                app.service(
+                    web::resource([
+                        "/{id:[a-z0-9]{5}}",
+                        "/{id:[a-z0-9]{5}}/",
+                        "/{id:[a-z0-9]{5}}/{name}",
+                    ])
+                    .wrap(Governor::new(&governor_conf))
+                    .route(web::get().to(download::download)),
+                )
+            } else {
+                app.service(
                     web::resource([
                         "/{id:[a-z0-9]{5}}",
                         "/{id:[a-z0-9]{5}}/",

@@ -70,7 +105,7 @@ async fn main() -> std::io::Result<()> {
                         "/{id:[a-z0-9]{5}}/{name}",
                     ])
                     .route(web::get().to(download::download)),
                 )
-                .default_service(web::route().to(not_found))
+            }
         }
     })
     .bind(bind_address)?

src/multipart.rs

@@ -1,31 +1,30 @@
-use crate::{config, file_kind::FileKind};
+use crate::config;
 use actix_multipart::{Field, Multipart};
 use actix_web::{error, http::header::DispositionParam, Error};
 use futures_util::{StreamExt, TryStreamExt};
+use mime::{Mime, TEXT_PLAIN};
 use std::path::Path;
-use time::OffsetDateTime;
-use time::{ext::NumericalDuration, Duration};
+use time::{Duration, OffsetDateTime};
 use tokio::{fs::File, io::AsyncWriteExt};

-const MAX_UPLOAD_SECONDS: i64 = 31 * 24 * 60 * 60;
-const DEFAULT_UPLOAD_SECONDS: u32 = 30 * 60;
+const MAX_UPLOAD_DURATION: Duration = Duration::days(31);
+const DEFAULT_UPLOAD_DURATION: Duration = Duration::minutes(30);

 pub(crate) struct UploadConfig {
-    pub original_name: String,
+    pub original_name: Option<String>,
+    pub content_type: Mime,
     pub valid_till: OffsetDateTime,
-    pub kind: FileKind,
     pub delete_on_download: bool,
 }

 pub(crate) async fn parse_multipart(
     mut payload: Multipart,
-    file_id: &str,
-    file_name: &Path,
+    file_path: &Path,
     config: &config::Config,
 ) -> Result<UploadConfig, error::Error> {
     let mut original_name: Option<String> = None;
-    let mut keep_for: Option<String> = None;
-    let mut kind: Option<FileKind> = None;
+    let mut content_type: Option<Mime> = None;
+    let mut keep_for_seconds: Option<String> = None;
     let mut delete_on_download = false;
     let mut password = None;
     let mut size = 0;
@@ -35,25 +34,23 @@ pub(crate) async fn parse_multipart(
         let name = name.as_str();
         match name {
             "keep_for" => {
-                keep_for = Some(parse_string(name, field).await?);
+                keep_for_seconds = Some(parse_string(name, field).await?);
             }
             "file" => {
-                let file_original_name = get_original_filename(&field);
-                if file_original_name == None || file_original_name.map(|f| f.as_str()) == Some("")
-                {
+                let (mime, uploaded_name) = get_file_metadata(&field);
+                if uploaded_name == None || uploaded_name.map(|f| f.as_str()) == Some("") {
                     continue;
                 }
-                original_name = file_original_name.map(|f| f.to_string());
-                kind = Some(FileKind::Binary);
-                size = create_file(file_name, field, config.max_file_size).await?;
+                original_name = uploaded_name.map(|f| f.to_string());
+                content_type = Some(mime.clone());
+                size = create_file(file_path, field, config.max_file_size).await?;
             }
             "text" => {
                 if original_name.is_some() {
                     continue;
                 }
-                original_name = Some(format!("{}.txt", file_id));
-                kind = Some(FileKind::Text);
-                size = create_file(file_name, field, config.max_file_size).await?;
+                size = create_file(file_path, field, config.max_file_size).await?;
+                content_type = Some(get_content_type(file_path));
             }
             "delete_on_download" => {
                 delete_on_download = parse_string(name, field).await? != "false";
@@ -65,24 +62,24 @@ pub(crate) async fn parse_multipart(
         };
     }

-    let original_name = original_name.ok_or_else(|| error::ErrorBadRequest("no content found"))?;
-    let kind = kind.ok_or_else(|| error::ErrorBadRequest("no content found"))?;
+    let content_type =
+        content_type.ok_or_else(|| error::ErrorBadRequest("no content type found"))?;

-    let keep_for: u32 = keep_for
+    let keep_for = keep_for_seconds
         .map(|k| k.parse())
         .transpose()
         .map_err(|e| error::ErrorBadRequest(format!("field keep_for is not a number: {}", e)))?
-        .unwrap_or(DEFAULT_UPLOAD_SECONDS);
-    let valid_duration = keep_for.seconds();
-    let valid_till = OffsetDateTime::now_utc() + valid_duration;
+        .map(Duration::seconds)
+        .unwrap_or(DEFAULT_UPLOAD_DURATION);
+    let valid_till = OffsetDateTime::now_utc() + keep_for;

     let upload_config = UploadConfig {
         original_name,
+        content_type,
         valid_till,
-        kind,
         delete_on_download,
     };

-    check_requirements(&upload_config, size, password, &valid_duration, config)?;
+    check_requirements(&upload_config, size, password, &keep_for, config)?;

     Ok(upload_config)
 }
@@ -91,24 +88,25 @@ fn check_requirements(
     upload_config: &UploadConfig,
     size: u64,
     password: Option<String>,
-    valid_duration: &Duration,
+    keep_for: &Duration,
     config: &config::Config,
 ) -> Result<(), error::Error> {
-    if upload_config.original_name.len() > 255 {
-        return Err(error::ErrorBadRequest("filename is too long"));
+    if let Some(original_name) = upload_config.original_name.as_ref() {
+        if original_name.len() > 255 {
+            return Err(error::ErrorBadRequest("filename is too long"));
+        }
     }

-    let valid_seconds = valid_duration.whole_seconds();
-    if valid_seconds > MAX_UPLOAD_SECONDS {
+    if *keep_for > MAX_UPLOAD_DURATION {
         return Err(error::ErrorBadRequest(format!(
-            "maximum allowed validity is {} seconds, but you specified {} seconds",
-            MAX_UPLOAD_SECONDS, valid_seconds
+            "maximum allowed validity is {}, but you specified {}",
+            MAX_UPLOAD_DURATION, keep_for
         )));
     }

     if let Some(no_auth_limits) = &config.no_auth_limits {
-        let requires_auth = valid_seconds > no_auth_limits.max_time.whole_seconds()
-            || valid_seconds > no_auth_limits.large_file_max_time.whole_seconds()
+        let requires_auth = *keep_for > no_auth_limits.max_time
+            || *keep_for > no_auth_limits.large_file_max_time
                 && size > no_auth_limits.large_file_size;
         // hIGh sECUriTy paSsWoRD CHEck
         if requires_auth && password.as_ref() != Some(&no_auth_limits.auth_password) {
@@ -181,13 +179,22 @@ async fn write_to_file(
     Ok(written_bytes)
 }

-fn get_original_filename(field: &actix_multipart::Field) -> Option<&String> {
-    field
+fn get_file_metadata(field: &actix_multipart::Field) -> (&Mime, Option<&String>) {
+    let mime = field.content_type();
+    let filename = field
         .content_disposition()
         .parameters
         .iter()
         .find_map(|param| match param {
             DispositionParam::Filename(filename) => Some(filename),
             _ => None,
-        })
+        });
+    (mime, filename)
+}
+
+fn get_content_type(path: &Path) -> Mime {
+    let std_path = std::path::Path::new(path.as_os_str());
+    tree_magic_mini::from_filepath(std_path)
+        .and_then(|mime| mime.parse().ok())
+        .unwrap_or(TEXT_PLAIN)
 }

src/rate_limit.rs (new file, 45 lines)

@@ -0,0 +1,45 @@
+use actix_governor::KeyExtractor;
+use actix_governor::PeerIpKeyExtractor;
+use actix_web::{dev::ServiceRequest, http::header::ContentType};
+use governor::clock::{Clock, DefaultClock, QuantaInstant};
+use governor::NotUntil;
+use std::net::IpAddr;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct ForwardedPeerIpKeyExtractor {
+    pub proxied: bool,
+}
+
+impl KeyExtractor for ForwardedPeerIpKeyExtractor {
+    type Key = IpAddr;
+    type KeyExtractionError = &'static str;
+
+    fn extract(&self, req: &ServiceRequest) -> Result<Self::Key, Self::KeyExtractionError> {
+        let forwarded_for = req.headers().get("x-forwarded-for");
+        if !self.proxied && forwarded_for.is_some() {
+            let forwarded_for = forwarded_for
+                .unwrap()
+                .to_str()
+                .map_err(|_| "x-forwarded-for contains invalid header value")?;
+            forwarded_for
+                .parse::<IpAddr>()
+                .map_err(|_| "x-forwarded-for contains invalid ip address")
+        } else {
+            PeerIpKeyExtractor.extract(req)
+        }
+    }
+
+    fn response_error_content(&self, negative: &NotUntil<QuantaInstant>) -> (String, ContentType) {
+        let wait_time = negative
+            .wait_time_from(DefaultClock::default().now())
+            .as_secs();
+        (
+            format!("too many requests, retry in {}s", wait_time),
+            ContentType::plaintext(),
+        )
+    }
+
+    fn response_error(&self, err: Self::KeyExtractionError) -> actix_web::Error {
+        actix_web::error::ErrorUnauthorized(err.to_string())
+    }
+}
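A sketch of how the extractor could be exercised with actix-web's test helpers (this test is not part of the changeset): without an x-forwarded-for header, the socket's peer address becomes the rate-limit key.

```rust
#[cfg(test)]
mod tests {
    use super::*;
    use actix_governor::KeyExtractor;
    use actix_web::test::TestRequest;

    #[test]
    fn peer_ip_is_used_without_forwarded_header() {
        let extractor = ForwardedPeerIpKeyExtractor { proxied: false };
        // No x-forwarded-for header: the peer address is the key.
        let req = TestRequest::default()
            .peer_addr("10.0.0.1:4711".parse().unwrap())
            .to_srv_request();
        assert_eq!(extractor.extract(&req), Ok("10.0.0.1".parse().unwrap()));
    }
}
```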

src/template.rs

@@ -12,15 +12,13 @@ const AUTH_SNIPPET_HTML: &str = include_str!("../snippet/auth.html.snippet");
 const MAX_SIZE_SNIPPET_HTML: &str = include_str!("../snippet/max_size.html.snippet");

 pub async fn write_prefillable_templates(config: &Config) {
-    let index_html = build_index_html(config);
-    let auth_hide_js = build_auth_hide_js(config);
-
     let index_path = config.static_dir.join("index.html");
-    let auth_hide_path = config.static_dir.join("auth-hide.js");
-
-    fs::write(index_path, index_html)
+    fs::write(index_path, build_index_html(config))
         .await
         .expect("could not write index.html to static folder");
-    if let Some(auth_hide_js) = auth_hide_js {
+
+    let auth_hide_path = config.static_dir.join("auth-hide.js");
+    if let Some(auth_hide_js) = build_auth_hide_js(config) {
         fs::write(auth_hide_path, auth_hide_js)
             .await
             .expect("could not write auth-hide.js to static folder");
@@ -33,45 +31,30 @@ pub async fn write_prefillable_templates(config: &Config) {
 }

 fn build_index_html(config: &Config) -> String {
-    let auth_snippet = config
-        .no_auth_limits
-        .as_ref()
-        .map_or("", |_| AUTH_SNIPPET_HTML);
-    let max_size_snippet = config
-        .max_file_size
-        .as_ref()
-        .map_or("", |_| MAX_SIZE_SNIPPET_HTML);
-    INDEX_HTML
-        .replace("{max_size_snippet}", max_size_snippet)
-        .replace(
-            "{max_size}",
-            &render_file_size(config.max_file_size.unwrap_or(0)),
-        )
-        .replace("{auth_snippet}", auth_snippet)
-        .replace(
-            "{auth_time}",
-            &config
-                .no_auth_limits
-                .as_ref()
-                .map(|limit| limit.max_time)
-                .map_or("".into(), render_duration),
-        )
-        .replace(
-            "{auth_large_time}",
-            &config
-                .no_auth_limits
-                .as_ref()
-                .map(|limit| limit.large_file_max_time)
-                .map_or("".into(), render_duration),
-        )
-        .replace(
-            "{auth_large_size}",
-            &config
-                .no_auth_limits
-                .as_ref()
-                .map(|limit| limit.large_file_size)
-                .map_or("".into(), render_file_size),
-        )
+    let mut html = INDEX_HTML.to_owned();
+    if let Some(limit) = config.no_auth_limits.as_ref() {
+        html = html
+            .replace("{auth_snippet}", AUTH_SNIPPET_HTML.trim_end())
+            .replace("{auth_time}", &render_duration(limit.max_time))
+            .replace(
+                "{auth_large_time}",
+                &render_duration(limit.large_file_max_time),
+            )
+            .replace(
+                "{auth_large_size}",
+                &render_file_size(limit.large_file_size),
+            );
+    } else {
+        html = html.replace("{auth_snippet}", "");
+    }
+    if let Some(max_file_size) = config.max_file_size {
+        html = html
+            .replace("{max_size_snippet}", MAX_SIZE_SNIPPET_HTML.trim_end())
+            .replace("{max_size}", &render_file_size(max_file_size));
+    } else {
+        html = html.replace("{max_size_snippet}", "")
+    };
+    html
 }

 fn render_file_size(size: u64) -> String {
fn render_file_size(size: u64) -> String { fn render_file_size(size: u64) -> String {

src/upload.rs

@@ -1,7 +1,6 @@
 use std::io::ErrorKind;

 use crate::config::Config;
-use crate::file_kind::FileKind;
 use crate::multipart::UploadConfig;
 use crate::{multipart, template};
 use actix_files::NamedFile;

@@ -18,8 +17,8 @@ const UPLOAD_HTML: &str = include_str!("../template/upload.html");
 const UPLOAD_SHORT_HTML: &str = include_str!("../template/upload-short.html");

 const ID_CHARS: &[char] = &[
-    'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v',
-    'w', 'x', 'y', 'z', '1', '2', '3', '4', '5', '6', '7', '8', '9',
+    'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u',
+    'v', 'w', 'x', 'y', 'z', '1', '2', '3', '4', '5', '6', '7', '8', '9',
 ];

 pub async fn index(config: web::Data<Config>) -> Result<NamedFile, Error> {
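The id alphabet gains 'i' but still leaves out the lookalikes 'l', 'o', and '0'. The generator itself is not part of this diff; a hypothetical sketch of drawing the five characters that the /{id:[a-z0-9]{5}} routes expect, using rand 0.8:

```rust
use rand::seq::SliceRandom;

// Hypothetical helper, not from the repository.
fn random_id(id_chars: &[char]) -> String {
    let mut rng = rand::thread_rng();
    (0..5).map(|_| *id_chars.choose(&mut rng).unwrap()).collect()
}
```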
@@ -42,11 +41,11 @@ pub async fn upload(
         error::ErrorInternalServerError("could not create file")
     })?;

-    let parsed_multipart = multipart::parse_multipart(payload, &file_id, &file_name, &config).await;
+    let parsed_multipart = multipart::parse_multipart(payload, &file_name, &config).await;

     let UploadConfig {
         original_name,
+        content_type,
         valid_till,
-        kind,
         delete_on_download,
     } = match parsed_multipart {
         Ok(data) => data,

@@ -65,14 +64,17 @@ pub async fn upload(
         }
     };

+    let file_name = original_name
+        .clone()
+        .unwrap_or_else(|| format!("{}.txt", file_id));
+
     let db_insert = sqlx::query(
-        "INSERT INTO Files (file_id, file_name, valid_till, kind, delete_on_download) \
+        "INSERT INTO Files (file_id, file_name, content_type, valid_till, delete_on_download) \
         VALUES ($1, $2, $3, $4, $5)",
     )
     .bind(&file_id)
-    .bind(&original_name)
+    .bind(&file_name)
+    .bind(&content_type.to_string())
     .bind(valid_till)
-    .bind(kind.to_string())
     .bind(delete_on_download)
     .execute(db.as_ref())
     .await;

@@ -90,24 +92,24 @@ pub async fn upload(
     }

     log::info!(
-        "{} create new file {} (valid_till: {}, kind: {}, delete_on_download: {})",
+        "{} create new file {} (valid_till: {}, content_type: {}, delete_on_download: {})",
         req.connection_info().realip_remote_addr().unwrap_or("-"),
         file_id,
         valid_till,
-        kind,
+        content_type,
         delete_on_download
     );

     expiry_watch_sender.send(()).await.unwrap();

-    let redirect = if kind == FileKind::Binary {
-        let encoded_name = urlencoding::encode(&original_name);
+    let redirect = if let Some(original_name) = original_name.as_ref() {
+        let encoded_name = urlencoding::encode(original_name);
         format!("/upload/{}/{}", file_id, encoded_name)
     } else {
         format!("/upload/{}", file_id)
     };

-    let url = get_file_url(&req, &file_id, Some(&original_name));
+    let url = get_file_url(&req, &file_id, original_name.as_deref());

     Ok(HttpResponse::SeeOther()
         .insert_header((LOCATION, redirect))
         .body(format!("{}\n", url)))