Compare commits

..

No commits in common. "1527445857000fc2e040053db1353c42915ada81" and "e0b5a3fc65ca77d25db97cd426a069ba880c2ad3" have entirely different histories.

13 changed files with 336 additions and 557 deletions

526
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,34 +1,32 @@
[package] [package]
name = "datatrash" name = "datatrash"
version = "1.1.6" version = "1.1.5"
authors = ["neri"] authors = ["neri"]
edition = "2021" edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
actix-web = { version = "4.2.1", default-features = false, features = [ actix-web = { version = "4.1.0", default-features = false, features = [
"macros", "macros",
"compress-gzip", "compress-gzip",
"compress-zstd", "compress-zstd",
] } ] }
sqlx = { version = "0.6.2", default-features = false, features = [ sqlx = { version = "0.6.0", default-features = false, features = [
"runtime-tokio-rustls", "runtime-tokio-rustls",
"postgres", "postgres",
"time", "time",
] } ] }
env_logger = "0.9.1" env_logger = "0.9.0"
log = "0.4.17" log = "0.4.17"
actix-files = "0.6.2" actix-files = "0.6.1"
tokio = { version = "1.21.2", features = ["rt", "macros", "sync"] } tokio = { version = "1.19.2", features = ["rt", "macros", "sync"] }
actix-multipart = "0.4.0" actix-multipart = "0.4.0"
futures-util = "0.3.24" futures-util = "0.3.21"
rand = "0.8.5" rand = "0.8.5"
time = "0.3.14" time = "0.3.11"
htmlescape = "0.3.1" htmlescape = "0.3.1"
urlencoding = "2.1.2" urlencoding = "2.1.0"
tree_magic_mini = { version = "3.0.3", features = ["with-gpl-data"] } tree_magic_mini = { version = "3.0.3", features = ["with-gpl-data"] }
mime = "0.3.16" mime = "0.3.16"
url = "2.3.1" url = "2.2.2"
actix-governor = "0.3.2"
governor = "0.4.2"

View File

@ -18,16 +18,12 @@ To run the software directly, use the compiling instructions below.
### General configuration ### General configuration
| environment variable | default value | description | | environment variable | default value |
| --------------------- | -------------- | ---------------------------------------------- | | -------------------- | -------------- |
| STATIC_DIR | ./static | directory to generate "static" files into | | STATIC_DIR | ./static |
| FILES_DIR | ./files | directory to save uploaded files into | | FILES_DIR | ./files |
| UPLOAD_MAX_BYTES | 8388608 (8MiB) | maximum size for uploaded files | | UPLOAD_MAX_BYTES | 8388608 (8MiB) |
| BIND_ADDRESS | 0.0.0.0:8000 | address to bind the server to | | BIND_ADDRESS | 0.0.0.0:8000 |
| RATE_LIMIT | true | whether download rate should be limited |
| RATE_LIMIT_PROXIED | false | whether rate limit should read x-forwarded-for |
| RATE_LIMIT_PER_SECOND | 60 | seconds to wait between requests |
| RATE_LIMIT_BURST | 1440 | allowed request burst |
### Database configuration ### Database configuration

View File

@ -10,5 +10,3 @@ CREATE TABLE IF NOT EXISTS files (
ALTER TABLE files ADD COLUMN IF NOT EXISTS delete_on_download boolean; ALTER TABLE files ADD COLUMN IF NOT EXISTS delete_on_download boolean;
ALTER TABLE files ALTER COLUMN delete_on_download set not null; ALTER TABLE files ALTER COLUMN delete_on_download set not null;
ALTER TABLE files ALTER COLUMN valid_till TYPE timestamptz; ALTER TABLE files ALTER COLUMN valid_till TYPE timestamptz;
ALTER TABLE files DROP COLUMN IF EXISTS kind;
ALTER TABLE files ADD COLUMN IF NOT EXISTS content_type varchar(255);

View File

@ -1,7 +1,6 @@
use std::env; use std::env;
use std::path::PathBuf; use std::path::PathBuf;
use time::ext::NumericalDuration;
use time::Duration; use time::Duration;
use tokio::fs; use tokio::fs;
@ -11,10 +10,6 @@ pub struct Config {
pub files_dir: PathBuf, pub files_dir: PathBuf,
pub max_file_size: Option<u64>, pub max_file_size: Option<u64>,
pub no_auth_limits: Option<NoAuthLimits>, pub no_auth_limits: Option<NoAuthLimits>,
pub enable_rate_limit: bool,
pub proxied: bool,
pub rate_limit_per_second: u64,
pub rate_limit_burst: u32,
} }
#[derive(Clone)] #[derive(Clone)]
@ -29,8 +24,8 @@ pub async fn get_config() -> Config {
let max_file_size = env::var("UPLOAD_MAX_BYTES") let max_file_size = env::var("UPLOAD_MAX_BYTES")
.ok() .ok()
.and_then(|variable| variable.parse().ok()) .and_then(|variable| variable.parse().ok())
.or(Some(8 * 1024 * 1024)) .unwrap_or(8 * 1024 * 1024);
.filter(|&max_file_size| max_file_size != 0); let max_file_size = (max_file_size != 0).then(|| max_file_size);
let static_dir = let static_dir =
PathBuf::from(env::var("STATIC_DIR").unwrap_or_else(|_| "./static".to_owned())); PathBuf::from(env::var("STATIC_DIR").unwrap_or_else(|_| "./static".to_owned()));
@ -41,26 +36,11 @@ pub async fn get_config() -> Config {
let no_auth_limits = get_no_auth_limits(); let no_auth_limits = get_no_auth_limits();
let enable_rate_limit = matches!(env::var("RATE_LIMIT").as_deref(), Ok("true") | Err(_));
let proxied = env::var("PROXIED").as_deref() == Ok("true");
let rate_limit_per_second = env::var("RATE_LIMIT_PER_SECOND")
.ok()
.and_then(|rate_limit| rate_limit.parse().ok())
.unwrap_or(60);
let rate_limit_burst = env::var("RATE_LIMIT_BURST")
.ok()
.and_then(|rate_limit| rate_limit.parse().ok())
.unwrap_or(1440);
Config { Config {
static_dir, static_dir,
files_dir, files_dir,
max_file_size, max_file_size,
no_auth_limits, no_auth_limits,
enable_rate_limit,
proxied,
rate_limit_per_second,
rate_limit_burst,
} }
} }
@ -74,8 +54,8 @@ fn get_no_auth_limits() -> Option<NoAuthLimits> {
(Some(auth_password), Some(max_time), Some(large_file_max_time), Some(large_file_size)) => { (Some(auth_password), Some(max_time), Some(large_file_max_time), Some(large_file_size)) => {
Some(NoAuthLimits { Some(NoAuthLimits {
auth_password, auth_password,
max_time: (max_time as i64).seconds(), max_time: Duration::seconds(max_time as i64),
large_file_max_time: (large_file_max_time as i64).seconds(), large_file_max_time: Duration::seconds(large_file_max_time as i64),
large_file_size, large_file_size,
}) })
} }

View File

@ -1,6 +1,5 @@
use sqlx::postgres::{PgPool, PgPoolOptions}; use sqlx::postgres::{PgPool, PgPoolOptions};
use std::env; use std::env;
use time::ext::NumericalStdDuration;
pub async fn setup_db() -> PgPool { pub async fn setup_db() -> PgPool {
let conn_url = &get_db_url(); let conn_url = &get_db_url();
@ -8,7 +7,7 @@ pub async fn setup_db() -> PgPool {
let pool = PgPoolOptions::new() let pool = PgPoolOptions::new()
.max_connections(5) .max_connections(5)
.acquire_timeout(5.std_seconds()) .acquire_timeout(std::time::Duration::from_secs(5))
.connect(conn_url) .connect(conn_url)
.await .await
.expect("could not create db pool"); .expect("could not create db pool");

View File

@ -1,9 +1,9 @@
use futures_util::TryStreamExt; use futures_util::TryStreamExt;
use time::OffsetDateTime;
use sqlx::{postgres::PgPool, Row}; use sqlx::{postgres::PgPool, Row};
use std::cmp::max; use std::cmp::max;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use time::ext::NumericalStdDuration; use time::ext::NumericalStdDuration;
use time::OffsetDateTime;
use tokio::fs; use tokio::fs;
use tokio::sync::mpsc::Receiver; use tokio::sync::mpsc::Receiver;
use tokio::time::timeout; use tokio::time::timeout;

View File

@ -6,19 +6,19 @@ use actix_web::{
http::header::{ http::header::{
Accept, CacheControl, CacheDirective, Charset, ContentDisposition, DispositionParam, Accept, CacheControl, CacheDirective, Charset, ContentDisposition, DispositionParam,
DispositionType, Expires, ExtendedValue, Header, HeaderValue, HttpDate, TryIntoHeaderValue, DispositionType, Expires, ExtendedValue, Header, HeaderValue, HttpDate, TryIntoHeaderValue,
ACCEPT, CACHE_CONTROL, CONTENT_TYPE, EXPIRES, VARY, ACCEPT, CACHE_CONTROL, EXPIRES, VARY,
}, },
web, Error, HttpRequest, HttpResponse, web, Error, HttpRequest, HttpResponse,
}; };
use mime::{Mime, APPLICATION_OCTET_STREAM, TEXT_HTML}; use mime::{Mime, TEXT_HTML};
use sqlx::postgres::PgPool; use sqlx::postgres::PgPool;
use std::path::Path; use std::path::Path;
use time::OffsetDateTime; use time::OffsetDateTime;
use tokio::fs; use tokio::fs;
use url::Url; use url::Url;
use crate::config::Config;
use crate::deleter; use crate::deleter;
use crate::{config::Config, file_kind::FileKind};
const TEXT_VIEW_HTML: &str = include_str!("../template/text-view.html"); const TEXT_VIEW_HTML: &str = include_str!("../template/text-view.html");
const URL_VIEW_HTML: &str = include_str!("../template/url-view.html"); const URL_VIEW_HTML: &str = include_str!("../template/url-view.html");
@ -37,14 +37,14 @@ pub async fn download(
config: web::Data<Config>, config: web::Data<Config>,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, Error> {
let id = req.match_info().query("id"); let id = req.match_info().query("id");
let (file_id, file_name, valid_till, content_type, delete) = load_file_info(id, &db).await?; let (file_id, file_name, valid_till, file_kind, delete) = load_file_info(id, &db).await?;
let mut path = config.files_dir.clone(); let mut path = config.files_dir.clone();
path.push(&file_id); path.push(&file_id);
let mime = Mime::from_str(&content_type).unwrap_or(APPLICATION_OCTET_STREAM); let file_mime = get_content_type(&path);
let mut response = match get_view_type(&req, &mime, &path, delete).await { let mut response = match get_view_type(&req, &file_kind, &file_mime, &path, delete).await {
ViewType::Raw => build_file_response(false, &file_name, path, mime, &req).await, ViewType::Raw => build_file_response(false, &file_name, path, file_mime, &req).await,
ViewType::Download => build_file_response(true, &file_name, path, mime, &req).await, ViewType::Download => build_file_response(true, &file_name, path, file_mime, &req).await,
ViewType::Html => build_text_response(&path).await, ViewType::Html => build_text_response(&path).await,
}?; }?;
@ -67,7 +67,7 @@ async fn load_file_info(
db: &web::Data<sqlx::Pool<sqlx::Postgres>>, db: &web::Data<sqlx::Pool<sqlx::Postgres>>,
) -> Result<(String, String, OffsetDateTime, String, bool), Error> { ) -> Result<(String, String, OffsetDateTime, String, bool), Error> {
sqlx::query_as( sqlx::query_as(
"SELECT file_id, file_name, valid_till, content_type, delete_on_download from files WHERE file_id = $1", "SELECT file_id, file_name, valid_till, kind, delete_on_download from files WHERE file_id = $1",
) )
.bind(id) .bind(id)
.fetch_optional(db.as_ref()) .fetch_optional(db.as_ref())
@ -79,9 +79,18 @@ async fn load_file_info(
.ok_or_else(|| error::ErrorNotFound("file does not exist or has expired")) .ok_or_else(|| error::ErrorNotFound("file does not exist or has expired"))
} }
/// Sniff the MIME type of a stored file from its magic bytes.
///
/// Falls back to `application/octet-stream` when the file cannot be read
/// or its type is unknown. The `expect` is safe for the fallback literal;
/// NOTE(review): it assumes tree_magic_mini only ever returns well-formed
/// MIME strings — confirm against the crate's guarantees.
fn get_content_type(path: &Path) -> Mime {
    // `path` is already a `&std::path::Path`; no re-wrapping needed.
    tree_magic_mini::from_filepath(path)
        .unwrap_or("application/octet-stream")
        .parse::<Mime>()
        .expect("tree_magic_mini should not produce invalid mime")
}
async fn get_view_type( async fn get_view_type(
req: &HttpRequest, req: &HttpRequest,
mime: &Mime, file_kind: &str,
file_mime: &Mime,
file_path: &Path, file_path: &Path,
delete_on_download: bool, delete_on_download: bool,
) -> ViewType { ) -> ViewType {
@ -91,7 +100,9 @@ async fn get_view_type(
if req.query_string().contains("raw") { if req.query_string().contains("raw") {
return ViewType::Raw; return ViewType::Raw;
} }
if mime.type_() != mime::TEXT { let is_text =
FileKind::from_str(file_kind) == Ok(FileKind::Text) || file_mime.type_() == mime::TEXT;
if !is_text {
return ViewType::Raw; return ViewType::Raw;
} }
if get_file_size(file_path).await >= TEXT_VIEW_SIZE_LIMIT { if get_file_size(file_path).await >= TEXT_VIEW_SIZE_LIMIT {
@ -102,7 +113,7 @@ async fn get_view_type(
if accept_mime == TEXT_HTML { if accept_mime == TEXT_HTML {
return ViewType::Html; return ViewType::Html;
} }
if mime_matches(&accept_mime, mime) { if mime_matches(&accept_mime, file_mime) {
break; break;
} }
} }
@ -146,7 +157,7 @@ async fn build_file_response(
download: bool, download: bool,
file_name: &str, file_name: &str,
path: PathBuf, path: PathBuf,
mime: Mime, content_type: Mime,
req: &HttpRequest, req: &HttpRequest,
) -> Result<HttpResponse, Error> { ) -> Result<HttpResponse, Error> {
let content_disposition = ContentDisposition { let content_disposition = ContentDisposition {
@ -162,32 +173,10 @@ async fn build_file_response(
log::error!("file could not be read {:?}", file_err); log::error!("file could not be read {:?}", file_err);
error::ErrorInternalServerError("this file should be here but could not be found") error::ErrorInternalServerError("this file should be here but could not be found")
})? })?
.set_content_type(mime) .set_content_type(content_type)
.set_content_disposition(content_disposition); .set_content_disposition(content_disposition);
let mut response = file.into_response(req); Ok(file.into_response(req))
add_headers(req, download, &mut response);
Ok(response)
}
fn add_headers(req: &HttpRequest, download: bool, response: &mut HttpResponse) {
// if the browser is trying to fetch this resource in a secure context pretend the reponse is
// just binary data so it won't be executed
let sec_fetch_mode = req
.headers()
.get("sec-fetch-mode")
.and_then(|v| v.to_str().ok());
if !download && sec_fetch_mode.is_some() && sec_fetch_mode != Some("navigate") {
response.headers_mut().insert(
CONTENT_TYPE,
HeaderValue::from_str(APPLICATION_OCTET_STREAM.as_ref())
.expect("mime type can be encoded to header value"),
);
}
// the reponse varies based on these request headers
response
.headers_mut()
.append(VARY, HeaderValue::from_static("accept, sec-fetch-mode"));
} }
fn get_disposition_params(filename: &str) -> Vec<DispositionParam> { fn get_disposition_params(filename: &str) -> Vec<DispositionParam> {

27
src/file_kind.rs Normal file
View File

@ -0,0 +1,27 @@
use std::{fmt::Display, str::FromStr};
/// Classification of an uploaded file as either pasted text or an uploaded
/// binary blob. Stored in the database as its lowercase string form.
//
// Fieldless two-variant enum: trivially `Copy`, and `Eq` accompanies the
// existing `PartialEq` (clippy: derive_partial_eq_without_eq).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum FileKind {
    Text,
    Binary,
}

impl Display for FileKind {
    /// Renders the kind as the lowercase token used for persistence
    /// ("text" / "binary"), the inverse of [`FileKind::from_str`].
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            FileKind::Text => write!(f, "text"),
            FileKind::Binary => write!(f, "binary"),
        }
    }
}

impl FromStr for FileKind {
    type Err = String;

    /// Parses a kind token case-insensitively; any other input yields a
    /// descriptive error string.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "text" => Ok(FileKind::Text),
            "binary" => Ok(FileKind::Binary),
            _ => Err(format!("unknown kind {}", s)),
        }
    }
}

View File

@ -2,17 +2,14 @@ mod config;
mod db; mod db;
mod deleter; mod deleter;
mod download; mod download;
mod file_kind;
mod multipart; mod multipart;
mod rate_limit;
mod template; mod template;
mod upload; mod upload;
use crate::rate_limit::ForwardedPeerIpKeyExtractor;
use actix_files::Files; use actix_files::Files;
use actix_governor::{Governor, GovernorConfigBuilder};
use actix_web::{ use actix_web::{
http::header::{HeaderName, HeaderValue, CONTENT_SECURITY_POLICY, X_CONTENT_TYPE_OPTIONS}, middleware::{self, Logger},
middleware::{self, DefaultHeaders, Logger},
web::{self, Data}, web::{self, Data},
App, Error, HttpResponse, HttpServer, App, Error, HttpResponse, HttpServer,
}; };
@ -21,11 +18,6 @@ use sqlx::postgres::PgPool;
use std::env; use std::env;
use tokio::{sync::mpsc::channel, task}; use tokio::{sync::mpsc::channel, task};
const DEFAULT_CSP: (HeaderName, &str) = (
CONTENT_SECURITY_POLICY,
"default-src 'none'; connect-src 'self'; img-src 'self'; media-src 'self'; font-src 'self'; script-src 'self'; style-src 'self'; object-src 'none'; base-uri 'self'; frame-src 'none'; frame-ancestors 'none'; form-action 'self';"
);
async fn not_found() -> Result<HttpResponse, Error> { async fn not_found() -> Result<HttpResponse, Error> {
Ok(HttpResponse::NotFound() Ok(HttpResponse::NotFound()
.content_type("text/plain") .content_type("text/plain")
@ -55,25 +47,10 @@ async fn main() -> std::io::Result<()> {
template::write_prefillable_templates(&config).await; template::write_prefillable_templates(&config).await;
let config = Data::new(config); let config = Data::new(config);
let governor_conf = GovernorConfigBuilder::default()
.per_second(config.rate_limit_per_second)
.burst_size(config.rate_limit_burst)
.key_extractor(ForwardedPeerIpKeyExtractor {
proxied: config.proxied,
})
.use_headers()
.finish()
.unwrap();
HttpServer::new({ HttpServer::new({
move || { move || {
let app = App::new() App::new()
.wrap(Logger::new(r#"%{r}a "%r" =%s %bbytes %Tsec"#)) .wrap(Logger::new(r#"%{r}a "%r" =%s %bbytes %Tsec"#))
.wrap(
DefaultHeaders::new()
.add(DEFAULT_CSP)
.add((X_CONTENT_TYPE_OPTIONS, HeaderValue::from_static("nosniff"))),
)
.wrap(middleware::Compress::default()) .wrap(middleware::Compress::default())
.app_data(db.clone()) .app_data(db.clone())
.app_data(expiry_watch_sender.clone()) .app_data(expiry_watch_sender.clone())
@ -85,19 +62,7 @@ async fn main() -> std::io::Result<()> {
.route(web::get().to(upload::uploaded)), .route(web::get().to(upload::uploaded)),
) )
.service(Files::new("/static", "static").disable_content_disposition()) .service(Files::new("/static", "static").disable_content_disposition())
.default_service(web::route().to(not_found)); .service(
if config.enable_rate_limit {
app.service(
web::resource([
"/{id:[a-z0-9]{5}}",
"/{id:[a-z0-9]{5}}/",
"/{id:[a-z0-9]{5}}/{name}",
])
.wrap(Governor::new(&governor_conf))
.route(web::get().to(download::download)),
)
} else {
app.service(
web::resource([ web::resource([
"/{id:[a-z0-9]{5}}", "/{id:[a-z0-9]{5}}",
"/{id:[a-z0-9]{5}}/", "/{id:[a-z0-9]{5}}/",
@ -105,7 +70,7 @@ async fn main() -> std::io::Result<()> {
]) ])
.route(web::get().to(download::download)), .route(web::get().to(download::download)),
) )
} .default_service(web::route().to(not_found))
} }
}) })
.bind(bind_address)? .bind(bind_address)?

View File

@ -1,30 +1,30 @@
use crate::config; use crate::{config, file_kind::FileKind};
use actix_multipart::{Field, Multipart}; use actix_multipart::{Field, Multipart};
use actix_web::{error, http::header::DispositionParam, Error}; use actix_web::{error, http::header::DispositionParam, Error};
use futures_util::{StreamExt, TryStreamExt}; use futures_util::{StreamExt, TryStreamExt};
use mime::{Mime, TEXT_PLAIN};
use std::path::Path; use std::path::Path;
use time::{Duration, OffsetDateTime}; use time::{Duration, OffsetDateTime};
use tokio::{fs::File, io::AsyncWriteExt}; use tokio::{fs::File, io::AsyncWriteExt};
const MAX_UPLOAD_DURATION: Duration = Duration::days(31); const MAX_UPLOAD_SECONDS: i64 = 31 * 24 * 60 * 60;
const DEFAULT_UPLOAD_DURATION: Duration = Duration::minutes(30); const DEFAULT_UPLOAD_SECONDS: u32 = 30 * 60;
pub(crate) struct UploadConfig { pub(crate) struct UploadConfig {
pub original_name: Option<String>, pub original_name: String,
pub content_type: Mime,
pub valid_till: OffsetDateTime, pub valid_till: OffsetDateTime,
pub kind: FileKind,
pub delete_on_download: bool, pub delete_on_download: bool,
} }
pub(crate) async fn parse_multipart( pub(crate) async fn parse_multipart(
mut payload: Multipart, mut payload: Multipart,
file_path: &Path, file_id: &str,
file_name: &Path,
config: &config::Config, config: &config::Config,
) -> Result<UploadConfig, error::Error> { ) -> Result<UploadConfig, error::Error> {
let mut original_name: Option<String> = None; let mut original_name: Option<String> = None;
let mut content_type: Option<Mime> = None; let mut keep_for: Option<String> = None;
let mut keep_for_seconds: Option<String> = None; let mut kind: Option<FileKind> = None;
let mut delete_on_download = false; let mut delete_on_download = false;
let mut password = None; let mut password = None;
let mut size = 0; let mut size = 0;
@ -34,23 +34,25 @@ pub(crate) async fn parse_multipart(
let name = name.as_str(); let name = name.as_str();
match name { match name {
"keep_for" => { "keep_for" => {
keep_for_seconds = Some(parse_string(name, field).await?); keep_for = Some(parse_string(name, field).await?);
} }
"file" => { "file" => {
let (mime, uploaded_name) = get_file_metadata(&field); let file_original_name = get_original_filename(&field);
if uploaded_name == None || uploaded_name.map(|f| f.as_str()) == Some("") { if file_original_name == None || file_original_name.map(|f| f.as_str()) == Some("")
{
continue; continue;
} }
original_name = uploaded_name.map(|f| f.to_string()); original_name = file_original_name.map(|f| f.to_string());
content_type = Some(mime.clone()); kind = Some(FileKind::Binary);
size = create_file(file_path, field, config.max_file_size).await?; size = create_file(file_name, field, config.max_file_size).await?;
} }
"text" => { "text" => {
if original_name.is_some() { if original_name.is_some() {
continue; continue;
} }
size = create_file(file_path, field, config.max_file_size).await?; original_name = Some(format!("{}.txt", file_id));
content_type = Some(get_content_type(file_path)); kind = Some(FileKind::Text);
size = create_file(file_name, field, config.max_file_size).await?;
} }
"delete_on_download" => { "delete_on_download" => {
delete_on_download = parse_string(name, field).await? != "false"; delete_on_download = parse_string(name, field).await? != "false";
@ -62,24 +64,24 @@ pub(crate) async fn parse_multipart(
}; };
} }
let content_type = let original_name = original_name.ok_or_else(|| error::ErrorBadRequest("no content found"))?;
content_type.ok_or_else(|| error::ErrorBadRequest("no content type found"))?; let kind = kind.ok_or_else(|| error::ErrorBadRequest("no content found"))?;
let keep_for = keep_for_seconds let keep_for: u32 = keep_for
.map(|k| k.parse()) .map(|k| k.parse())
.transpose() .transpose()
.map_err(|e| error::ErrorBadRequest(format!("field keep_for is not a number: {}", e)))? .map_err(|e| error::ErrorBadRequest(format!("field keep_for is not a number: {}", e)))?
.map(Duration::seconds) .unwrap_or(DEFAULT_UPLOAD_SECONDS);
.unwrap_or(DEFAULT_UPLOAD_DURATION); let valid_duration = Duration::seconds(keep_for.into());
let valid_till = OffsetDateTime::now_utc() + keep_for; let valid_till = OffsetDateTime::now_utc() + valid_duration;
let upload_config = UploadConfig { let upload_config = UploadConfig {
original_name, original_name,
content_type,
valid_till, valid_till,
kind,
delete_on_download, delete_on_download,
}; };
check_requirements(&upload_config, size, password, &keep_for, config)?; check_requirements(&upload_config, size, password, &valid_duration, config)?;
Ok(upload_config) Ok(upload_config)
} }
@ -88,25 +90,24 @@ fn check_requirements(
upload_config: &UploadConfig, upload_config: &UploadConfig,
size: u64, size: u64,
password: Option<String>, password: Option<String>,
keep_for: &Duration, valid_duration: &Duration,
config: &config::Config, config: &config::Config,
) -> Result<(), error::Error> { ) -> Result<(), error::Error> {
if let Some(original_name) = upload_config.original_name.as_ref() { if upload_config.original_name.len() > 255 {
if original_name.len() > 255 { return Err(error::ErrorBadRequest("filename is too long"));
return Err(error::ErrorBadRequest("filename is too long"));
}
} }
if *keep_for > MAX_UPLOAD_DURATION { let valid_seconds = valid_duration.whole_seconds();
if valid_seconds > MAX_UPLOAD_SECONDS {
return Err(error::ErrorBadRequest(format!( return Err(error::ErrorBadRequest(format!(
"maximum allowed validity is {}, but you specified {}", "maximum allowed validity is {} seconds, but you specified {} seconds",
MAX_UPLOAD_DURATION, keep_for MAX_UPLOAD_SECONDS, valid_seconds
))); )));
} }
if let Some(no_auth_limits) = &config.no_auth_limits { if let Some(no_auth_limits) = &config.no_auth_limits {
let requires_auth = *keep_for > no_auth_limits.max_time let requires_auth = valid_seconds > no_auth_limits.max_time.whole_seconds()
|| *keep_for > no_auth_limits.large_file_max_time || valid_seconds > no_auth_limits.large_file_max_time.whole_seconds()
&& size > no_auth_limits.large_file_size; && size > no_auth_limits.large_file_size;
// hIGh sECUriTy paSsWoRD CHEck // hIGh sECUriTy paSsWoRD CHEck
if requires_auth && password.as_ref() != Some(&no_auth_limits.auth_password) { if requires_auth && password.as_ref() != Some(&no_auth_limits.auth_password) {
@ -179,22 +180,13 @@ async fn write_to_file(
Ok(written_bytes) Ok(written_bytes)
} }
fn get_file_metadata(field: &actix_multipart::Field) -> (&Mime, Option<&String>) { fn get_original_filename(field: &actix_multipart::Field) -> Option<&String> {
let mime = field.content_type(); field
let filename = field
.content_disposition() .content_disposition()
.parameters .parameters
.iter() .iter()
.find_map(|param| match param { .find_map(|param| match param {
DispositionParam::Filename(filename) => Some(filename), DispositionParam::Filename(filename) => Some(filename),
_ => None, _ => None,
}); })
(mime, filename)
}
fn get_content_type(path: &Path) -> Mime {
let std_path = std::path::Path::new(path.as_os_str());
tree_magic_mini::from_filepath(std_path)
.and_then(|mime| mime.parse().ok())
.unwrap_or(TEXT_PLAIN)
} }

View File

@ -1,45 +0,0 @@
use actix_governor::KeyExtractor;
use actix_governor::PeerIpKeyExtractor;
use actix_web::{dev::ServiceRequest, http::header::ContentType};
use governor::clock::{Clock, DefaultClock, QuantaInstant};
use governor::NotUntil;
use std::net::IpAddr;
/// Rate-limit key extractor that identifies clients by IP address.
///
/// When `proxied` is false and an `x-forwarded-for` header is present, the
/// client IP is parsed from that header; otherwise the TCP peer address is
/// used (see the `KeyExtractor` impl).
/// NOTE(review): honoring the header only when `proxied` is *false* looks
/// inverted relative to the flag's name — confirm the intended semantics.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ForwardedPeerIpKeyExtractor {
    // Whether the service is expected to sit behind a reverse proxy.
    pub proxied: bool,
}
impl KeyExtractor for ForwardedPeerIpKeyExtractor {
    type Key = IpAddr;
    type KeyExtractionError = &'static str;

    /// Derives the rate-limit key (an IP) for a request: the parsed
    /// `x-forwarded-for` header when `proxied` is false and the header is
    /// present, otherwise the connection's peer address.
    fn extract(&self, req: &ServiceRequest) -> Result<Self::Key, Self::KeyExtractionError> {
        // NOTE(review): the header path is taken when `proxied` is *false*,
        // which looks inverted relative to the flag name — confirm intent.
        match req.headers().get("x-forwarded-for") {
            Some(header) if !self.proxied => header
                .to_str()
                .map_err(|_| "x-forwarded-for contains invalid header value")?
                .parse::<IpAddr>()
                .map_err(|_| "x-forwarded-for contains invalid ip adress"),
            _ => PeerIpKeyExtractor.extract(req),
        }
    }

    /// Builds the plain-text body returned with a rate-limit rejection,
    /// telling the client how many seconds to wait before retrying.
    fn response_error_content(&self, negative: &NotUntil<QuantaInstant>) -> (String, ContentType) {
        let now = DefaultClock::default().now();
        let wait_time = negative.wait_time_from(now).as_secs();
        let body = format!("too many requests, retry in {}s", wait_time);
        (body, ContentType::plaintext())
    }

    /// Maps a key-extraction failure to a 401 response.
    fn response_error(&self, err: Self::KeyExtractionError) -> actix_web::Error {
        actix_web::error::ErrorUnauthorized(err.to_string())
    }
}

View File

@ -1,6 +1,7 @@
use std::io::ErrorKind; use std::io::ErrorKind;
use crate::config::Config; use crate::config::Config;
use crate::file_kind::FileKind;
use crate::multipart::UploadConfig; use crate::multipart::UploadConfig;
use crate::{multipart, template}; use crate::{multipart, template};
use actix_files::NamedFile; use actix_files::NamedFile;
@ -17,8 +18,8 @@ const UPLOAD_HTML: &str = include_str!("../template/upload.html");
const UPLOAD_SHORT_HTML: &str = include_str!("../template/upload-short.html"); const UPLOAD_SHORT_HTML: &str = include_str!("../template/upload-short.html");
const ID_CHARS: &[char] = &[ const ID_CHARS: &[char] = &[
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v',
'v', 'w', 'x', 'y', 'z', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'w', 'x', 'y', 'z', '1', '2', '3', '4', '5', '6', '7', '8', '9',
]; ];
pub async fn index(config: web::Data<Config>) -> Result<NamedFile, Error> { pub async fn index(config: web::Data<Config>) -> Result<NamedFile, Error> {
@ -41,11 +42,11 @@ pub async fn upload(
error::ErrorInternalServerError("could not create file") error::ErrorInternalServerError("could not create file")
})?; })?;
let parsed_multipart = multipart::parse_multipart(payload, &file_name, &config).await; let parsed_multipart = multipart::parse_multipart(payload, &file_id, &file_name, &config).await;
let UploadConfig { let UploadConfig {
original_name, original_name,
content_type,
valid_till, valid_till,
kind,
delete_on_download, delete_on_download,
} = match parsed_multipart { } = match parsed_multipart {
Ok(data) => data, Ok(data) => data,
@ -64,17 +65,14 @@ pub async fn upload(
} }
}; };
let file_name = original_name
.clone()
.unwrap_or_else(|| format!("{}.txt", file_id));
let db_insert = sqlx::query( let db_insert = sqlx::query(
"INSERT INTO Files (file_id, file_name, content_type, valid_till, delete_on_download) \ "INSERT INTO Files (file_id, file_name, valid_till, kind, delete_on_download) \
VALUES ($1, $2, $3, $4, $5)", VALUES ($1, $2, $3, $4, $5)",
) )
.bind(&file_id) .bind(&file_id)
.bind(&file_name) .bind(&original_name)
.bind(&content_type.to_string())
.bind(valid_till) .bind(valid_till)
.bind(kind.to_string())
.bind(delete_on_download) .bind(delete_on_download)
.execute(db.as_ref()) .execute(db.as_ref())
.await; .await;
@ -92,24 +90,24 @@ pub async fn upload(
} }
log::info!( log::info!(
"{} create new file {} (valid_till: {}, content_type: {}, delete_on_download: {})", "{} create new file {} (valid_till: {}, kind: {}, delete_on_download: {})",
req.connection_info().realip_remote_addr().unwrap_or("-"), req.connection_info().realip_remote_addr().unwrap_or("-"),
file_id, file_id,
valid_till, valid_till,
content_type, kind,
delete_on_download delete_on_download
); );
expiry_watch_sender.send(()).await.unwrap(); expiry_watch_sender.send(()).await.unwrap();
let redirect = if let Some(original_name) = original_name.as_ref() { let redirect = if kind == FileKind::Binary {
let encoded_name = urlencoding::encode(original_name); let encoded_name = urlencoding::encode(&original_name);
format!("/upload/{}/{}", file_id, encoded_name) format!("/upload/{}/{}", file_id, encoded_name)
} else { } else {
format!("/upload/{}", file_id) format!("/upload/{}", file_id)
}; };
let url = get_file_url(&req, &file_id, original_name.as_deref()); let url = get_file_url(&req, &file_id, Some(&original_name));
Ok(HttpResponse::SeeOther() Ok(HttpResponse::SeeOther()
.insert_header((LOCATION, redirect)) .insert_header((LOCATION, redirect))
.body(format!("{}\n", url))) .body(format!("{}\n", url)))