From f97b3d79befa1d8ddbb9853f8bef7245971bad34 Mon Sep 17 00:00:00 2001
From: neri
Date: Thu, 8 Apr 2021 00:03:02 +0200
Subject: [PATCH] implement basic auth and additional upload limits for
 unauthenticated users

---
 Dockerfile               |  2 +-
 README.md                | 47 ++++++++++++++++++++++--------
 docker-compose.yml       | 40 +++++++++++++++++++------
 src/config.rs            | 31 ++++++++++++++++++++
 src/main.rs              |  3 ++
 src/multipart.rs         | 55 ++++++++++++++++++++++++++---------
 src/upload.rs            | 39 +++++++++++++++++++++----
 template/auth-hide.js    | 36 +++++++++++++++++++++++
 template/index-auth.html | 63 ++++++++++++++++++++++++++++++++++++++++
 9 files changed, 275 insertions(+), 41 deletions(-)
 create mode 100644 template/auth-hide.js
 create mode 100644 template/index-auth.html

diff --git a/Dockerfile b/Dockerfile
index 070711f..6dd1279 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM ekidd/rust-musl-builder:latest as build
+FROM ekidd/rust-musl-builder:beta as build
 
 USER rust
 WORKDIR /home/rust/src/
diff --git a/README.md b/README.md
index 6066118..d9ff565 100644
--- a/README.md
+++ b/README.md
@@ -25,17 +25,40 @@ docker-compose up -d --build
 
 ## running & config
 
-The static files directory needs to be next to the binary.
+- The static files directory needs to be next to the binary.
+- The maximum filename length is 255 bytes.
 
-| environment variable | default value        |
-| -------------------- | -------------------- |
-| DATABASE_URL         |                      |
-| DATABASE_USER        |                      |
-| DATABASE_PASS        |                      |
-| DATABASE_HOST        | localhost            |
-| DATABASE_NAME        | datatrash            |
-| FILES_DIR            | ./files              |
-| UPLOAD_MAX_BYTES     | 8388608 (8MiB)       |
-| BIND_ADDRESS         | 0.0.0.0:8000         |
+### General configuration
 
-The maximum filename length is 255
+| environment variable | default value  |
+| -------------------- | -------------- |
+| FILES_DIR            | ./files        |
+| UPLOAD_MAX_BYTES     | 8388608 (8MiB) |
+| BIND_ADDRESS         | 0.0.0.0:8000   |
+
+### Database configuration
+
+| environment variable | default value |
+| -------------------- | ------------- |
+| DATABASE_URL         |               |
+| DATABASE_USER        |               |
+| DATABASE_PASS        |               |
+| DATABASE_HOST        | localhost     |
+| DATABASE_NAME        | datatrash     |
+
+### No auth limits configuration
+
+Require authentication for certain uploads:
+
+- The password is provided as plain text
+- Uploads with a longer validity than NO_AUTH_MAX_TIME require authentication
+- Uploads larger than NO_AUTH_LARGE_FILE_SIZE require authentication when they are valid
+  for longer than NO_AUTH_LARGE_FILE_MAX_TIME
+- All times are in seconds; the size is in bytes
+
+| environment variable        | default value |
+| --------------------------- | ------------- |
+| AUTH_PASSWORD               |               |
+| NO_AUTH_MAX_TIME            |               |
+| NO_AUTH_LARGE_FILE_MAX_TIME |               |
+| NO_AUTH_LARGE_FILE_SIZE     |               |
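For illustration only (not part of the patch): with the limit values from the docker-compose.yml example below, an upload that should stay valid for longer than NO_AUTH_MAX_TIME has to carry the configured password as an additional multipart field, which src/multipart.rs reads under the name `password`. The URL placeholder follows the {upload_url} convention the templates already use:

    # 2592000 s = 30 days > NO_AUTH_MAX_TIME, so the AUTH_PASSWORD value must be supplied
    curl -F 'file=@yourfile.rs' -F 'keep_for=2592000' -F 'password=auth_password' {upload_url}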
diff --git a/docker-compose.yml b/docker-compose.yml
index d5fe39c..f7ed2d6 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -2,16 +2,38 @@ version: "3.3"
 services:
   datatrash:
     build: .
+    container_name: datatrash
+    volumes:
+      - /data/datatrash/files:/opt/datatrash/files
     environment:
-      DATABASE_USER: admin
-      DATABASE_PASSWORD: secure
-      DATABASE_HOST: postgres
-    ports:
-      - '8000:8000'
-  postgres:
+      DATABASE_HOST: db
+      DATABASE_USER: datatrash
+      DATABASE_PASS: jNmLZYr75as0W5TY7iSaIEVPSa2awaWAAgC5Zt8JsRAXbYrscLW4Dk7ZxHL1Bu4v
+      UPLOAD_MAX_BYTES: 1073741824
+      AUTH_PASSWORD: auth_password
+      NO_AUTH_MAX_TIME: 604800
+      NO_AUTH_LARGE_FILE_MAX_TIME: 1800
+      NO_AUTH_LARGE_FILE_SIZE: 10485760
+    networks:
+      - default
+      - nginx
+    restart: unless-stopped
+    depends_on:
+      - db
+  db:
     image: postgres
+    container_name: datatrash-db
+    volumes:
+      - /data/datatrash/db:/var/lib/postgresql/data
     environment:
-      POSTGRES_USER: admin
-      POSTGRES_PASSWORD: secure
-
+      POSTGRES_DB: datatrash
+      POSTGRES_USER: datatrash
+      POSTGRES_PASSWORD: jNmLZYr75as0W5TY7iSaIEVPSa2awaWAAgC5Zt8JsRAXbYrscLW4Dk7ZxHL1Bu4v
+    restart: unless-stopped
+    networks:
+      - default
+networks:
+  nginx:
+    external:
+      name: nginx_default
diff --git a/src/config.rs b/src/config.rs
index b8597af..b470991 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -6,6 +6,15 @@ use async_std::{fs, path::PathBuf};
 pub struct Config {
     pub files_dir: PathBuf,
     pub max_file_size: Option<u64>,
+    pub no_auth_limits: Option<NoAuthLimits>,
+}
+
+#[derive(Clone)]
+pub struct NoAuthLimits {
+    pub auth_password: String,
+    pub max_time: u64,
+    pub large_file_max_time: u64,
+    pub large_file_size: u64,
 }
 
 pub async fn get_config() -> Config {
@@ -20,8 +29,30 @@ pub async fn get_config() -> Config {
         .await
         .expect("could not create directory for storing files");
 
+    let no_auth_limits = match (
+        env::var("AUTH_PASSWORD").ok(),
+        env_number("NO_AUTH_MAX_TIME"),
+        env_number("NO_AUTH_LARGE_FILE_MAX_TIME"),
+        env_number("NO_AUTH_LARGE_FILE_SIZE"),
+    ) {
+        (Some(auth_password), Some(max_time), Some(large_file_max_time), Some(large_file_size)) => {
+            Some(NoAuthLimits {
+                auth_password,
+                max_time,
+                large_file_max_time,
+                large_file_size,
+            })
+        }
+        _ => None,
+    };
+
     Config {
         files_dir,
         max_file_size,
+        no_auth_limits,
     }
 }
+
+fn env_number(variable: &str) -> Option<u64> {
+    env::var(variable).ok().and_then(|n| n.parse::<u64>().ok())
+}
diff --git a/src/main.rs b/src/main.rs
index 1b2a5a3..73f2c7c 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -52,6 +52,9 @@ async fn main() -> std::io::Result<()> {
                 web::resource(["/upload/{id}", "/upload/{id}/{name}"])
                     .route(web::get().to(upload::uploaded)),
             )
+            .service(
+                web::resource("/assets/auth-hide.js").route(web::get().to(upload::auth_hide)),
+            )
             .service(Files::new("/static", "static").disable_content_disposition())
             .service(
                 web::resource([
diff --git a/src/multipart.rs b/src/multipart.rs
index f354e2b..b30f527 100644
--- a/src/multipart.rs
+++ b/src/multipart.rs
@@ -1,4 +1,4 @@
-use crate::file_kind::FileKind;
+use crate::{config, file_kind::FileKind};
 use actix_multipart::{Field, Multipart};
 use actix_web::{error, http::header::DispositionParam};
 use async_std::{fs, fs::File, path::Path, prelude::*};
@@ -16,12 +16,14 @@ pub(crate) async fn parse_multipart(
     mut payload: Multipart,
     file_id: &str,
     filename: &Path,
-    max_size: Option<u64>,
+    config: &config::Config,
 ) -> Result<UploadConfig, error::Error> {
     let mut original_name: Option<String> = None;
     let mut keep_for: Option<String> = None;
     let mut kind: Option<FileKind> = None;
     let mut delete_on_download = false;
+    let mut password = None;
+    let mut size = 0;
 
     while let Ok(Some(field)) = payload.try_next().await {
         let name = get_field_name(&field)?;
@@ -40,7 +42,7 @@ pub(crate) async fn parse_multipart(
                 let mut file = fs::File::create(&filename)
                     .await
                     .map_err(|_| error::ErrorInternalServerError("could not create file"))?;
-                write_to_file(&mut file, field, max_size).await?;
+                size = write_to_file(&mut file, field, config.max_file_size).await?;
             }
             "text" => {
                 if original_name.is_some() {
@@ -51,11 +53,14 @@ pub(crate) async fn parse_multipart(
                 let mut file = fs::File::create(&filename)
                     .await
                     .map_err(|_| error::ErrorInternalServerError("could not create file"))?;
-                write_to_file(&mut file, field, max_size).await?;
+                size = write_to_file(&mut file, field, config.max_file_size).await?;
             }
             "delete_on_download" => {
                 delete_on_download = dbg!(parse_string(name, field).await?) != "false";
             }
+            "password" => {
+                password = Some(parse_string(name, field).await?);
+            }
             _ => {}
         };
     }
@@ -66,21 +71,24 @@ pub(crate) async fn parse_multipart(
     if original_name.len() > 255 {
         return Err(error::ErrorBadRequest("filename is too long"));
     }
-    let valid_till = if let Some(keep_for) = keep_for {
-        let keep_for = keep_for.parse().map_err(|e| {
+    let validated_keep_for: u64 = if let Some(keep_for) = keep_for {
+        let seconds = keep_for.parse().map_err(|e| {
            error::ErrorBadRequest(format!("field keep_for is not a number: {}", e))
         })?;
-        let max_keep_for = Duration::days(31).num_seconds();
-        if keep_for > max_keep_for {
+        let max_keep_for = Duration::days(31).num_seconds() as u64;
+        if seconds > max_keep_for {
             return Err(error::ErrorBadRequest(format!(
                 "maximum allowed validity is {} seconds, but you specified {} seconds",
-                max_keep_for, keep_for
+                max_keep_for, seconds
             )));
         }
-        Local::now() + Duration::seconds(keep_for)
+        seconds
     } else {
-        Local::now() + Duration::seconds(1800)
+        1800
     };
+    let valid_till = Local::now() + Duration::seconds(validated_keep_for as i64);
+
+    check_auth_requirements(size, validated_keep_for, password, config)?;
 
     Ok(UploadConfig {
         original_name,
@@ -90,6 +98,25 @@ pub(crate) async fn parse_multipart(
     })
 }
 
+fn check_auth_requirements(
+    size: u64,
+    validated_keep_for: u64,
+    password: Option<String>,
+    config: &config::Config,
+) -> Result<(), error::Error> {
+    if let Some(no_auth_limits) = &config.no_auth_limits {
+        let requires_auth = validated_keep_for > no_auth_limits.max_time
+            || validated_keep_for > no_auth_limits.large_file_max_time
+                && size > no_auth_limits.large_file_size;
+        if requires_auth && password.as_ref() != Some(&no_auth_limits.auth_password) {
+            return Err(error::ErrorBadRequest(
+                "upload requires authentication, but authentication was incorrect",
+            ));
+        }
+    }
+    Ok(())
+}
+
 fn get_field_name(field: &Field) -> Result<String, error::Error> {
     Ok(field
         .content_disposition()
@@ -117,12 +144,12 @@ async fn write_to_file(
     file: &mut File,
     mut field: actix_multipart::Field,
     max_size: Option<u64>,
-) -> Result<(), error::Error> {
+) -> Result<u64, error::Error> {
     let mut written_bytes: u64 = 0;
     while let Some(chunk) = field.next().await {
         let chunk = chunk.map_err(error::ErrorBadRequest)?;
+        written_bytes += chunk.len() as u64;
         if let Some(max_size) = max_size {
-            written_bytes += chunk.len() as u64;
             if written_bytes > max_size {
                 return Err(error::ErrorBadRequest(format!(
                     "exceeded maximum file size of {} bytes",
@@ -134,7 +161,7 @@ async fn write_to_file(
             .await
             .map_err(|_| error::ErrorInternalServerError("could not write file"))?;
     }
-    Ok(())
+    Ok(written_bytes)
 }
 
 fn get_original_filename(field: &actix_multipart::Field) -> Option<String> {
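The decision rule in check_auth_requirements above is compact, so a small test sketch may help; it is illustrative only and not part of the patch. Placed inside src/multipart.rs it would see the private function; it reuses Config and NoAuthLimits from src/config.rs, and the threshold values are simply the ones from the docker-compose.yml example:

    #[cfg(test)]
    mod no_auth_limit_tests {
        use super::*;
        use crate::config::{Config, NoAuthLimits};

        // hypothetical helper: a Config carrying the docker-compose example limits
        fn config() -> Config {
            Config {
                // async_std PathBuf via From<std::path::PathBuf>
                files_dir: std::path::PathBuf::from("./files").into(),
                max_file_size: None,
                no_auth_limits: Some(NoAuthLimits {
                    auth_password: "auth_password".to_string(),
                    max_time: 604_800,           // NO_AUTH_MAX_TIME: 7 days
                    large_file_max_time: 1_800,  // NO_AUTH_LARGE_FILE_MAX_TIME: 30 minutes
                    large_file_size: 10_485_760, // NO_AUTH_LARGE_FILE_SIZE: 10 MiB
                }),
            }
        }

        #[test]
        fn small_short_upload_needs_no_password() {
            // 1 KiB kept for 30 minutes stays below both limits
            assert!(check_auth_requirements(1024, 1800, None, &config()).is_ok());
        }

        #[test]
        fn large_long_upload_needs_the_password() {
            // 20 MB kept for a day exceeds large_file_size and large_file_max_time
            assert!(check_auth_requirements(20_000_000, 86_400, None, &config()).is_err());
            let password = Some("auth_password".to_string());
            assert!(check_auth_requirements(20_000_000, 86_400, password, &config()).is_ok());
        }
    }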
diff --git a/src/upload.rs b/src/upload.rs
index 203a301..f4bdf45 100644
--- a/src/upload.rs
+++ b/src/upload.rs
@@ -9,6 +9,8 @@ use rand::prelude::SliceRandom;
 use sqlx::postgres::PgPool;
 
 const INDEX_HTML: &str = include_str!("../template/index.html");
+const INDEX_AUTH_HTML: &str = include_str!("../template/index-auth.html");
+const AUTH_HIDE_JS: &str = include_str!("../template/auth-hide.js");
 const UPLOAD_HTML: &str = include_str!("../template/upload.html");
 
 const ID_CHARS: &[char] = &[
@@ -16,12 +18,40 @@ const ID_CHARS: &[char] = &[
     'w', 'x', 'y', 'z', '1', '2', '3', '4', '5', '6', '7', '8', '9',
 ];
 
-pub async fn index(req: web::HttpRequest) -> Result<HttpResponse, Error> {
+pub async fn index(
+    req: web::HttpRequest,
+    config: web::Data<config::Config>,
+) -> Result<HttpResponse, Error> {
     let upload_url = format!("{}/upload", get_host_url(&req));
-    let index_html = INDEX_HTML.replace("{upload_url}", upload_url.as_str());
+    let index_html = if config.no_auth_limits.is_some() {
+        INDEX_AUTH_HTML
+    } else {
+        INDEX_HTML
+    };
+    let filled_index_html = index_html.replace("{upload_url}", upload_url.as_str());
     Ok(HttpResponse::Ok()
         .content_type("text/html")
-        .body(index_html))
+        .body(filled_index_html))
+}
+
+pub async fn auth_hide(config: web::Data<config::Config>) -> Result<HttpResponse, Error> {
+    if let Some(no_auth_limits) = &config.no_auth_limits {
+        let auth_hide_js = AUTH_HIDE_JS
+            .replace("{no_auth_max_time}", &no_auth_limits.max_time.to_string())
+            .replace(
+                "{no_auth_large_file_max_time}",
+                &no_auth_limits.large_file_max_time.to_string(),
+            )
+            .replace(
+                "{no_auth_large_file_size}",
+                &no_auth_limits.large_file_size.to_string(),
+            );
+        Ok(HttpResponse::Ok()
+            .content_type("application/javascript")
+            .body(auth_hide_js))
+    } else {
+        Err(error::ErrorNotFound("file not found"))
+    }
 }
 
 pub async fn upload(
@@ -35,8 +65,7 @@ pub async fn upload(
     let mut filename = config.files_dir.clone();
     filename.push(&file_id);
 
-    let parsed_multipart =
-        multipart::parse_multipart(payload, &file_id, &filename, config.max_file_size).await;
+    let parsed_multipart = multipart::parse_multipart(payload, &file_id, &filename, &config).await;
     let UploadConfig {
         original_name,
         valid_till,
diff --git a/template/auth-hide.js b/template/auth-hide.js
new file mode 100644
index 0000000..a89ca8e
--- /dev/null
+++ b/template/auth-hide.js
@@ -0,0 +1,36 @@
+const fileUpload = document.getElementById("file-upload");
+const textUpload = document.getElementById("text-upload");
+const keepFor = document.getElementById("keep_for");
+const passwordInput = document.getElementById("password-input");
+
+const maxTime = Number("{no_auth_max_time}");
+const largeFileMaxTime = Number("{no_auth_large_file_max_time}");
+const largeFileSize = Number("{no_auth_large_file_size}");
+const updatePasswordInput = () => {
+  const requirePassword = keep > maxTime || (size > largeFileSize && keep > largeFileMaxTime);
+  passwordInput.className = requirePassword ? "" : "hidden";
+};
+
+let keep = Number(keepFor.value);
+let size = fileUpload.files[0]
+  ? fileUpload.files[0].size
+  : textUpload.value.length;
+updatePasswordInput();
+
+fileUpload.addEventListener("change", (e) => {
+  size = fileUpload.files[0]
+    ? fileUpload.files[0].size
+    : textUpload.value.length;
+  updatePasswordInput();
+});
+textUpload.addEventListener("input", (e) => {
+  if (!fileUpload.files[0]) {
+    size = textUpload.value.length;
+    updatePasswordInput();
+  }
+});
+keepFor.addEventListener("change", (e) => {
+  keep = Number(keepFor.value);
+  updatePasswordInput();
+});
+
diff --git a/template/index-auth.html b/template/index-auth.html
new file mode 100644
index 0000000..d0b22a9
--- /dev/null
+++ b/template/index-auth.html
@@ -0,0 +1,63 @@
+<!-- [markup not recoverable: document head and the upload form (file/text inputs,
+      keep_for selection, delete_on_download option, password input, auth-hide.js
+      script include); the recoverable text content of the template follows] -->
+datatrash
+
+file upload
+  curl -F 'file=@yourfile.rs' {upload_url}
+text upload
+  curl -F 'text=your text' {upload_url}
+including time
+  curl -F 'text=your text' -F 'keep_for=1800' {upload_url}
+limit to one download
+  curl -F 'text=your text' -F 'delete_on_download=true' {upload_url}
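Because the markup of template/index-auth.html did not survive, here is a rough sketch (explicitly not the original file) of the two pieces that auth-hide.js and src/multipart.rs imply it adds on top of index.html: a password field that starts out hidden, and the script served by the new /assets/auth-hide.js route. The element id is the one auth-hide.js queries, the field name is the one src/multipart.rs parses; placement, classes and labels are guesses:

    <!-- illustrative sketch only, not the original template -->
    <input type="password" id="password-input" name="password" class="hidden" />
    <script src="/assets/auth-hide.js"></script>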