Compare commits
9 Commits
c9a3af3756...1527445857
Author | SHA1 | Date
---|---|---
neri | 1527445857 |
neri | 4ea8797149 |
neri | 4496335f50 |
neri | 96eadb1723 |
neri | e0b5a3fc65 |
neri | b414fda39a |
neri | 53c568082d |
neri | bf7c762f83 |
neri | 36b9096325 |
File diff suppressed because it is too large
Cargo.toml (24 changed lines)
@@ -1,34 +1,34 @@
 [package]
 name = "datatrash"
-version = "1.1.2"
+version = "1.1.6"
 authors = ["neri"]
 edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-actix-web = { version = "4.0.1", default-features = false, features = [
+actix-web = { version = "4.2.1", default-features = false, features = [
     "macros",
     "compress-gzip",
     "compress-zstd",
 ] }
-sqlx = { version = "0.5.13", default-features = false, features = [
+sqlx = { version = "0.6.2", default-features = false, features = [
     "runtime-tokio-rustls",
     "postgres",
     "time",
 ] }
-env_logger = "0.9.0"
-log = "0.4.16"
-actix-files = "0.6.0"
-tokio = { version = "1.17.0", features = ["rt", "macros", "sync"] }
+env_logger = "0.9.1"
+log = "0.4.17"
+actix-files = "0.6.2"
+tokio = { version = "1.21.2", features = ["rt", "macros", "sync"] }
 actix-multipart = "0.4.0"
-futures-util = "0.3.21"
+futures-util = "0.3.24"
 rand = "0.8.5"
-time = "0.2.27"
+time = "0.3.14"
 htmlescape = "0.3.1"
-urlencoding = "2.1.0"
+urlencoding = "2.1.2"
 tree_magic_mini = { version = "3.0.3", features = ["with-gpl-data"] }
 mime = "0.3.16"
-url = "2.2.2"
-actix-governor = "0.3.1"
+url = "2.3.1"
+actix-governor = "0.3.2"
 governor = "0.4.2"
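Most of these bumps are routine patch releases; the two that ripple into the code below are sqlx 0.5 → 0.6 and time 0.2 → 0.3. A small illustrative sketch (not part of the repository) of the `time::ext` helper style the updated code adopts, assuming time 0.3:

```rust
// Illustrative only: the extension-trait helpers from time 0.3 that the
// hunks below switch to.
use time::ext::{NumericalDuration, NumericalStdDuration};
use time::Duration;

fn main() {
    // Constructor style and extension style produce the same time::Duration.
    assert_eq!(Duration::seconds(90), 90.seconds());

    // std_seconds() yields a std::time::Duration, the type that sqlx's
    // pool options expect (see the acquire_timeout hunk further down).
    assert_eq!(5.std_seconds(), std::time::Duration::from_secs(5));
}
```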
@@ -1,6 +1,7 @@
 use std::env;

 use std::path::PathBuf;
+use time::ext::NumericalDuration;
 use time::Duration;
 use tokio::fs;

@@ -28,8 +29,8 @@ pub async fn get_config() -> Config {
     let max_file_size = env::var("UPLOAD_MAX_BYTES")
         .ok()
         .and_then(|variable| variable.parse().ok())
-        .unwrap_or(8 * 1024 * 1024);
-    let max_file_size = (max_file_size != 0).then(|| max_file_size);
+        .or(Some(8 * 1024 * 1024))
+        .filter(|&max_file_size| max_file_size != 0);

     let static_dir =
         PathBuf::from(env::var("STATIC_DIR").unwrap_or_else(|_| "./static".to_owned()));
@@ -73,8 +74,8 @@ fn get_no_auth_limits() -> Option<NoAuthLimits> {
         (Some(auth_password), Some(max_time), Some(large_file_max_time), Some(large_file_size)) => {
             Some(NoAuthLimits {
                 auth_password,
-                max_time: Duration::seconds(max_time as i64),
-                large_file_max_time: Duration::seconds(large_file_max_time as i64),
+                max_time: (max_time as i64).seconds(),
+                large_file_max_time: (large_file_max_time as i64).seconds(),
                 large_file_size,
             })
         }
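These hunks touch the configuration loader: the `UPLOAD_MAX_BYTES` handling collapses into a single Option chain where an explicit 0 disables the limit, and the no-auth limits switch to `(… as i64).seconds()`. A standalone sketch of that Option chain (the function and argument names here are mine, not the project's):

```rust
// Standalone sketch of the new UPLOAD_MAX_BYTES handling: missing or
// unparsable values fall back to 8 MiB, and an explicit 0 means "no limit".
fn max_file_size_from(raw: Option<&str>) -> Option<u64> {
    raw.and_then(|v| v.parse::<u64>().ok())
        .or(Some(8 * 1024 * 1024))
        .filter(|&size| size != 0)
}

fn main() {
    assert_eq!(max_file_size_from(None), Some(8 * 1024 * 1024)); // default
    assert_eq!(max_file_size_from(Some("1024")), Some(1024)); // explicit limit
    assert_eq!(max_file_size_from(Some("0")), None); // 0 disables the limit
    assert_eq!(max_file_size_from(Some("oops")), Some(8 * 1024 * 1024)); // bad value -> default
}
```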
@@ -1,5 +1,6 @@
 use sqlx::postgres::{PgPool, PgPoolOptions};
 use std::env;
+use time::ext::NumericalStdDuration;

 pub async fn setup_db() -> PgPool {
     let conn_url = &get_db_url();
@@ -7,7 +8,7 @@ pub async fn setup_db() -> PgPool {

     let pool = PgPoolOptions::new()
         .max_connections(5)
-        .connect_timeout(std::time::Duration::from_secs(5))
+        .acquire_timeout(5.std_seconds())
         .connect(conn_url)
         .await
         .expect("could not create db pool");
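These hunks adapt the pool setup to sqlx 0.6, where the pool's `connect_timeout` option became `acquire_timeout` and takes a `std::time::Duration`. A minimal sketch of the same setup, assuming a `DATABASE_URL` environment variable (the real code builds its URL in `get_db_url()`, which is outside this diff):

```rust
// Minimal sketch, not the project's module: sqlx 0.6 pool setup with the
// renamed acquire_timeout and time 0.3's std_seconds() helper.
use sqlx::postgres::PgPoolOptions;
use time::ext::NumericalStdDuration;

#[tokio::main(flavor = "current_thread")]
async fn main() -> Result<(), sqlx::Error> {
    // Assumption for illustration: a reachable Postgres at DATABASE_URL.
    let conn_url = std::env::var("DATABASE_URL").expect("DATABASE_URL not set");

    let pool = PgPoolOptions::new()
        .max_connections(5)
        // sqlx 0.6 renamed connect_timeout to acquire_timeout; the value is
        // a std::time::Duration, hence 5.std_seconds().
        .acquire_timeout(5.std_seconds())
        .connect(&conn_url)
        .await?;

    println!("connected");
    drop(pool); // connections are closed when the pool is dropped
    Ok(())
}
```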
@@ -1,17 +1,19 @@
-use std::{path::PathBuf, str::FromStr};
+use std::{path::PathBuf, str::FromStr, time::SystemTime};

 use actix_files::NamedFile;
 use actix_web::{
     error,
     http::header::{
-        Accept, Charset, ContentDisposition, DispositionParam, DispositionType, ExtendedValue,
-        Header, HeaderValue, CONTENT_TYPE, VARY,
+        Accept, CacheControl, CacheDirective, Charset, ContentDisposition, DispositionParam,
+        DispositionType, Expires, ExtendedValue, Header, HeaderValue, HttpDate, TryIntoHeaderValue,
+        ACCEPT, CACHE_CONTROL, CONTENT_TYPE, EXPIRES, VARY,
     },
     web, Error, HttpRequest, HttpResponse,
 };
 use mime::{Mime, APPLICATION_OCTET_STREAM, TEXT_HTML};
 use sqlx::postgres::PgPool;
 use std::path::Path;
+use time::OffsetDateTime;
 use tokio::fs;
 use url::Url;

@@ -35,16 +37,19 @@ pub async fn download(
     config: web::Data<Config>,
 ) -> Result<HttpResponse, Error> {
     let id = req.match_info().query("id");
-    let (file_id, file_name, content_type, delete) = load_file_info(id, &db).await?;
+    let (file_id, file_name, valid_till, content_type, delete) = load_file_info(id, &db).await?;
     let mut path = config.files_dir.clone();
     path.push(&file_id);

     let mime = Mime::from_str(&content_type).unwrap_or(APPLICATION_OCTET_STREAM);
-    let response = match get_view_type(&req, &mime, &path, delete).await {
-        ViewType::Raw => build_file_response(false, &file_name, path, mime, &req),
-        ViewType::Download => build_file_response(true, &file_name, path, mime, &req),
+    let mut response = match get_view_type(&req, &mime, &path, delete).await {
+        ViewType::Raw => build_file_response(false, &file_name, path, mime, &req).await,
+        ViewType::Download => build_file_response(true, &file_name, path, mime, &req).await,
         ViewType::Html => build_text_response(&path).await,
-    };
+    }?;
+
+    insert_cache_headers(&mut response, valid_till);
+
     if delete {
         deleter::delete_by_id(&db, &file_id, &config.files_dir)
             .await
@@ -53,15 +58,16 @@ pub async fn download(
                 error::ErrorInternalServerError("could not delete file")
             })?;
     }
-    response
+
+    Ok(response)
 }

 async fn load_file_info(
     id: &str,
     db: &web::Data<sqlx::Pool<sqlx::Postgres>>,
-) -> Result<(String, String, String, bool), Error> {
+) -> Result<(String, String, OffsetDateTime, String, bool), Error> {
     sqlx::query_as(
-        "SELECT file_id, file_name, content_type, delete_on_download from files WHERE file_id = $1",
+        "SELECT file_id, file_name, valid_till, content_type, delete_on_download from files WHERE file_id = $1",
     )
     .bind(id)
     .fetch_optional(db.as_ref())
@@ -136,7 +142,7 @@ async fn build_text_response(path: &Path) -> Result<HttpResponse, Error> {
         .body(html))
 }

-fn build_file_response(
+async fn build_file_response(
     download: bool,
     file_name: &str,
     path: PathBuf,
@@ -158,6 +164,7 @@ fn build_file_response(
         })?
         .set_content_type(mime)
         .set_content_disposition(content_disposition);
+
     let mut response = file.into_response(req);
     add_headers(req, download, &mut response);
     Ok(response)
@@ -194,3 +201,33 @@ fn get_disposition_params(filename: &str) -> Vec<DispositionParam> {
     }
     parameters
 }
+
+fn insert_cache_headers(response: &mut HttpResponse, valid_till: OffsetDateTime) {
+    if response.status().is_success() {
+        let valid_duration = valid_till - OffsetDateTime::now_utc();
+        let valid_cache_seconds = valid_duration.whole_seconds().clamp(0, u32::MAX as i64) as u32;
+        response.headers_mut().insert(
+            CACHE_CONTROL,
+            CacheControl(vec![
+                CacheDirective::Public,
+                CacheDirective::MustRevalidate,
+                CacheDirective::MaxAge(valid_cache_seconds), // todo: expiry in seconds
+                CacheDirective::NoTransform,
+                CacheDirective::Extension("immutable".to_owned(), None),
+            ])
+            .try_into_value()
+            .unwrap(),
+        );
+        response.headers_mut().insert(
+            EXPIRES,
+            Expires(HttpDate::from(
+                SystemTime::now() + std::time::Duration::from_secs(valid_cache_seconds.into()),
+            ))
+            .try_into_value()
+            .unwrap(),
+        );
+    }
+    response
+        .headers_mut()
+        .insert(VARY, HeaderValue::from_name(ACCEPT));
+}
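These hunks are the download handler: `load_file_info` now also returns the file's `valid_till` timestamp, `build_file_response` becomes async, and the new `insert_cache_headers` fills `Cache-Control`, `Expires`, and `Vary` from the remaining validity. A standalone sketch (names are mine) of the clamping arithmetic behind the `max-age` value:

```rust
// Standalone sketch of the max-age computation used by insert_cache_headers:
// the remaining validity is clamped into u32 range so expired files get 0.
use time::ext::NumericalDuration;
use time::{Duration, OffsetDateTime};

fn max_age_for(valid_till: OffsetDateTime, now: OffsetDateTime) -> u32 {
    let remaining: Duration = valid_till - now;
    remaining.whole_seconds().clamp(0, u32::MAX as i64) as u32
}

fn main() {
    let now = OffsetDateTime::now_utc();
    assert_eq!(max_age_for(now + 90.seconds(), now), 90);
    assert_eq!(max_age_for(now - 10.seconds(), now), 0); // already expired
}
```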
@@ -4,12 +4,11 @@ use actix_web::{error, http::header::DispositionParam, Error};
 use futures_util::{StreamExt, TryStreamExt};
 use mime::{Mime, TEXT_PLAIN};
 use std::path::Path;
-use time::OffsetDateTime;
-use time::{ext::NumericalDuration, Duration};
+use time::{Duration, OffsetDateTime};
 use tokio::{fs::File, io::AsyncWriteExt};

-const MAX_UPLOAD_SECONDS: i64 = 31 * 24 * 60 * 60;
-const DEFAULT_UPLOAD_SECONDS: u32 = 30 * 60;
+const MAX_UPLOAD_DURATION: Duration = Duration::days(31);
+const DEFAULT_UPLOAD_DURATION: Duration = Duration::minutes(30);

 pub(crate) struct UploadConfig {
     pub original_name: Option<String>,
@@ -25,7 +24,7 @@ pub(crate) async fn parse_multipart(
 ) -> Result<UploadConfig, error::Error> {
     let mut original_name: Option<String> = None;
     let mut content_type: Option<Mime> = None;
-    let mut keep_for: Option<String> = None;
+    let mut keep_for_seconds: Option<String> = None;
     let mut delete_on_download = false;
     let mut password = None;
     let mut size = 0;
@@ -35,7 +34,7 @@ pub(crate) async fn parse_multipart(
         let name = name.as_str();
         match name {
             "keep_for" => {
-                keep_for = Some(parse_string(name, field).await?);
+                keep_for_seconds = Some(parse_string(name, field).await?);
             }
             "file" => {
                 let (mime, uploaded_name) = get_file_metadata(&field);
@@ -65,13 +64,13 @@ pub(crate) async fn parse_multipart(

     let content_type =
         content_type.ok_or_else(|| error::ErrorBadRequest("no content type found"))?;
-    let keep_for: u32 = keep_for
+    let keep_for = keep_for_seconds
         .map(|k| k.parse())
         .transpose()
         .map_err(|e| error::ErrorBadRequest(format!("field keep_for is not a number: {}", e)))?
-        .unwrap_or(DEFAULT_UPLOAD_SECONDS);
-    let valid_duration = keep_for.seconds();
-    let valid_till = OffsetDateTime::now_utc() + valid_duration;
+        .map(Duration::seconds)
+        .unwrap_or(DEFAULT_UPLOAD_DURATION);
+    let valid_till = OffsetDateTime::now_utc() + keep_for;

     let upload_config = UploadConfig {
         original_name,
@@ -80,7 +79,7 @@ pub(crate) async fn parse_multipart(
         delete_on_download,
     };

-    check_requirements(&upload_config, size, password, &valid_duration, config)?;
+    check_requirements(&upload_config, size, password, &keep_for, config)?;

     Ok(upload_config)
 }
@@ -89,7 +88,7 @@ fn check_requirements(
     upload_config: &UploadConfig,
     size: u64,
     password: Option<String>,
-    valid_duration: &Duration,
+    keep_for: &Duration,
     config: &config::Config,
 ) -> Result<(), error::Error> {
     if let Some(original_name) = upload_config.original_name.as_ref() {
@@ -98,17 +97,16 @@ fn check_requirements(
         }
     }

-    let valid_seconds = valid_duration.whole_seconds();
-    if valid_seconds > MAX_UPLOAD_SECONDS {
+    if *keep_for > MAX_UPLOAD_DURATION {
         return Err(error::ErrorBadRequest(format!(
-            "maximum allowed validity is {} seconds, but you specified {} seconds",
-            MAX_UPLOAD_SECONDS, valid_seconds
+            "maximum allowed validity is {}, but you specified {}",
+            MAX_UPLOAD_DURATION, keep_for
         )));
     }

     if let Some(no_auth_limits) = &config.no_auth_limits {
-        let requires_auth = valid_seconds > no_auth_limits.max_time.whole_seconds()
-            || valid_seconds > no_auth_limits.large_file_max_time.whole_seconds()
+        let requires_auth = *keep_for > no_auth_limits.max_time
+            || *keep_for > no_auth_limits.large_file_max_time
                 && size > no_auth_limits.large_file_size;
         // hIGh sECUriTy paSsWoRD CHEck
         if requires_auth && password.as_ref() != Some(&no_auth_limits.auth_password) {
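These hunks move the upload parser from raw second counts to `time::Duration` values (`MAX_UPLOAD_DURATION`, `DEFAULT_UPLOAD_DURATION`). A standalone sketch (function name and error type are mine) of the new `keep_for` pipeline: parse, default, then range-check as a Duration:

```rust
// Standalone sketch of the new keep_for handling: the optional form field is
// parsed as whole seconds, mapped to a time::Duration, defaulted, and then
// checked against the 31-day cap as a Duration.
use time::Duration;

const MAX_UPLOAD_DURATION: Duration = Duration::days(31);
const DEFAULT_UPLOAD_DURATION: Duration = Duration::minutes(30);

fn keep_for_from(field: Option<&str>) -> Result<Duration, String> {
    let keep_for = field
        .map(|k| k.parse())
        .transpose()
        .map_err(|e| format!("field keep_for is not a number: {}", e))?
        .map(Duration::seconds)
        .unwrap_or(DEFAULT_UPLOAD_DURATION);
    if keep_for > MAX_UPLOAD_DURATION {
        return Err(format!(
            "maximum allowed validity is {}, but you specified {}",
            MAX_UPLOAD_DURATION, keep_for
        ));
    }
    Ok(keep_for)
}

fn main() {
    assert_eq!(keep_for_from(None), Ok(DEFAULT_UPLOAD_DURATION));
    assert_eq!(keep_for_from(Some("60")), Ok(Duration::minutes(1)));
    assert!(keep_for_from(Some("999999999")).is_err()); // past the 31-day cap
    assert!(keep_for_from(Some("soon")).is_err()); // not a number
}
```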
@@ -14,11 +14,6 @@ impl KeyExtractor for ForwardedPeerIpKeyExtractor {
     type Key = IpAddr;
     type KeyExtractionError = &'static str;

-    #[cfg(feature = "log")]
-    fn name(&self) -> &'static str {
-        "Forwarded peer IP"
-    }
-
     fn extract(&self, req: &ServiceRequest) -> Result<Self::Key, Self::KeyExtractionError> {
         let forwarded_for = req.headers().get("x-forwarded-for");
         if !self.proxied && forwarded_for.is_some() {
@@ -44,8 +39,7 @@ impl KeyExtractor for ForwardedPeerIpKeyExtractor {
         )
     }

-    #[cfg(feature = "log")]
-    fn key_name(&self, key: &Self::Key) -> Option<String> {
-        Some(key.to_string())
+    fn response_error(&self, err: Self::KeyExtractionError) -> actix_web::Error {
+        actix_web::error::ErrorUnauthorized(err.to_string())
     }
 }
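These hunks appear to track the `KeyExtractor` trait shape shipped with actix-governor 0.3.2: the log-feature-gated `name`/`key_name` helpers go away and `response_error` turns a failed extraction into an unauthorized response. The extractor's actual job, reading the client IP from `x-forwarded-for` when proxied, is unchanged; a plain-std sketch of that kind of parsing (the `client, proxy1, proxy2` header layout assumed here is illustrative):

```rust
// Plain-std sketch (no actix types) of the kind of parsing the extractor
// does: take the first entry of an X-Forwarded-For value and read it as an
// IP address.
use std::net::IpAddr;

fn peer_ip_from_forwarded_for(header: &str) -> Result<IpAddr, &'static str> {
    header
        .split(',')
        .next()
        .map(str::trim)
        .ok_or("x-forwarded-for is empty")?
        .parse()
        .map_err(|_| "x-forwarded-for contains an invalid IP address")
}

fn main() {
    let ip = peer_ip_from_forwarded_for("203.0.113.7, 10.0.0.1").unwrap();
    assert_eq!(ip, "203.0.113.7".parse::<IpAddr>().unwrap());
    assert!(peer_ip_from_forwarded_for("not-an-ip").is_err());
}
```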
@@ -12,15 +12,13 @@ const AUTH_SNIPPET_HTML: &str = include_str!("../snippet/auth.html.snippet");
 const MAX_SIZE_SNIPPET_HTML: &str = include_str!("../snippet/max_size.html.snippet");

 pub async fn write_prefillable_templates(config: &Config) {
-    let index_html = build_index_html(config);
-    let auth_hide_js = build_auth_hide_js(config);
     let index_path = config.static_dir.join("index.html");
-    let auth_hide_path = config.static_dir.join("auth-hide.js");
-
-    fs::write(index_path, index_html)
+    fs::write(index_path, build_index_html(config))
         .await
         .expect("could not write index.html to static folder");
-    if let Some(auth_hide_js) = auth_hide_js {
+
+    let auth_hide_path = config.static_dir.join("auth-hide.js");
+    if let Some(auth_hide_js) = build_auth_hide_js(config) {
         fs::write(auth_hide_path, auth_hide_js)
             .await
             .expect("could not write auth-hide.js to static folder");
@@ -33,45 +31,30 @@ pub async fn write_prefillable_templates(config: &Config) {
 }

 fn build_index_html(config: &Config) -> String {
-    let auth_snippet = config
-        .no_auth_limits
-        .as_ref()
-        .map_or("", |_| AUTH_SNIPPET_HTML);
-    let max_size_snippet = config
-        .max_file_size
-        .as_ref()
-        .map_or("", |_| MAX_SIZE_SNIPPET_HTML);
-    INDEX_HTML
-        .replace("{max_size_snippet}", max_size_snippet)
-        .replace(
-            "{max_size}",
-            &render_file_size(config.max_file_size.unwrap_or(0)),
-        )
-        .replace("{auth_snippet}", auth_snippet)
-        .replace(
-            "{auth_time}",
-            &config
-                .no_auth_limits
-                .as_ref()
-                .map(|limit| limit.max_time)
-                .map_or("".into(), render_duration),
-        )
-        .replace(
-            "{auth_large_time}",
-            &config
-                .no_auth_limits
-                .as_ref()
-                .map(|limit| limit.large_file_max_time)
-                .map_or("".into(), render_duration),
-        )
-        .replace(
-            "{auth_large_size}",
-            &config
-                .no_auth_limits
-                .as_ref()
-                .map(|limit| limit.large_file_size)
-                .map_or("".into(), render_file_size),
-        )
+    let mut html = INDEX_HTML.to_owned();
+    if let Some(limit) = config.no_auth_limits.as_ref() {
+        html = html
+            .replace("{auth_snippet}", AUTH_SNIPPET_HTML.trim_end())
+            .replace("{auth_time}", &render_duration(limit.max_time))
+            .replace(
+                "{auth_large_time}",
+                &render_duration(limit.large_file_max_time),
+            )
+            .replace(
+                "{auth_large_size}",
+                &render_file_size(limit.large_file_size),
+            );
+    } else {
+        html = html.replace("{auth_snippet}", "");
+    }
+    if let Some(max_file_size) = config.max_file_size {
+        html = html
+            .replace("{max_size_snippet}", MAX_SIZE_SNIPPET_HTML.trim_end())
+            .replace("{max_size}", &render_file_size(max_file_size));
+    } else {
+        html = html.replace("{max_size_snippet}", "")
+    };
+    html
 }

 fn render_file_size(size: u64) -> String {
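The final hunks rewrite `build_index_html` to start from the raw template and replace each placeholder only when the corresponding limit is configured, instead of pre-computing every snippet. A toy sketch of that pattern (the template string and `Limits` type are stand-ins; the real snippets come from the project's `snippet/` files):

```rust
// Toy sketch of the placeholder-replacement pattern the new
// build_index_html follows.
struct Limits {
    max_time_label: String,
}

fn build_index_html(template: &str, limits: Option<&Limits>) -> String {
    let mut html = template.to_owned();
    if let Some(limit) = limits {
        html = html
            .replace("{auth_snippet}", "<p>password required after {auth_time}</p>")
            .replace("{auth_time}", &limit.max_time_label);
    } else {
        // Without configured limits the placeholder disappears entirely.
        html = html.replace("{auth_snippet}", "");
    }
    html
}

fn main() {
    let template = "<body>{auth_snippet}</body>";
    let limits = Limits { max_time_label: "30 minutes".to_owned() };
    assert_eq!(
        build_index_html(template, Some(&limits)),
        "<body><p>password required after 30 minutes</p></body>"
    );
    assert_eq!(build_index_html(template, None), "<body></body>");
}
```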