datatrash/src/main.rs

mod deleter;
mod file_kind;
mod multipart;

use actix_files::{Files, NamedFile};
use actix_multipart::Multipart;
use actix_web::{
    error,
    http::header::{ContentDisposition, DispositionParam, DispositionType},
    middleware,
    web::{self, Bytes},
    App, Error, FromRequest, HttpRequest, HttpResponse, HttpServer,
};
use async_std::{
    channel::{self, Sender},
    fs,
    path::PathBuf,
    task,
};
use file_kind::FileKind;
use futures::TryStreamExt;
use sqlx::{
    postgres::{PgPool, PgPoolOptions, PgRow},
    Row,
};
use std::env;

const INDEX_HTML: &str = include_str!("../template/index.html");
const UPLOAD_HTML: &str = include_str!("../template/upload.html");
const VIEW_HTML: &str = include_str!("../template/view.html");
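
/// GET `/`: renders the landing page, substituting the absolute upload URL
/// into the bundled index template.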
async fn index(req: web::HttpRequest) -> Result<HttpResponse, Error> {
    let upload_url = format!("{}/upload", get_host_url(&req));
    let index_html = INDEX_HTML.replace("{upload_url}", upload_url.as_str());
    Ok(HttpResponse::Ok()
        .content_type("text/html")
        .body(index_html))
}

// Multipart upload data:
// required: either a 'file' or a 'text' field
// optional: 'keep_for', the retention period, defaults to 30 minutes
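//
// Illustrative request only (assumes the default bind address and that the
// 'keep_for' value format matches what multipart.rs parses):
//   curl -F 'text=hello world' http://localhost:8000/upload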
async fn upload(
    req: web::HttpRequest,
    payload: Multipart,
    db: web::Data<PgPool>,
    expiry_watch_sender: web::Data<Sender<()>>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Error> {
    let file_id = format!("{:x?}", rand::random::<u32>());
    let mut filename = config.files_dir.clone();
    filename.push(&file_id);
    let (original_name, valid_till, kind) =
        match multipart::parse_multipart(payload, &file_id, &filename).await {
            Ok(data) => data,
            Err(err) => {
                if filename.exists().await {
                    fs::remove_file(filename).await.map_err(|_| {
                        error::ErrorInternalServerError(
                            "could not parse multipart; could not remove file",
                        )
                    })?;
                }
                return Err(err);
            }
        };
    let db_insert = sqlx::query(
        "INSERT INTO Files (file_id, file_name, valid_till, kind) VALUES ($1, $2, $3, $4)",
    )
    .bind(&file_id)
    .bind(original_name.as_ref().unwrap_or(&file_id))
    .bind(valid_till.naive_local())
    .bind(kind.to_string())
    .execute(db.as_ref())
    .await;
    if db_insert.is_err() {
        fs::remove_file(filename).await.map_err(|_| {
            error::ErrorInternalServerError(
                "could not insert file into database; could not remove file",
            )
        })?;
        return Err(error::ErrorInternalServerError(
            "could not insert file into database",
        ));
    }
    log::info!(
        "create new file {} (valid_till: {}, kind: {})",
        file_id,
        valid_till,
        kind
    );
    expiry_watch_sender.send(()).await.unwrap();
    let redirect = if kind == FileKind::BINARY && original_name.is_some() {
        let encoded_name = urlencoding::encode(original_name.as_ref().unwrap());
        format!("/upload/{}/{}", file_id, encoded_name)
    } else {
        format!("/upload/{}", file_id)
    };
    let url = get_file_url(&req, &file_id, original_name.as_deref());
    Ok(HttpResponse::SeeOther()
        .header("location", redirect)
        .body(format!("{}\n", url)))
}
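
/// Builds the externally visible base URL (scheme and host) from the
/// connection info of the incoming request.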
fn get_host_url(req: &web::HttpRequest) -> String {
    let conn = req.connection_info();
    format!("{}://{}", conn.scheme(), conn.host())
}
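
/// Builds the absolute download URL for a file, appending the
/// percent-encoded original name when one is known.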
fn get_file_url(req: &web::HttpRequest, id: &str, name: Option<&str>) -> String {
    if let Some(name) = name {
        let encoded_name = urlencoding::encode(name);
        format!("{}/file/{}/{}", get_host_url(req), id, encoded_name)
    } else {
        format!("{}/file/{}", get_host_url(req), id)
    }
}
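
/// GET `/upload/{id}` and `/upload/{id}/{name}`: confirmation page shown
/// after a successful upload, containing the download URL.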
async fn uploaded(req: web::HttpRequest) -> Result<HttpResponse, Error> {
    let id = req.match_info().query("id");
    let name = req.match_info().get("name");
    let url = get_file_url(&req, id, name);
    let upload_html = UPLOAD_HTML.replace("{url}", url.as_str());
    Ok(HttpResponse::Ok()
        .content_type("text/html")
        .body(upload_html))
}
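
/// GET `/file/{id}` and `/file/{id}/{name}`: looks the file up in the
/// database and serves it. Text uploads are rendered in an HTML view unless
/// the query string contains `raw`; everything else is sent as an attachment.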
async fn download(
    req: HttpRequest,
    db: web::Data<PgPool>,
    config: web::Data<Config>,
) -> Result<HttpResponse, Error> {
    let id = req.match_info().query("id");
    let mut rows = sqlx::query("SELECT file_id, file_name, kind from files WHERE file_id = $1")
        .bind(id)
        .fetch(db.as_ref());
    let row: PgRow = rows
        .try_next()
        .await
        .map_err(|_| error::ErrorInternalServerError("could not run select statement"))?
        .ok_or_else(|| error::ErrorNotFound("file does not exist or has expired"))?;
    let file_id: String = row.get("file_id");
    let file_name: String = row.get("file_name");
    let kind: String = row.get("kind");
    let mut path = config.files_dir.clone();
    path.push(&file_id);
    if kind == FileKind::TEXT.to_string() && !req.query_string().contains("raw") {
        let content = fs::read_to_string(path).await.map_err(|_| {
            error::ErrorInternalServerError("this file should be here but could not be found")
        })?;
        let encoded = htmlescape::encode_minimal(&content);
        let view_html = VIEW_HTML.replace("{text}", &encoded);
        let response = HttpResponse::Ok().content_type("text/html").body(view_html);
        Ok(response)
    } else {
        let file = NamedFile::open(path)
            .map_err(|_| {
                error::ErrorInternalServerError("this file should be here but could not be found")
            })?
            .set_content_disposition(ContentDisposition {
                disposition: DispositionType::Attachment,
                parameters: vec![DispositionParam::Filename(file_name)],
            });
        file.into_response(&req)
    }
}
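
/// Fallback handler for unmatched routes.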
async fn not_found() -> Result<HttpResponse, Error> {
    Ok(HttpResponse::NotFound()
        .content_type("text/plain")
        .body("not found"))
}
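
/// Assembles the postgres connection string. `DATABASE_URL` wins outright;
/// otherwise the URL is built from `DATABASE_USER`, `DATABASE_PASS`,
/// `DATABASE_HOST` and `DATABASE_NAME`, falling back to
/// `postgresql://localhost/datatrash`.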
fn get_db_url() -> String {
    if let Ok(database_url) = env::var("DATABASE_URL") {
        return database_url;
    }
    let auth = if let Ok(user) = env::var("DATABASE_USER") {
        if let Ok(pass) = env::var("DATABASE_PASS") {
            format!("{}:{}@", user, pass)
        } else {
            format!("{}@", user)
        }
    } else {
        String::new()
    };
    format!(
        "postgresql://{auth}{host}/{name}",
        auth = auth,
        host = env::var("DATABASE_HOST").unwrap_or_else(|_| "localhost".to_string()),
        name = env::var("DATABASE_NAME").unwrap_or_else(|_| "datatrash".to_string())
    )
}
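
/// Creates the connection pool and runs the schema setup from init-db.sql,
/// aborting startup if the database is not reachable.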
async fn setup_db() -> PgPool {
    let conn_url = &get_db_url();
    log::info!("Using Connection string {}", conn_url);
    let pool = PgPoolOptions::new()
        .max_connections(5)
        .connect_timeout(std::time::Duration::from_secs(5))
        .connect(conn_url)
        .await
        .expect("could not create db pool");
    sqlx::query(include_str!("../init-db.sql"))
        .execute(&pool)
        .await
        .expect("could not create table Files");
    pool
}
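
/// Runtime configuration shared with the request handlers.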
#[derive(Clone)]
struct Config {
    files_dir: PathBuf,
}
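
/// Entry point. Configuration comes from the environment: `FILES_DIR`
/// (upload storage directory, default `./files`), `UPLOAD_MAX_BYTES`
/// (payload size limit, default 8 MiB), `BIND_ADDRESS` (default
/// `0.0.0.0:8000`), `RUST_LOG`, and the `DATABASE_*` variables used above.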
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    if env::var("RUST_LOG").is_err() {
        env::set_var("RUST_LOG", "info");
    }
    env_logger::init();
    let pool: PgPool = setup_db().await;
    let config = Config {
        files_dir: PathBuf::from(env::var("FILES_DIR").unwrap_or_else(|_| "./files".to_owned())),
    };
    fs::create_dir_all(&config.files_dir)
        .await
        .expect("could not create directory for storing files");
    let (sender, receiver) = channel::bounded(8);
    log::info!("omnomnom");
    task::spawn(deleter::delete_old_files(
        receiver,
        pool.clone(),
        config.files_dir.clone(),
    ));
    let db = web::Data::new(pool);
    let expiry_watch_sender = web::Data::new(sender);
    let upload_max_bytes: usize = env::var("UPLOAD_MAX_BYTES")
        .ok()
        .and_then(|variable| variable.parse().ok())
        .unwrap_or(8 * 1024 * 1024);
    let bind_address = env::var("BIND_ADDRESS").unwrap_or_else(|_| "0.0.0.0:8000".to_owned());
    HttpServer::new({
        move || {
            App::new()
                .wrap(middleware::Logger::default())
                .app_data(db.clone())
                .app_data(expiry_watch_sender.clone())
                .app_data(Bytes::configure(|cfg| cfg.limit(upload_max_bytes)))
                .data(config.clone())
                .service(web::resource("/").route(web::get().to(index)))
                .service(web::resource("/upload").route(web::post().to(upload)))
                .service(
                    web::resource(["/upload/{id}", "/upload/{id}/{name}"])
                        .route(web::get().to(uploaded)),
                )
                .service(
                    web::resource(["/file/{id}", "/file/{id}/{name}"])
                        .route(web::get().to(download)),
                )
                .service(Files::new("/static", "static").disable_content_disposition())
                .default_service(web::route().to(not_found))
        }
    })
    .bind(bind_address)?
    .run()
    .await
}