diff --git a/Cargo.toml b/Cargo.toml
index 17fc5bf..551ba3c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -9,6 +9,7 @@ blake3 = { version = "1.5.4", features = ["mmap", "rayon", "serde"] }
 chrono = { version = "0.4.38", features = ["serde"] }
 file-format = { version = "0.25.0", features = ["reader"] }
 log = "0.4"
+lz4_flex = "0.11.3"
 maud = { version = "0.26", features = ["rocket"] }
 rand = "0.8.5"
 rocket = { version = "0.5", features = ["json"] }
@@ -19,6 +20,8 @@ uuid = { version = "1.11.0", features = ["v4"] }
 
 [profile.production]
 inherits = "release"
+strip = true
 lto = true
 opt-level = "z"
 codegen-units = 1
+panic = "abort"
diff --git a/src/database.rs b/src/database.rs
index af131c7..985d7f8 100644
--- a/src/database.rs
+++ b/src/database.rs
@@ -1,15 +1,11 @@
 use std::{
-    collections::{hash_map::Values, HashMap, HashSet},
-    ffi::OsStr,
-    fs::{self, File},
-    path::{Path, PathBuf},
-    sync::{Arc, RwLock},
+    collections::{hash_map::Values, HashMap, HashSet}, ffi::OsStr, fs::{self, File}, io, path::{Path, PathBuf}, sync::{Arc, RwLock}
 };
 
 use bincode::{config::Configuration, decode_from_std_read, encode_into_std_write, Decode, Encode};
 use blake3::Hash;
 use chrono::{DateTime, TimeDelta, Utc};
-use log::{info, warn};
+use log::{error, info, warn};
 use rand::distributions::{Alphanumeric, DistString};
 use rocket::{
     serde::{Deserialize, Serialize},
@@ -32,38 +28,52 @@ pub struct Database {
 }
 
 impl Database {
-    pub fn new<P: AsRef<Path>>(path: &P) -> Self {
-        let mut file = File::create_new(path).expect("Could not create database!");
-
+    pub fn new<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
         let output = Self {
             path: path.as_ref().to_path_buf(),
             entries: HashMap::new(),
             hashes: HashMap::new(),
         };
 
-        encode_into_std_write(&output, &mut file, BINCODE_CFG).expect("Could not write database!");
+        // Save the database initially after creating it
+        output.save()?;
 
-        output
+        Ok(output)
+    }
+
+    /// Open the database from a path
+    pub fn open<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
+        let file = File::open(path)?;
+        let mut lz4_file = lz4_flex::frame::FrameDecoder::new(file);
+
+        decode_from_std_read(&mut lz4_file, BINCODE_CFG)
+            .map_err(|e| io::Error::other(format!("failed to open database: {e}")))
     }
 
     /// Open the database from a path, **or create it if it does not exist**
-    pub fn open<P: AsRef<Path>>(path: &P) -> Self {
+    pub fn open_or_new<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
         if !path.as_ref().exists() {
             Self::new(path)
         } else {
-            let mut file = File::open(path).expect("Could not get database file!");
-            decode_from_std_read(&mut file, BINCODE_CFG).expect("Could not decode database")
+            Self::open(path)
         }
     }
 
     /// Save the database to its file
-    pub fn save(&self) {
-        let mut out_path = self.path.clone();
-        out_path.set_extension(".bkp");
-        let mut file = File::create(&out_path).expect("Could not save!");
-        encode_into_std_write(self, &mut file, BINCODE_CFG).expect("Could not write out!");
+    pub fn save(&self) -> Result<(), io::Error> {
+        // Create a file and write the LZ4 compressed stream into it
+        let file = File::create(&self.path.with_extension("bkp"))?;
+        let mut lz4_file = lz4_flex::frame::FrameEncoder::new(file);
+        encode_into_std_write(self, &mut lz4_file, BINCODE_CFG)
+            .map_err(|e| io::Error::other(format!("failed to save database: {e}")))?;
+        lz4_file.try_finish()?;
 
-        fs::rename(out_path, &self.path).unwrap();
+        fs::rename(
+            self.path.with_extension("bkp"),
+            &self.path
+        ).unwrap();
+
+        Ok(())
     }
 
     /// Insert a [`MochiFile`] into the database.
@@ -130,6 +140,10 @@ impl Database {
         self.entries.get(mmid)
     }
 
+    pub fn get_hash(&self, hash: &Hash) -> Option<&HashSet<Mmid>> {
+        self.hashes.get(hash)
+    }
+
     pub fn entries(&self) -> Values<'_, Mmid, MochiFile> {
         self.entries.values()
     }
@@ -163,22 +177,20 @@ pub struct MochiFile {
 }
 
 impl MochiFile {
     /// Create a new file that expires in `expiry`.
-    pub fn new_with_expiry(
+    pub fn new(
         mmid: Mmid,
         name: String,
         extension: &str,
         hash: Hash,
-        expire_duration: TimeDelta,
+        upload: DateTime<Utc>,
+        expiry: DateTime<Utc>,
     ) -> Self {
-        let current = Utc::now();
-        let expiry = current + expire_duration;
-
         Self {
             mmid,
             name,
             extension: extension.to_string(),
             hash,
-            upload_datetime: current,
+            upload_datetime: upload,
             expiry_datetime: expiry,
         }
     }
@@ -244,7 +256,9 @@ fn clean_database(db: &Arc<RwLock<Database>>, file_path: &Path) {
 
     info!("Cleaned database. Removed {removed_entries} expired entries. Removed {removed_files} no longer referenced files.");
 
-    database.save();
+    if let Err(e) = database.save() {
+        error!("Failed to save database: {e}")
+    }
 
     drop(database); // Just to be sure
 }
diff --git a/src/main.rs b/src/main.rs
index ee75143..2de95ec 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -101,6 +101,7 @@ async fn handle_upload(
     db: &State<Arc<RwLock<Database>>>,
     settings: &State<Settings>,
 ) -> Result<Json<ClientResponse>, std::io::Error> {
+    let current = Utc::now();
     // Ensure the expiry time is valid, if not return an error
     let expire_time = if let Ok(t) = parse_time_string(&file_data.expire_time) {
         if settings.duration.restrict_to_allowed && !settings.duration.allowed.contains(&t) {
@@ -128,31 +129,37 @@ async fn handle_upload(
     let temp_filename = settings.temp_dir.join(Uuid::new_v4().to_string());
     file_data.file.persist_to(&temp_filename).await?;
 
-    // Get hash and random identifier
+    // Get hash and random identifier and expiry
     let file_mmid = Mmid::new();
     let file_hash = hash_file(&temp_filename).await?;
+    let expiry = current + expire_time;
 
     // Process filetype
     let file_type = file_format::FileFormat::from_file(&temp_filename)?;
 
-    let constructed_file = MochiFile::new_with_expiry(
+    let constructed_file = MochiFile::new(
         file_mmid.clone(),
         raw_name,
         file_type.extension(),
         file_hash,
-        expire_time,
+        current,
+        expiry
     );
 
-    // Move it to the new proper place
-    std::fs::rename(temp_filename, settings.file_dir.join(file_hash.to_string()))?;
+    // If the hash does not exist in the database, move the file to the backend, else, delete it
+    if db.read().unwrap().get_hash(&file_hash).is_none() {
+        std::fs::rename(temp_filename, settings.file_dir.join(file_hash.to_string()))?;
+    } else {
+        std::fs::remove_file(temp_filename)?;
+    }
 
     db.write().unwrap().insert(&file_mmid, constructed_file.clone());
 
     Ok(Json(ClientResponse {
         status: true,
         name: constructed_file.name().clone(),
-        mmid: Some(file_mmid),
-        hash: file_hash.to_string(),
+        mmid: Some(constructed_file.mmid().clone()),
+        hash: constructed_file.hash().to_string(),
         expires: Some(constructed_file.expiry()),
         ..Default::default()
     }))
@@ -211,7 +218,7 @@ async fn main() {
         ..Default::default()
     };
 
-    let database = Arc::new(RwLock::new(Database::open(&config.database_path)));
+    let database = Arc::new(RwLock::new(Database::open_or_new(&config.database_path).expect("Failed to open or create database")));
     let local_db = database.clone();
 
     // Start monitoring thread, cleaning the database every 2 minutes
@@ -255,6 +262,6 @@ async fn main() {
     info!("Stopping database cleaning thread completed successfully.");
 
     info!("Saving database on shutdown...");
-    local_db.write().unwrap().save();
+    local_db.write().unwrap().save().expect("Failed to save database");
     info!("Saving database completed successfully.");
 }
diff --git a/web/request.js b/web/request.js
index 42d2ab3..6a97ab4 100644
--- a/web/request.js
+++ b/web/request.js
@@ -45,13 +45,12 @@ async function pasteSubmit(evt) {
     const form = document.getElementById("uploadForm");
     const duration = form.elements.duration.value;
 
-    evt.preventDefault();
-
     const files = [];
-    [...evt.clipboardData.files].forEach((file, _) => {
-        // If dropped items aren't files, reject them
+    const len = evt.clipboardData.files.length;
+    for (let i = 0; i < len; i++) {
+        const file = evt.clipboardData.files[i];
         files.push(file);
-    });
+    }
 
     await sendFile(files, duration);
 }
@@ -204,7 +203,9 @@ document.addEventListener("DOMContentLoaded", function(_event) {
     form.addEventListener("submit", formSubmit);
 
     // Respond to file paste events
-    window.addEventListener("paste", (event) => {pasteSubmit(event)});
+    window.addEventListener("paste", (event) => {
+        pasteSubmit(event)
+    });
 
     // Respond to drag and drop stuff
     let fileButton = document.getElementById("fileButton");