Mirror of https://github.com/Dangoware/confetti-box.git (synced 2025-04-19 23:32:58 -05:00)

Commit 2848ff1f54 (parent 2ee07e7f3e): Compress database with lz4 and improve database handling
4 changed files with 67 additions and 42 deletions
Cargo.toml:

@@ -9,6 +9,7 @@ blake3 = { version = "1.5.4", features = ["mmap", "rayon", "serde"] }
 chrono = { version = "0.4.38", features = ["serde"] }
 file-format = { version = "0.25.0", features = ["reader"] }
 log = "0.4"
+lz4_flex = "0.11.3"
 maud = { version = "0.26", features = ["rocket"] }
 rand = "0.8.5"
 rocket = { version = "0.5", features = ["json"] }
@@ -19,6 +20,8 @@ uuid = { version = "1.11.0", features = ["v4"] }
 
 [profile.production]
 inherits = "release"
+strip = true
 lto = true
 opt-level = "z"
 codegen-units = 1
+panic = "abort"
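lz4_flex is the newly added dependency; it provides the LZ4 frame encoder/decoder that the database code below wraps around its bincode stream. As a reference point, a minimal sketch of the frame round trip, assuming lz4_flex 0.11 (the payload bytes are a placeholder, not project data):

    use std::io::{self, Read, Write};

    use lz4_flex::frame::{FrameDecoder, FrameEncoder};

    fn main() -> io::Result<()> {
        // Compress some bytes into an in-memory LZ4 frame
        let mut encoder = FrameEncoder::new(Vec::new());
        encoder.write_all(b"example payload")?;
        let compressed = encoder
            .finish()
            .map_err(|e| io::Error::other(format!("lz4 finish failed: {e}")))?;

        // Decompress by wrapping any reader in a FrameDecoder
        let mut decoder = FrameDecoder::new(compressed.as_slice());
        let mut roundtrip = Vec::new();
        decoder.read_to_end(&mut roundtrip)?;
        assert_eq!(roundtrip, b"example payload".to_vec());
        Ok(())
    }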
src/database.rs:

@@ -1,15 +1,11 @@
 use std::{
-    collections::{hash_map::Values, HashMap, HashSet},
-    ffi::OsStr,
-    fs::{self, File},
-    path::{Path, PathBuf},
-    sync::{Arc, RwLock},
+    collections::{hash_map::Values, HashMap, HashSet}, ffi::OsStr, fs::{self, File}, io, path::{Path, PathBuf}, sync::{Arc, RwLock}
 };
 
 use bincode::{config::Configuration, decode_from_std_read, encode_into_std_write, Decode, Encode};
 use blake3::Hash;
 use chrono::{DateTime, TimeDelta, Utc};
-use log::{info, warn};
+use log::{error, info, warn};
 use rand::distributions::{Alphanumeric, DistString};
 use rocket::{
     serde::{Deserialize, Serialize},
@@ -32,38 +28,52 @@ pub struct Database {
 }
 
 impl Database {
-    pub fn new<P: AsRef<Path>>(path: &P) -> Self {
-        let mut file = File::create_new(path).expect("Could not create database!");
-
+    pub fn new<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
         let output = Self {
             path: path.as_ref().to_path_buf(),
             entries: HashMap::new(),
             hashes: HashMap::new(),
         };
 
-        encode_into_std_write(&output, &mut file, BINCODE_CFG).expect("Could not write database!");
+        // Save the database initially after creating it
+        output.save()?;
 
-        output
+        Ok(output)
+    }
+
+    /// Open the database from a path
+    pub fn open<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
+        let file = File::open(path)?;
+        let mut lz4_file = lz4_flex::frame::FrameDecoder::new(file);
+
+        decode_from_std_read(&mut lz4_file, BINCODE_CFG)
+            .map_err(|e| io::Error::other(format!("failed to open database: {e}")))
     }
 
     /// Open the database from a path, **or create it if it does not exist**
-    pub fn open<P: AsRef<Path>>(path: &P) -> Self {
+    pub fn open_or_new<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
         if !path.as_ref().exists() {
             Self::new(path)
         } else {
-            let mut file = File::open(path).expect("Could not get database file!");
-            decode_from_std_read(&mut file, BINCODE_CFG).expect("Could not decode database")
+            Self::open(path)
         }
     }
 
     /// Save the database to its file
-    pub fn save(&self) {
-        let mut out_path = self.path.clone();
-        out_path.set_extension(".bkp");
-        let mut file = File::create(&out_path).expect("Could not save!");
-        encode_into_std_write(self, &mut file, BINCODE_CFG).expect("Could not write out!");
+    pub fn save(&self) -> Result<(), io::Error> {
+        // Create a file and write the LZ4 compressed stream into it
+        let file = File::create(&self.path.with_extension("bkp"))?;
+        let mut lz4_file = lz4_flex::frame::FrameEncoder::new(file);
+        encode_into_std_write(self, &mut lz4_file, BINCODE_CFG)
+            .map_err(|e| io::Error::other(format!("failed to save database: {e}")))?;
+        lz4_file.try_finish()?;
 
-        fs::rename(out_path, &self.path).unwrap();
+        fs::rename(
+            self.path.with_extension("bkp"),
+            &self.path
+        ).unwrap();
+
+        Ok(())
     }
 
     /// Insert a [`MochiFile`] into the database.
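The open, create, and save paths now return io::Error instead of panicking. A hedged sketch of how a caller might use the new API; the function name and path handling here are illustrative, not taken from the project:

    use std::{io, path::PathBuf};

    // Hypothetical caller: open (or create) the database and persist it,
    // propagating the io::Error the new methods return.
    fn startup(db_path: &PathBuf) -> Result<(), io::Error> {
        let db = Database::open_or_new(db_path)?;
        // ... use the database here ...
        db.save()?; // writes a .bkp sibling as an LZ4 frame, then renames it over the database path
        Ok(())
    }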
@@ -130,6 +140,10 @@ impl Database {
         self.entries.get(mmid)
     }
 
+    pub fn get_hash(&self, hash: &Hash) -> Option<&HashSet<Mmid>> {
+        self.hashes.get(hash)
+    }
+
     pub fn entries(&self) -> Values<'_, Mmid, MochiFile> {
         self.entries.values()
     }
@@ -163,22 +177,20 @@ pub struct MochiFile {
 
 impl MochiFile {
     /// Create a new file that expires in `expiry`.
-    pub fn new_with_expiry(
+    pub fn new(
         mmid: Mmid,
         name: String,
         extension: &str,
         hash: Hash,
-        expire_duration: TimeDelta,
+        upload: DateTime<Utc>,
+        expiry: DateTime<Utc>,
     ) -> Self {
-        let current = Utc::now();
-        let expiry = current + expire_duration;
-
         Self {
             mmid,
             name,
             extension: extension.to_string(),
             hash,
-            upload_datetime: current,
+            upload_datetime: upload,
             expiry_datetime: expiry,
         }
     }
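Callers now compute both timestamps themselves and pass them in. A hedged snippet showing the caller side, assuming the surrounding module's types; the name, hash input, and 24-hour expiry are placeholders:

    use chrono::{TimeDelta, Utc};

    // Hypothetical caller-side construction with explicit timestamps
    let now = Utc::now();
    let file = MochiFile::new(
        Mmid::new(),                                // random identifier, as in handle_upload
        "example.txt".to_string(),
        "txt",
        blake3::hash(b"placeholder file contents"), // blake3::Hash of the stored file
        now,                                        // upload_datetime
        now + TimeDelta::hours(24),                 // expiry_datetime
    );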
@@ -244,7 +256,9 @@ fn clean_database(db: &Arc<RwLock<Database>>, file_path: &Path) {
 
     info!("Cleaned database. Removed {removed_entries} expired entries. Removed {removed_files} no longer referenced files.");
 
-    database.save();
+    if let Err(e) = database.save() {
+        error!("Failed to save database: {e}")
+    }
     drop(database); // Just to be sure
 }
 

src/main.rs:
@@ -101,6 +101,7 @@ async fn handle_upload(
     db: &State<Arc<RwLock<Database>>>,
     settings: &State<Settings>,
 ) -> Result<Json<ClientResponse>, std::io::Error> {
+    let current = Utc::now();
     // Ensure the expiry time is valid, if not return an error
     let expire_time = if let Ok(t) = parse_time_string(&file_data.expire_time) {
         if settings.duration.restrict_to_allowed && !settings.duration.allowed.contains(&t) {
@@ -128,31 +129,37 @@ async fn handle_upload(
     let temp_filename = settings.temp_dir.join(Uuid::new_v4().to_string());
     file_data.file.persist_to(&temp_filename).await?;
 
-    // Get hash and random identifier
+    // Get hash and random identifier and expiry
     let file_mmid = Mmid::new();
     let file_hash = hash_file(&temp_filename).await?;
+    let expiry = current + expire_time;
 
     // Process filetype
     let file_type = file_format::FileFormat::from_file(&temp_filename)?;
 
-    let constructed_file = MochiFile::new_with_expiry(
+    let constructed_file = MochiFile::new(
         file_mmid.clone(),
         raw_name,
         file_type.extension(),
         file_hash,
-        expire_time,
+        current,
+        expiry
     );
 
-    // Move it to the new proper place
+    // If the hash does not exist in the database, move the file to the backend, else, delete it
+    if db.read().unwrap().get_hash(&file_hash).is_none() {
     std::fs::rename(temp_filename, settings.file_dir.join(file_hash.to_string()))?;
+    } else {
+        std::fs::remove_file(temp_filename)?;
+    }
 
     db.write().unwrap().insert(&file_mmid, constructed_file.clone());
 
     Ok(Json(ClientResponse {
         status: true,
         name: constructed_file.name().clone(),
-        mmid: Some(file_mmid),
-        hash: file_hash.to_string(),
+        mmid: Some(constructed_file.mmid().clone()),
+        hash: constructed_file.hash().to_string(),
         expires: Some(constructed_file.expiry()),
         ..Default::default()
     }))
@@ -211,7 +218,7 @@ async fn main() {
         ..Default::default()
     };
 
-    let database = Arc::new(RwLock::new(Database::open(&config.database_path)));
+    let database = Arc::new(RwLock::new(Database::open_or_new(&config.database_path).expect("Failed to open or create database")));
     let local_db = database.clone();
 
     // Start monitoring thread, cleaning the database every 2 minutes
@@ -255,6 +262,6 @@ async fn main() {
     info!("Stopping database cleaning thread completed successfully.");
 
     info!("Saving database on shutdown...");
-    local_db.write().unwrap().save();
+    local_db.write().unwrap().save().expect("Failed to save database");
     info!("Saving database completed successfully.");
 }
Upload page script:

@@ -45,13 +45,12 @@ async function pasteSubmit(evt) {
     const form = document.getElementById("uploadForm");
     const duration = form.elements.duration.value;
 
-    evt.preventDefault();
-
     const files = [];
-    [...evt.clipboardData.files].forEach((file, _) => {
-        // If dropped items aren't files, reject them
+    const len = evt.clipboardData.files.length;
+    for (let i = 0; i < len; i++) {
+        const file = evt.clipboardData.files[i];
         files.push(file);
-    });
+    }
 
     await sendFile(files, duration);
 }
@@ -204,7 +203,9 @@ document.addEventListener("DOMContentLoaded", function(_event) {
     form.addEventListener("submit", formSubmit);
 
     // Respond to file paste events
-    window.addEventListener("paste", (event) => {pasteSubmit(event)});
+    window.addEventListener("paste", (event) => {
+        pasteSubmit(event)
+    });
 
     // Respond to drag and drop stuff
     let fileButton = document.getElementById("fileButton");