diff --git a/confetti-box/src/database/mod.rs b/confetti-box/src/database/mod.rs
index 0dda13e..332b83d 100644
--- a/confetti-box/src/database/mod.rs
+++ b/confetti-box/src/database/mod.rs
@@ -7,7 +7,7 @@ use std::{
 use blake3::Hash;
 use chrono::{DateTime, NaiveDateTime, TimeDelta, Utc};
 use dotenvy::dotenv;
-use log::{error, info, warn};
+use log::{info, warn};
 use rand::distributions::{Alphanumeric, DistString};
 use rocket::{
     form::{self, FromFormField, ValueField},
@@ -16,17 +16,18 @@ use rocket::{
 use serde_with::serde_as;
 use uuid::Uuid;
 
-use diesel::prelude::*;
+use diesel::{expression::AsExpression, prelude::*, serialize::ToSql, sql_types::Binary, sqlite::Sqlite};
 
 pub struct Mochibase {
     path: PathBuf,
 
     /// connection to the db
-    db: Arc<Mutex<SqliteConnection>>,
+    pub db: Arc<Mutex<SqliteConnection>>,
 }
 
 impl Mochibase {
     /// Open the database from a path, **or create it if it does not exist**
     pub fn open_or_new<P: AsRef<str>>(path: &P) -> Result<Self, io::Error> {
+        println!("Open / New");
         dotenv().ok();
         let connection = SqliteConnection::establish(path.as_ref())
             .unwrap_or_else(|e| panic!("Failed to connect, error: {}", e));
@@ -42,6 +43,7 @@ impl Mochibase {
     ///
     /// If the database already contained this value, then `false` is returned.
     pub fn insert(&mut self, mmid_: &Mmid, entry: MochiFile) -> bool {
+        println!("Insert");
         use schema::mochifiles::dsl::*;
 
         let hash_matched_mmids: Vec<Mmid> = mochifiles
@@ -54,7 +56,7 @@ impl Mochibase {
         if hash_matched_mmids.contains(mmid_) {
            return false;
         }
-        entry.insert_into(mochifiles).on_conflict_do_nothing();
+        entry.insert_into(mochifiles).on_conflict_do_nothing().execute(&mut *self.db.lock().unwrap()).unwrap();
 
         true
     }
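
Reviewer note on the `insert` hunk above: in Diesel, `insert_into(...).on_conflict_do_nothing()` only *builds* a statement; the old line never ran it, so nothing was ever written. The `.execute(...)` call the diff adds is what actually sends the INSERT. A minimal sketch of the pattern, assuming it lives in the same module as `Mochibase` (the free-standing signature and `conn` parameter are illustrative, not part of the diff):

```rust
use diesel::prelude::*;

// Hypothetical free-standing version of Mochibase::insert's write path.
fn insert_file(conn: &mut SqliteConnection, entry: MochiFile) -> QueryResult<usize> {
    use schema::mochifiles::dsl::*;

    // Builds an INSERT ... ON CONFLICT DO NOTHING statement; `execute`
    // runs it and returns the number of rows actually inserted.
    entry
        .insert_into(mochifiles)
        .on_conflict_do_nothing()
        .execute(conn)
}
```

Returning the row count would also let the caller distinguish "inserted" from "conflicted", instead of `insert` unconditionally returning `true` after the hash check.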
@@ -63,6 +65,7 @@ impl Mochibase {
     ///
     /// If the database did not contain this value, then `false` is returned.
     pub fn remove_mmid(&mut self, mmid_: &Mmid) -> bool {
+        println!("Remove mmid");
         use schema::mochifiles::dsl::*;
 
         if diesel::delete(mochifiles.filter(mmid.eq(mmid_))).execute(&mut *self.db.lock().unwrap()).expect("Error deleting posts") > 0 {
@@ -72,34 +75,22 @@ impl Mochibase {
         }
     }
 
-    /// Remove a hash from the database entirely.
-    ///
-    /// Will not remove (returns [`Some(false)`] if hash contains references.
-    pub fn remove_hash(&mut self, hash: &Hash) -> Option<bool> {
-        if let Some(s) = self.hashes.get(hash) {
-            if s.is_empty() {
-                self.hashes.remove(hash);
-                Some(true)
-            } else {
-                Some(false)
-            }
-        } else {
-            None
-        }
-    }
-
     /// Checks if a hash contained in the database contains no more [`Mmid`]s.
-    pub fn is_hash_empty(&self, hash: &Hash) -> Option<bool> {
-        self.hashes.get(hash).map(|s| s.is_empty())
+    pub fn is_hash_valid(&self, hash_: &MHash) -> bool {
+        println!("Is Hash Valid?");
+        use schema::mochifiles::dsl::*;
+        !mochifiles.filter(hash.eq(hash_)).select(MochiFile::as_select()).load(&mut *self.db.lock().unwrap()).unwrap().is_empty()
     }
 
     /// Get an entry by its [`Mmid`]. Returns [`None`] if the value does not exist.
     pub fn get(&self, mmid_: &Mmid) -> Option<MochiFile> {
+        println!("get mmid: {mmid_:?}");
         use schema::mochifiles::dsl::*;
         mochifiles.filter(mmid.eq(mmid_)).select(MochiFile::as_select()).load(&mut *self.db.lock().unwrap()).unwrap().get(0).map(|f| f.clone())
     }
 
-    pub fn get_hash(&self, hash_: &String) -> Option<Vec<MochiFile>> {
+    pub fn get_hash(&self, hash_: &MHash) -> Option<Vec<MochiFile>> {
+        println!("get hash: {hash_:?}");
         use schema::mochifiles::dsl::*;
         let files = mochifiles.filter(hash.eq(hash_)).select(MochiFile::as_select()).load(&mut *self.db.lock().unwrap()).expect("failed to load mochifiles by hash");
         if files.is_empty() {
@@ -111,7 +102,33 @@ impl Mochibase {
     pub fn entries(&self) -> Vec<MochiFile> {
         use schema::mochifiles::dsl::*;
 
-        mochifiles.select(MochiFile::as_select()).load(&mut *self.db.lock().unwrap()).expect("failed to load all mochifiles")
+        dbg!(mochifiles.select(MochiFile::as_select()).load(&mut *self.db.lock().unwrap()).expect("failed to load all mochifiles"))
     }
 }
 
+#[derive(Debug, Clone, Serialize, Deserialize, AsExpression)]
+#[diesel(sql_type = Binary)]
+pub struct MHash(pub Hash);
+
+impl Queryable<Binary, Sqlite> for MHash {
+    type Row = *const [u8];
+
+    fn build(row: Self::Row) -> diesel::deserialize::Result<Self> {
+        let mut val = [0u8; 32];
+        val.copy_from_slice(unsafe { row.as_ref().unwrap() });
+        Ok(MHash(Hash::from_bytes(val)))
+    }
+}
+
+impl ToSql<Binary, Sqlite> for MHash {
+    fn to_sql<'b>(&'b self, out: &mut diesel::serialize::Output<'b, '_, Sqlite>) -> diesel::serialize::Result {
+        <[u8; 32] as ToSql<Binary, Sqlite>>::to_sql(self.0.as_bytes(), out)
+    }
+}
+
+impl ToString for MHash {
+    fn to_string(&self) -> String {
+        self.0.to_string()
+    }
+}
 
@@ -131,7 +148,7 @@ pub struct MochiFile {
     mime_type: String,
 
     /// The Blake3 hash of the file
-    hash: String,
+    hash: MHash,
 
     /// The datetime when the file was uploaded
     upload_datetime: chrono::NaiveDateTime,
@@ -147,7 +164,7 @@ impl MochiFile {
         mmid: Mmid,
         name: String,
         mime_type: String,
-        hash: String,
+        hash: Hash,
         upload: NaiveDateTime,
         expiry: NaiveDateTime,
     ) -> Self {
@@ -155,7 +172,7 @@ impl MochiFile {
             mmid,
             name,
             mime_type,
-            hash,
+            hash: MHash(hash),
             upload_datetime: upload,
             expiry_datetime: expiry,
         }
@@ -174,7 +191,7 @@ impl MochiFile {
         datetime > self.expiry_datetime.and_utc()
     }
 
-    pub fn hash(&self) -> &String {
+    pub fn hash(&self) -> &MHash {
         &self.hash
     }
 
@@ -197,9 +214,10 @@ pub fn clean_database(db: &Arc<RwLock<Mochibase>>, file_path: &Path) {
     // Add expired entries to the removal list
     let files_to_remove: Vec<_> = database
         .entries()
+        .iter()
         .filter_map(|e| {
             if e.is_expired() {
-                Some((e.mmid().clone(), *e.hash()))
+                Some((e.mmid().clone(), e.hash().clone()))
             } else {
                 None
             }
         })
@@ -209,24 +227,21 @@ pub fn clean_database(db: &Arc<RwLock<Mochibase>>, file_path: &Path) {
     let mut removed_files = 0;
     let mut removed_entries = 0;
     for e in &files_to_remove {
-        if database.remove_mmid(&e.0) {
-            removed_entries += 1;
-        }
-        if database.is_hash_empty(&e.1).is_some_and(|b| b) {
-            database.remove_hash(&e.1);
+
+        if !database.is_hash_valid(&e.1) {
             if let Err(e) = fs::remove_file(file_path.join(e.1.to_string())) {
                 warn!("Failed to remove expired hash: {}", e);
+            } else {
+                if database.remove_mmid(&e.0) {
+                    removed_entries += 1;
+                }
                 removed_files += 1;
             }
         }
     }
 
     info!("Cleaned database.\n\t| Removed {removed_entries} expired entries.\n\t| Removed {removed_files} no longer referenced files.");
-
-    if let Err(e) = database.save() {
-        error!("Failed to save database: {e}")
-    }
 
     drop(database); // Just to be sure
 }
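
Reviewer note on the new `MHash` wrapper above: the `Queryable` impl deserializes through a raw `*const [u8]` with an unchecked `unsafe` dereference, and `copy_from_slice` panics if the stored blob is not exactly 32 bytes (the migration no longer enforces that). A sketch of a safer shape that goes through `Vec<u8>` instead, assuming diesel 2.1+'s GAT-based `Backend` API; this is a suggestion, not what the diff does:

```rust
use blake3::Hash;
use diesel::{
    backend::Backend,
    deserialize::{self, FromSql},
    sql_types::Binary,
    sqlite::Sqlite,
};

// Alternative deserialization for MHash that surfaces a length error
// instead of risking a panic or UB on a malformed row.
impl FromSql<Binary, Sqlite> for MHash {
    fn from_sql(bytes: <Sqlite as Backend>::RawValue<'_>) -> deserialize::Result<Self> {
        let blob = <Vec<u8> as FromSql<Binary, Sqlite>>::from_sql(bytes)?;
        // try_into fails (rather than panicking) when the blob isn't 32 bytes.
        let arr: [u8; 32] = blob.as_slice().try_into()?;
        Ok(MHash(Hash::from_bytes(arr)))
    }
}
```

With a `FromSql` impl in place, the usual diesel newtype pattern is `#[derive(AsExpression, FromSqlRow)]` plus `ToSql`/`FromSql`, which covers row deserialization without the pointer round-trip.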
diff --git a/confetti-box/src/database/schema.rs b/confetti-box/src/database/schema.rs
index 2c441f6..5276c5b 100644
--- a/confetti-box/src/database/schema.rs
+++ b/confetti-box/src/database/schema.rs
@@ -5,7 +5,7 @@ diesel::table! {
         mmid -> Text,
         name -> Text,
         mime_type -> Text,
-        hash -> Text,
+        hash -> Binary,
         upload_datetime -> Timestamp,
         expiry_datetime -> Timestamp,
     }
diff --git a/confetti-box/src/lib.rs b/confetti-box/src/lib.rs
index 62dbf97..a1f5466 100644
--- a/confetti-box/src/lib.rs
+++ b/confetti-box/src/lib.rs
@@ -17,7 +17,7 @@ use crate::{
     strings::to_pretty_time,
 };
 use chrono::{TimeDelta, Utc};
-use database::{Chunkbase, ChunkedInfo, Mmid, MochiFile, Mochibase};
+use database::{Chunkbase, ChunkedInfo, MHash, Mmid, MochiFile, Mochibase};
 use maud::{html, Markup, PreEscaped};
 use rocket::{
     data::ToByteUnit, futures::{SinkExt as _, StreamExt as _}, get, post,
     serde::{json::{self, Json}, Serialize}, tokio::{
@@ -217,7 +217,7 @@ pub async fn chunked_upload_finish(
     // If the hash does not exist in the database,
     // move the file to the backend, else, delete it
     // This also removes it from the chunk database
-    if main_db.read().unwrap().get_hash(&hash).is_none() {
+    if main_db.read().unwrap().get_hash(&MHash(hash)).is_none() {
         chunk_db.write().unwrap().move_and_remove_file(&uuid, &new_filename)?;
     } else {
         chunk_db.write().unwrap().remove_file(&uuid)?;
@@ -231,8 +231,8 @@ pub async fn chunked_upload_finish(
         chunked_info.1.name,
         file_type.media_type().to_string(),
         hash,
-        now,
-        now + chunked_info.1.expire_duration,
+        now.naive_utc(),
+        (now + chunked_info.1.expire_duration).naive_utc(),
     );
 
     main_db
@@ -322,8 +322,8 @@ pub async fn websocket_upload(
     // If the hash does not exist in the database,
     // move the file to the backend, else, delete it
     // This also removes it from the chunk database
-    if main_db.read().unwrap().get_hash(&hash).is_none() {
-        chunk_db.write().unwrap().move_and_remove_file(&uuid, &new_filename)?;
+    if main_db.read().unwrap().get_hash(&MHash(hash)).is_none() {
+        dbg!(chunk_db.write().unwrap().move_and_remove_file(&uuid, &new_filename))?;
     } else {
         chunk_db.write().unwrap().remove_file(&uuid)?;
     }
@@ -336,8 +336,8 @@ pub async fn websocket_upload(
         info.1.name,
         file_type.media_type().to_string(),
         hash,
-        now,
-        now + info.1.expire_duration,
+        now.naive_utc(),
+        (now + info.1.expire_duration).naive_utc(),
     );
 
     main_db
diff --git a/confetti-box/src/main.rs b/confetti-box/src/main.rs
index ca59f9e..3497ca2 100644
--- a/confetti-box/src/main.rs
+++ b/confetti-box/src/main.rs
@@ -42,10 +42,10 @@ async fn main() {
     };
 
     let database = Arc::new(RwLock::new(
-        Mochibase::open_or_new(&config.database_path).expect("Failed to open or create database"),
+        Mochibase::open_or_new(&config.database_path.to_str().expect("Couldn't convert db path to str")).expect("Failed to open or create database"),
     ));
     let chunkbase = Arc::new(RwLock::new(Chunkbase::default()));
-    let local_db = database.clone();
+    // let local_db = database.clone();
     let local_chunk = chunkbase.clone();
 
     let (shutdown, rx) = tokio::sync::broadcast::channel(1);
@@ -104,13 +104,13 @@ async fn main() {
     shutdown.send(()).expect("Failed to stop cleaner thread.");
     info!("Stopping database cleaning thread completed successfully.");
 
-    info!("Saving database on shutdown...");
-    local_db
-        .write()
-        .unwrap()
-        .save()
-        .expect("Failed to save database");
-    info!("Saving database completed successfully.");
+    // info!("Saving database on shutdown...");
+    // local_db
+    //     .write()
+    //     .unwrap()
+    //     .save()
+    //     .expect("Failed to save database");
+    // info!("Saving database completed successfully.");
 
     info!("Deleting chunk data on shutdown...");
     local_chunk
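
Reviewer note on the timestamp changes in lib.rs above: the `Timestamp` column maps to `chrono::NaiveDateTime`, so `DateTime<Utc>` values are now converted with `naive_utc()` before insertion, and `MochiFile::is_expired` re-attaches UTC with `and_utc()` when comparing. A compact sketch of that round-trip (free functions for illustration; the real logic lives in `MochiFile::new` and `is_expired`):

```rust
use chrono::{DateTime, NaiveDateTime, TimeDelta, Utc};

// Strip the timezone for storage in the Timestamp columns.
fn expiry_fields(now: DateTime<Utc>, expire: TimeDelta) -> (NaiveDateTime, NaiveDateTime) {
    (now.naive_utc(), (now + expire).naive_utc())
}

// Re-attach UTC when comparing, mirroring MochiFile::is_expired.
fn is_expired(expiry: NaiveDateTime, now: DateTime<Utc>) -> bool {
    now > expiry.and_utc()
}

fn main() {
    let now = Utc::now();
    let (_uploaded, expiry) = expiry_fields(now, TimeDelta::hours(24));
    assert!(!is_expired(expiry, now));
}
```

Since the naive values are always derived from UTC and compared against UTC, the round-trip stays consistent as long as nothing ever stores a local time in these columns.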
diff --git a/migrations/2025-05-22-234448_create_files/up.sql b/migrations/2025-05-22-234448_create_files/up.sql
index ca843b7..59db67b 100644
--- a/migrations/2025-05-22-234448_create_files/up.sql
+++ b/migrations/2025-05-22-234448_create_files/up.sql
@@ -4,7 +4,7 @@ CREATE TABLE mochifiles (
     mmid TEXT PRIMARY KEY NOT NULL,
     name TEXT NOT NULL,
     mime_type TEXT NOT NULL,
-    hash TEXT NOT NULL UNIQUE,
+    hash BLOB NOT NULL,
     upload_datetime DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
     expiry_datetime DATETIME NOT NULL
 )
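
Reviewer note on the migration: switching `hash` from `TEXT UNIQUE` to `BLOB` and dropping `UNIQUE` is deliberate, since deduplicated storage means several `mmid` rows can reference one hash; `is_hash_valid` now stands in for the old per-hash reference set. If loading full rows just to test emptiness ever becomes a concern, a COUNT query would do the same job without materializing them; a hypothetical sketch, not part of the diff:

```rust
use diesel::prelude::*;

// Counts how many entries still reference a hash. The free-standing
// signature and `conn` parameter are illustrative; the diff loads rows
// and checks is_empty() instead.
fn hash_ref_count(conn: &mut SqliteConnection, hash_: &MHash) -> QueryResult<i64> {
    use schema::mochifiles::dsl::*;

    mochifiles.filter(hash.eq(hash_)).count().get_result(conn)
}
```

A file on disk is then safe to delete exactly when the count reaches zero, which matches the `!database.is_hash_valid(&e.1)` check in `clean_database`.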