Mirror of https://github.com/Dangoware/confetti-box.git, synced 2025-06-22 22:53:02 -05:00
re implemented blake3 Hash
parent 5763e3d6f8
commit def6f1a689
5 changed files with 72 additions and 57 deletions
@@ -7,7 +7,7 @@ use std::{
 use blake3::Hash;
 use chrono::{DateTime, NaiveDateTime, TimeDelta, Utc};
 use dotenvy::dotenv;
-use log::{error, info, warn};
+use log::{info, warn};
 use rand::distributions::{Alphanumeric, DistString};
 use rocket::{
     form::{self, FromFormField, ValueField},
@@ -16,17 +16,18 @@ use rocket::{
 use serde_with::serde_as;
 use uuid::Uuid;
 
-use diesel::prelude::*;
+use diesel::{expression::AsExpression, prelude::*, serialize::ToSql, sql_types::Binary, sqlite::Sqlite};
 
 pub struct Mochibase {
     path: PathBuf,
     /// connection to the db
-    db: Arc<Mutex<SqliteConnection>>,
+    pub db: Arc<Mutex<SqliteConnection>>,
 }
 
 impl Mochibase {
     /// Open the database from a path, **or create it if it does not exist**
     pub fn open_or_new<P: AsRef<str>>(path: &P) -> Result<Self, io::Error> {
+        println!("Open / New");
         dotenv().ok();
         let connection = SqliteConnection::establish(path.as_ref())
             .unwrap_or_else(|e| panic!("Failed to connect, error: {}", e));
@@ -42,6 +43,7 @@ impl Mochibase {
     ///
     /// If the database already contained this value, then `false` is returned.
     pub fn insert(&mut self, mmid_: &Mmid, entry: MochiFile) -> bool {
+        println!("Insert");
         use schema::mochifiles::dsl::*;
 
         let hash_matched_mmids: Vec<Mmid> = mochifiles
@@ -54,7 +56,7 @@ impl Mochibase {
         if hash_matched_mmids.contains(mmid_) {
             return false;
         }
-        entry.insert_into(mochifiles).on_conflict_do_nothing();
+        entry.insert_into(mochifiles).on_conflict_do_nothing().execute(&mut *self.db.lock().unwrap()).unwrap();
 
         true
     }
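Worth noting for review: a diesel insert statement is inert until it is executed, which is why the old line was a no-op and the new line chains `.execute(...)`. A minimal sketch of that distinction, assuming an in-memory SQLite database and a pared-down two-column table (both hypothetical):

```rust
use diesel::prelude::*;

diesel::table! {
    mochifiles (mmid) {
        mmid -> Text,
        name -> Text,
    }
}

fn main() {
    let mut conn = SqliteConnection::establish(":memory:").unwrap();
    diesel::sql_query("CREATE TABLE mochifiles (mmid TEXT PRIMARY KEY NOT NULL, name TEXT NOT NULL)")
        .execute(&mut conn)
        .unwrap();

    // Building the statement does nothing on its own...
    let stmt = diesel::insert_into(mochifiles::table)
        .values((mochifiles::mmid.eq("abc123"), mochifiles::name.eq("file.txt")))
        .on_conflict_do_nothing();

    // ...the row only lands once execute() runs it.
    let rows = stmt.execute(&mut conn).unwrap();
    assert_eq!(rows, 1);
}
```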
@@ -63,6 +65,7 @@ impl Mochibase {
     ///
     /// If the database did not contain this value, then `false` is returned.
     pub fn remove_mmid(&mut self, mmid_: &Mmid) -> bool {
+        println!("Remove mmid");
         use schema::mochifiles::dsl::*;
 
         if diesel::delete(mochifiles.filter(mmid.eq(mmid_))).execute(&mut *self.db.lock().unwrap()).expect("Error deleting posts") > 0 {
@@ -72,34 +75,22 @@ impl Mochibase {
         }
     }
 
-    /// Remove a hash from the database entirely.
-    ///
-    /// Will not remove (returns [`Some(false)`] if hash contains references.
-    pub fn remove_hash(&mut self, hash: &Hash) -> Option<bool> {
-        if let Some(s) = self.hashes.get(hash) {
-            if s.is_empty() {
-                self.hashes.remove(hash);
-                Some(true)
-            } else {
-                Some(false)
-            }
-        } else {
-            None
-        }
-    }
-
     /// Checks if a hash contained in the database contains no more [`Mmid`]s.
-    pub fn is_hash_empty(&self, hash: &Hash) -> Option<bool> {
-        self.hashes.get(hash).map(|s| s.is_empty())
+    pub fn is_hash_valid(&self, hash_: &MHash) -> bool {
+        println!("Is Hash Valid?");
+        use schema::mochifiles::dsl::*;
+        !mochifiles.filter(hash.eq(hash_)).select(MochiFile::as_select()).load(&mut *self.db.lock().unwrap()).unwrap().is_empty()
     }
 
     /// Get an entry by its [`Mmid`]. Returns [`None`] if the value does not exist.
     pub fn get(&self, mmid_: &Mmid) -> Option<MochiFile> {
+        println!("get mmid: {mmid_:?}");
         use schema::mochifiles::dsl::*;
         mochifiles.filter(mmid.eq(mmid_)).select(MochiFile::as_select()).load(&mut *self.db.lock().unwrap()).unwrap().get(0).map(|f| f.clone())
     }
 
-    pub fn get_hash(&self, hash_: &String) -> Option<Vec<MochiFile>> {
+    pub fn get_hash(&self, hash_: &MHash) -> Option<Vec<MochiFile>> {
+        println!("get hash: {hash_:?}");
         use schema::mochifiles::dsl::*;
         let files = mochifiles.filter(hash.eq(hash_)).select(MochiFile::as_select()).load(&mut *self.db.lock().unwrap()).expect("failed to load mochifiles by hash");
         if files.is_empty() {
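`is_hash_valid` answers 'does any row still reference this hash?' by loading every matching row and testing for emptiness. A lighter alternative sketch, not what this commit does, counting matches on the SQL side instead (schema pared down to the two relevant columns, with a raw byte slice standing in for `MHash`):

```rust
use diesel::prelude::*;

diesel::table! {
    mochifiles (mmid) {
        mmid -> Text,
        hash -> Binary,
    }
}

/// Returns true while at least one row still references `hash_bytes`,
/// without materialising the rows themselves.
fn hash_in_use(conn: &mut SqliteConnection, hash_bytes: &[u8]) -> bool {
    use self::mochifiles::dsl::*;
    let n: i64 = mochifiles
        .filter(hash.eq(hash_bytes))
        .count()
        .get_result(conn)
        .unwrap_or(0);
    n > 0
}

fn main() {
    let mut conn = SqliteConnection::establish(":memory:").unwrap();
    diesel::sql_query("CREATE TABLE mochifiles (mmid TEXT PRIMARY KEY NOT NULL, hash BLOB NOT NULL)")
        .execute(&mut conn)
        .unwrap();
    assert!(!hash_in_use(&mut conn, &[0u8; 32]));
}
```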
@@ -111,7 +102,33 @@ impl Mochibase {
 
     pub fn entries(&self) -> Vec<MochiFile> {
         use schema::mochifiles::dsl::*;
-        mochifiles.select(MochiFile::as_select()).load(&mut *self.db.lock().unwrap()).expect("failed to load all mochifiles")
+        dbg!(mochifiles.select(MochiFile::as_select()).load(&mut *self.db.lock().unwrap()).expect("failed to load all mochifiles"))
+    }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, AsExpression)]
+#[diesel(sql_type = Binary)]
+pub struct MHash(pub Hash);
+
+impl Queryable<Binary, Sqlite> for MHash {
+    type Row = *const [u8];
+
+    fn build(row: Self::Row) -> diesel::deserialize::Result<Self> {
+        let mut val = [0u8; 32];
+        val.copy_from_slice(unsafe { row.as_ref().unwrap() });
+        Ok(MHash(Hash::from_bytes(val)))
+    }
+}
+
+impl ToSql<Binary, Sqlite> for MHash {
+    fn to_sql<'b>(&'b self, out: &mut diesel::serialize::Output<'b, '_, Sqlite>) -> diesel::serialize::Result {
+        <[u8; 32] as ToSql<Binary, diesel::sqlite::Sqlite>>::to_sql(self.0.as_bytes(), out)
+    }
+}
+
+impl ToString for MHash {
+    fn to_string(&self) -> String {
+        self.0.to_string()
     }
 }
 
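The new `MHash` wrapper maps blake3's fixed 32-byte digest onto a `Binary` column: `ToSql` writes the raw bytes, and `Queryable` rebuilds the `Hash` from them. A small sketch of the byte round trip those impls rely on (the hashed input is a stand-in):

```rust
use blake3::Hash;

fn main() {
    // Hash some stand-in content.
    let original: Hash = blake3::hash(b"file contents");

    // ToSql writes the raw digest: exactly 32 bytes.
    let stored: [u8; 32] = *original.as_bytes();

    // Queryable rebuilds the Hash from those same bytes.
    let restored = Hash::from_bytes(stored);
    assert_eq!(original, restored);

    // to_string() yields the lowercase hex form used for on-disk filenames.
    println!("{}", restored);
}
```

The `Queryable` impl in the commit goes through a raw `*const [u8]` row with an `unsafe` dereference; diesel's `FromSql` trait is the more usual route for custom binary types, so the sketch above captures only the byte-level contract, not a recommendation of that approach.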
@@ -131,7 +148,7 @@ pub struct MochiFile {
     mime_type: String,
 
     /// The Blake3 hash of the file
-    hash: String,
+    hash: MHash,
 
     /// The datetime when the file was uploaded
     upload_datetime: chrono::NaiveDateTime,
@@ -147,7 +164,7 @@ impl MochiFile {
         mmid: Mmid,
         name: String,
         mime_type: String,
-        hash: String,
+        hash: Hash,
         upload: NaiveDateTime,
         expiry: NaiveDateTime,
     ) -> Self {
@@ -155,7 +172,7 @@ impl MochiFile {
             mmid,
             name,
             mime_type,
-            hash,
+            hash: MHash(hash),
             upload_datetime: upload,
             expiry_datetime: expiry,
         }
@@ -174,7 +191,7 @@ impl MochiFile {
         datetime > self.expiry_datetime.and_utc()
     }
 
-    pub fn hash(&self) -> &String {
+    pub fn hash(&self) -> &MHash {
         &self.hash
     }
 
@@ -197,9 +214,10 @@ pub fn clean_database(db: &Arc<RwLock<Mochibase>>, file_path: &Path) {
     // Add expired entries to the removal list
     let files_to_remove: Vec<_> = database
         .entries()
+        .iter()
         .filter_map(|e| {
             if e.is_expired() {
-                Some((e.mmid().clone(), *e.hash()))
+                Some((e.mmid().clone(), e.hash().clone()))
             } else {
                 None
             }
@@ -209,24 +227,21 @@ pub fn clean_database(db: &Arc<RwLock<Mochibase>>, file_path: &Path) {
     let mut removed_files = 0;
     let mut removed_entries = 0;
     for e in &files_to_remove {
-        if database.remove_mmid(&e.0) {
-            removed_entries += 1;
-        }
-        if database.is_hash_empty(&e.1).is_some_and(|b| b) {
-            database.remove_hash(&e.1);
+        if !database.is_hash_valid(&e.1) {
             if let Err(e) = fs::remove_file(file_path.join(e.1.to_string())) {
                 warn!("Failed to remove expired hash: {}", e);
 
             } else {
+                if database.remove_mmid(&e.0) {
+                    removed_entries += 1;
+                }
                 removed_files += 1;
             }
         }
     }
 
     info!("Cleaned database.\n\t| Removed {removed_entries} expired entries.\n\t| Removed {removed_files} no longer referenced files.");
 
-    if let Err(e) = database.save() {
-        error!("Failed to save database: {e}")
-    }
     drop(database); // Just to be sure
 }
 
@@ -5,7 +5,7 @@ diesel::table! {
     mmid -> Text,
     name -> Text,
     mime_type -> Text,
-    hash -> Text,
+    hash -> Binary,
     upload_datetime -> Timestamp,
     expiry_datetime -> Timestamp,
 }
 
@@ -17,7 +17,7 @@ use crate::{
     strings::to_pretty_time,
 };
 use chrono::{TimeDelta, Utc};
-use database::{Chunkbase, ChunkedInfo, Mmid, MochiFile, Mochibase};
+use database::{Chunkbase, ChunkedInfo, MHash, Mmid, MochiFile, Mochibase};
 use maud::{html, Markup, PreEscaped};
 use rocket::{
     data::ToByteUnit, futures::{SinkExt as _, StreamExt as _}, get, post, serde::{json::{self, Json}, Serialize}, tokio::{
@@ -217,7 +217,7 @@ pub async fn chunked_upload_finish(
     // If the hash does not exist in the database,
     // move the file to the backend, else, delete it
     // This also removes it from the chunk database
-    if main_db.read().unwrap().get_hash(&hash).is_none() {
+    if main_db.read().unwrap().get_hash(&MHash(hash)).is_none() {
         chunk_db.write().unwrap().move_and_remove_file(&uuid, &new_filename)?;
     } else {
         chunk_db.write().unwrap().remove_file(&uuid)?;
@@ -231,8 +231,8 @@ pub async fn chunked_upload_finish(
         chunked_info.1.name,
         file_type.media_type().to_string(),
         hash,
-        now,
-        now + chunked_info.1.expire_duration,
+        now.naive_utc(),
+        (now + chunked_info.1.expire_duration).naive_utc(),
     );
 
     main_db
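The two `.naive_utc()` calls exist because diesel maps SQLite `Timestamp` columns to `chrono::NaiveDateTime`, so the wall-clock `DateTime<Utc>` values must be converted before insertion. A quick sketch, assuming chrono 0.4 (the 24-hour expiry is a made-up duration):

```rust
use chrono::{NaiveDateTime, TimeDelta, Utc};

fn main() {
    let now = Utc::now(); // DateTime<Utc>

    // Drop the timezone marker; the instant is unchanged, just stored naive.
    let upload: NaiveDateTime = now.naive_utc();
    let expiry: NaiveDateTime = (now + TimeDelta::hours(24)).naive_utc();

    println!("upload {upload}, expiry {expiry}");
}
```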
@@ -322,8 +322,8 @@ pub async fn websocket_upload(
     // If the hash does not exist in the database,
     // move the file to the backend, else, delete it
     // This also removes it from the chunk database
-    if main_db.read().unwrap().get_hash(&hash).is_none() {
-        chunk_db.write().unwrap().move_and_remove_file(&uuid, &new_filename)?;
+    if main_db.read().unwrap().get_hash(&MHash(hash)).is_none() {
+        dbg!(chunk_db.write().unwrap().move_and_remove_file(&uuid, &new_filename))?;
     } else {
         chunk_db.write().unwrap().remove_file(&uuid)?;
     }
@@ -336,8 +336,8 @@ pub async fn websocket_upload(
         info.1.name,
         file_type.media_type().to_string(),
         hash,
-        now,
-        now + info.1.expire_duration,
+        now.naive_utc(),
+        (now + info.1.expire_duration).naive_utc(),
     );
 
     main_db
 
@@ -42,10 +42,10 @@ async fn main() {
     };
 
     let database = Arc::new(RwLock::new(
-        Mochibase::open_or_new(&config.database_path).expect("Failed to open or create database"),
+        Mochibase::open_or_new(&config.database_path.to_str().expect("Couldn't convert db path to str")).expect("Failed to open or create database"),
     ));
     let chunkbase = Arc::new(RwLock::new(Chunkbase::default()));
-    let local_db = database.clone();
+    // let local_db = database.clone();
     let local_chunk = chunkbase.clone();
 
     let (shutdown, rx) = tokio::sync::broadcast::channel(1);
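`open_or_new` now takes `AsRef<str>` rather than a path type, so `main` has to go through `PathBuf::to_str()`, which is fallible because paths are not guaranteed to be valid UTF-8. A sketch of just that conversion (the path literal is a placeholder):

```rust
use std::path::PathBuf;

fn main() {
    let database_path = PathBuf::from("./confetti-box.sqlite"); // placeholder path

    // to_str() returns None for non-UTF-8 paths, hence the expect().
    let as_str: &str = database_path
        .to_str()
        .expect("Couldn't convert db path to str");

    println!("{as_str}");
}
```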
@@ -104,13 +104,13 @@ async fn main() {
     shutdown.send(()).expect("Failed to stop cleaner thread.");
     info!("Stopping database cleaning thread completed successfully.");
 
-    info!("Saving database on shutdown...");
-    local_db
-        .write()
-        .unwrap()
-        .save()
-        .expect("Failed to save database");
-    info!("Saving database completed successfully.");
+    // info!("Saving database on shutdown...");
+    // local_db
+    //     .write()
+    //     .unwrap()
+    //     .save()
+    //     .expect("Failed to save database");
+    // info!("Saving database completed successfully.");
 
     info!("Deleting chunk data on shutdown...");
     local_chunk
 
@@ -4,7 +4,7 @@ CREATE TABLE mochifiles (
     mmid TEXT PRIMARY KEY NOT NULL,
     name TEXT NOT NULL,
     mime_type TEXT NOT NULL,
-    hash TEXT NOT NULL UNIQUE,
+    hash BLOB NOT NULL,
     upload_datetime DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
     expiry_datetime DATETIME NOT NULL
 )
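Switching the column from `TEXT` to `BLOB` halves its storage: the raw blake3 digest is 32 bytes, while the old hex rendering is 64 characters. The migration also drops `UNIQUE`, consistent with several mmids now sharing one file hash. A one-line check of the size claim (stand-in input):

```rust
fn main() {
    let digest = blake3::hash(b"example");
    assert_eq!(digest.as_bytes().len(), 32); // raw digest -> BLOB column
    assert_eq!(digest.to_hex().len(), 64);   // hex form -> old TEXT column
}
```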