Switched to a better database lookup system

This commit is contained in:
G2-Games 2024-10-27 06:31:00 -05:00
parent ebada71e7a
commit aaaea89502
6 changed files with 76 additions and 35 deletions

19
Cargo.lock generated
View file

@ -236,6 +236,7 @@ dependencies = [
"bincode", "bincode",
"blake3", "blake3",
"chrono", "chrono",
"file-format",
"log", "log",
"maud", "maud",
"rand", "rand",
@ -380,9 +381,9 @@ checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
[[package]] [[package]]
name = "encoding_rs" name = "encoding_rs"
version = "0.8.34" version = "0.8.35"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
] ]
@ -423,6 +424,12 @@ dependencies = [
"version_check", "version_check",
] ]
[[package]]
name = "file-format"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ffe3a660c3a1b10e96f304a9413d673b2118d62e4520f7ddf4a4faccfe8b9b9"
[[package]] [[package]]
name = "fnv" name = "fnv"
version = "1.0.7" version = "1.0.7"
@ -983,9 +990,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]] [[package]]
name = "pin-project-lite" name = "pin-project-lite"
version = "0.2.14" version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff"
[[package]] [[package]]
name = "pin-utils" name = "pin-utils"
@ -1133,9 +1140,9 @@ dependencies = [
[[package]] [[package]]
name = "regex" name = "regex"
version = "1.11.0" version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [ dependencies = [
"aho-corasick", "aho-corasick",
"memchr", "memchr",

View file

@ -7,6 +7,7 @@ edition = "2021"
bincode = { version = "2.0.0-rc.3", features = ["serde"] } bincode = { version = "2.0.0-rc.3", features = ["serde"] }
blake3 = { version = "1.5.4", features = ["mmap", "rayon", "serde"] } blake3 = { version = "1.5.4", features = ["mmap", "rayon", "serde"] }
chrono = { version = "0.4.38", features = ["serde"] } chrono = { version = "0.4.38", features = ["serde"] }
file-format = { version = "0.25.0", features = ["reader"] }
log = "0.4" log = "0.4"
maud = { version = "0.26", features = ["rocket"] } maud = { version = "0.26", features = ["rocket"] }
rand = "0.8.5" rand = "0.8.5"

View file

@ -5,6 +5,7 @@ use std::{
use bincode::{config::Configuration, decode_from_std_read, encode_into_std_write, Decode, Encode}; use bincode::{config::Configuration, decode_from_std_read, encode_into_std_write, Decode, Encode};
use blake3::Hash; use blake3::Hash;
use chrono::{DateTime, TimeDelta, Utc}; use chrono::{DateTime, TimeDelta, Utc};
use file_format::FileFormat;
use log::{info, warn}; use log::{info, warn};
use rand::distributions::{Alphanumeric, DistString}; use rand::distributions::{Alphanumeric, DistString};
use rocket::{ use rocket::{
@ -146,6 +147,9 @@ pub struct MochiFile {
/// The original name of the file /// The original name of the file
name: String, name: String,
/// The format the file is, for serving
extension: String,
/// The Blake3 hash of the file /// The Blake3 hash of the file
#[bincode(with_serde)] #[bincode(with_serde)]
hash: Hash, hash: Hash,
@ -164,6 +168,7 @@ impl MochiFile {
pub fn new_with_expiry( pub fn new_with_expiry(
mmid: Mmid, mmid: Mmid,
name: String, name: String,
extension: &str,
hash: Hash, hash: Hash,
expire_duration: TimeDelta, expire_duration: TimeDelta,
) -> Self { ) -> Self {
@ -173,6 +178,7 @@ impl MochiFile {
Self { Self {
mmid, mmid,
name, name,
extension: extension.to_string(),
hash, hash,
upload_datetime: current, upload_datetime: current,
expiry_datetime: expiry, expiry_datetime: expiry,
@ -199,6 +205,10 @@ impl MochiFile {
pub fn mmid(&self) -> &Mmid { pub fn mmid(&self) -> &Mmid {
&self.mmid &self.mmid
} }
pub fn extension(&self) -> &String {
&self.extension
}
} }
/// Clean the database. Removes files which are past their expiry /// Clean the database. Removes files which are past their expiry
@ -283,8 +293,18 @@ impl Mmid {
} }
} }
impl From<&str> for Mmid { impl TryFrom<&str> for Mmid {
fn from(value: &str) -> Self { type Error = ();
Self(value.to_owned())
fn try_from(value: &str) -> Result<Self, Self::Error> {
if value.len() != 8 {
return Err(())
}
if value.chars().any(|c| !c.is_ascii_alphanumeric()) {
return Err(())
}
Ok(Self(value.to_owned()))
} }
} }

View file

@ -1,11 +1,11 @@
use std::sync::{Arc, RwLock}; use std::sync::{Arc, RwLock};
use rocket::{ use rocket::{
fs::NamedFile, get, serde::{self, json::Json}, State fs::NamedFile, get, http::ContentType, serde::{self, json::Json}, tokio::fs::File, State
}; };
use serde::Serialize; use serde::Serialize;
use crate::{database::Database, settings::Settings}; use crate::{database::{Database, Mmid}, settings::Settings};
/// An endpoint to obtain information about the server's capabilities /// An endpoint to obtain information about the server's capabilities
#[get("/info")] #[get("/info")]
@ -34,20 +34,28 @@ pub struct ServerInfo {
allowed_durations: Vec<u32>, allowed_durations: Vec<u32>,
} }
/// Look up the hash of a file to find it. This only returns the first /// Look up the [`Mmid`] of a file to find it.
/// hit for a hash, so different filenames may not be found. #[get("/f/<mmid>")]
#[get("/f/<id>")]
pub async fn lookup( pub async fn lookup(
db: &State<Arc<RwLock<Database>>>, db: &State<Arc<RwLock<Database>>>,
settings: &State<Settings>, settings: &State<Settings>,
id: &str mmid: &str
) -> Option<NamedFile> { ) -> Option<(ContentType, NamedFile)> {
dbg!(db.read().unwrap()); let mmid: Mmid = match mmid.try_into() {
let entry = if let Some(e) = db.read().unwrap().get(&id.into()).cloned() { Ok(v) => v,
Err(_) => return None,
};
let entry = if let Some(e) = db.read().unwrap().get(&mmid).cloned() {
e e
} else { } else {
return None return None
}; };
NamedFile::open(settings.file_dir.join(entry.hash().to_string())).await.ok() let file = NamedFile::open(settings.file_dir.join(entry.hash().to_string())).await.ok()?;
Some((
ContentType::from_extension(entry.extension()).unwrap_or(ContentType::Binary),
file
))
} }

View file

@ -3,7 +3,6 @@ mod endpoints;
mod settings; mod settings;
mod strings; mod strings;
mod utils; mod utils;
mod file_server;
use std::{ use std::{
fs, fs,
@ -127,9 +126,7 @@ async fn handle_upload(
db: &State<Arc<RwLock<Database>>>, db: &State<Arc<RwLock<Database>>>,
settings: &State<Settings>, settings: &State<Settings>,
) -> Result<Json<ClientResponse>, std::io::Error> { ) -> Result<Json<ClientResponse>, std::io::Error> {
let mut temp_dir = settings.temp_dir.clone(); // Ensure the expiry time is valid, if not return an error
let mut out_path = settings.file_dir.clone();
let expire_time = if let Ok(t) = parse_time_string(&file_data.expire_time) { let expire_time = if let Ok(t) = parse_time_string(&file_data.expire_time) {
if settings.duration.restrict_to_allowed && !settings.duration.allowed.contains(&t) { if settings.duration.restrict_to_allowed && !settings.duration.allowed.contains(&t) {
return Ok(Json(ClientResponse::failure("Duration not allowed"))); return Ok(Json(ClientResponse::failure("Duration not allowed")));
@ -144,7 +141,6 @@ async fn handle_upload(
return Ok(Json(ClientResponse::failure("Duration invalid"))); return Ok(Json(ClientResponse::failure("Duration invalid")));
}; };
// TODO: Properly sanitize this...
let raw_name = file_data let raw_name = file_data
.file .file
.raw_name() .raw_name()
@ -153,20 +149,31 @@ async fn handle_upload(
.as_str() .as_str()
.to_string(); .to_string();
// Get temp path and hash it // Get temp path for the file
temp_dir.push(Uuid::new_v4().to_string()); let temp_filename = settings.temp_dir.join(Uuid::new_v4().to_string());
let temp_filename = temp_dir;
file_data.file.persist_to(&temp_filename).await?; file_data.file.persist_to(&temp_filename).await?;
// Get hash and random identifier
let file_mmid = Mmid::new();
let file_hash = hash_file(&temp_filename).await?; let file_hash = hash_file(&temp_filename).await?;
let file_mmid = Mmid::new(); // Process filetype
out_path.push(file_hash.to_string()); let file_type = file_format::FileFormat::from_file(&temp_filename)?;
let constructed_file = let constructed_file =
MochiFile::new_with_expiry(file_mmid.clone(), raw_name, file_hash, expire_time); MochiFile::new_with_expiry(
file_mmid.clone(),
raw_name,
file_type.extension(),
file_hash,
expire_time
);
// Move it to the new proper place // Move it to the new proper place
std::fs::rename(temp_filename, out_path)?; std::fs::rename(
temp_filename,
settings.file_dir.join(file_hash.to_string())
)?;
db.write() db.write()
.unwrap() .unwrap()
@ -177,7 +184,7 @@ async fn handle_upload(
status: true, status: true,
name: constructed_file.name().clone(), name: constructed_file.name().clone(),
mmid: Some(file_mmid), mmid: Some(file_mmid),
hash: file_hash.to_hex()[0..10].to_string(), hash: file_hash.to_string(),
expires: Some(constructed_file.expiry()), expires: Some(constructed_file.expiry()),
..Default::default() ..Default::default()
})) }))
@ -244,7 +251,7 @@ async fn main() {
tokio::spawn({ tokio::spawn({
let cleaner_db = database.clone(); let cleaner_db = database.clone();
let file_path = config.file_dir.clone(); let file_path = config.file_dir.clone();
async move { clean_loop(cleaner_db, file_path, rx, TimeDelta::seconds(10)).await } async move { clean_loop(cleaner_db, file_path, rx, TimeDelta::minutes(2)).await }
}); });
let rocket = rocket::build() let rocket = rocket::build()

View file

@ -42,13 +42,11 @@ function getDroppedFiles(evt) {
}); });
} }
console.log(files);
return files; return files;
} }
async function fileSend(files, duration, maxSize) { async function fileSend(files, duration, maxSize) {
for (const file of files) { for (const file of files) {
console.log(file);
const [linkRow, progressBar, progressText] = addNewToList(file.name); const [linkRow, progressBar, progressText] = addNewToList(file.name);
if (file.size > maxSize) { if (file.size > maxSize) {
makeErrored(progressBar, progressText, linkRow, TOO_LARGE_TEXT); makeErrored(progressBar, progressText, linkRow, TOO_LARGE_TEXT);