Merge pull request #1 from G2-Games/database_redo

Redo database and fix issues
G2 committed 2024-10-27 17:44:19 -05:00 (committed by GitHub)
commit 3a82415bef
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
8 changed files with 289 additions and 2162 deletions

.gitignore (vendored, 2 changes)

@@ -4,3 +4,5 @@
 temp_files
 *.mochi
 settings.toml
+Cargo.lock

Cargo.lock (generated, 2020 changes)

Diff suppressed because it is too large.

Cargo.toml

@@ -7,8 +7,10 @@ edition = "2021"
 bincode = { version = "2.0.0-rc.3", features = ["serde"] }
 blake3 = { version = "1.5.4", features = ["mmap", "rayon", "serde"] }
 chrono = { version = "0.4.38", features = ["serde"] }
+file-format = { version = "0.25.0", features = ["reader"] }
 log = "0.4"
 maud = { version = "0.26", features = ["rocket"] }
+rand = "0.8.5"
 rocket = { version = "0.5", features = ["json"] }
 serde = { version = "1.0.213", features = ["derive"] }
 serde_with = { version = "3.11.0", features = ["chrono_0_4"] }
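The two new dependencies back the rework below: file-format detects an uploaded file's type so it can be served with a proper extension, and rand generates the random identifiers. A minimal sketch of the detection call as used later in src/main.rs (the helper name is illustrative, not part of the commit):

use std::path::Path;

// Hypothetical helper: sniff a file's format from its magic bytes
// rather than trusting the client-supplied filename
fn detect_extension(path: &Path) -> std::io::Result<String> {
    let format = file_format::FileFormat::from_file(path)?;
    Ok(format.extension().to_string())
}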

src/database.rs

@@ -1,5 +1,6 @@
 use std::{
-    collections::HashMap,
+    collections::{hash_map::Values, HashMap, HashSet},
+    ffi::OsStr,
     fs::{self, File},
     path::{Path, PathBuf},
     sync::{Arc, RwLock},
@@ -9,6 +10,7 @@ use bincode::{config::Configuration, decode_from_std_read, encode_into_std_write
 use blake3::Hash;
 use chrono::{DateTime, TimeDelta, Utc};
 use log::{info, warn};
+use rand::distributions::{Alphanumeric, DistString};
 use rocket::{
     serde::{Deserialize, Serialize},
     tokio::{select, sync::mpsc::Receiver, time},
@@ -19,8 +21,14 @@ const BINCODE_CFG: Configuration = bincode::config::standard();
 #[derive(Debug, Clone, Decode, Encode)]
 pub struct Database {
     path: PathBuf,
+
+    /// Every hash in the database along with the [`Mmid`]s associated with them
     #[bincode(with_serde)]
-    pub files: HashMap<MochiKey, MochiFile>,
+    hashes: HashMap<Hash, HashSet<Mmid>>,
+
+    /// All entries in the database
+    #[bincode(with_serde)]
+    entries: HashMap<Mmid, MochiFile>,
 }
 
 impl Database {
@@ -29,7 +37,8 @@ impl Database {
         let output = Self {
             path: path.as_ref().to_path_buf(),
-            files: HashMap::new(),
+            entries: HashMap::new(),
+            hashes: HashMap::new(),
         };
 
         encode_into_std_write(&output, &mut file, BINCODE_CFG).expect("Could not write database!");
@@ -37,6 +46,7 @@ impl Database {
         output
     }
 
+    /// Open the database from a path, **or create it if it does not exist**
     pub fn open<P: AsRef<Path>>(path: &P) -> Self {
         if !path.as_ref().exists() {
             Self::new(path)
@@ -46,6 +56,7 @@ impl Database {
         }
     }
 
+    /// Save the database to its file
     pub fn save(&self) {
         let mut out_path = self.path.clone();
         out_path.set_extension(".bkp");
@@ -54,18 +65,90 @@
         fs::rename(out_path, &self.path).unwrap();
     }
 
+    /// Insert a [`MochiFile`] into the database.
+    ///
+    /// If the database already contained this value, then `false` is returned.
+    pub fn insert(&mut self, entry: MochiFile) -> bool {
+        if let Some(s) = self.hashes.get_mut(&entry.hash) {
+            // If the database already contains the hash, make sure the file is unique
+            if !s.insert(entry.mmid.clone()) {
+                return false;
+            }
+        } else {
+            // If the database does not contain the hash, create a new set for it
+            self.hashes
+                .insert(entry.hash, HashSet::from([entry.mmid.clone()]));
+        }
+
+        self.entries.insert(entry.mmid.clone(), entry.clone());
+
+        true
+    }
+
+    /// Remove an [`Mmid`] from the database entirely.
+    ///
+    /// If the database did not contain this value, then `false` is returned.
+    pub fn remove_mmid(&mut self, mmid: &Mmid) -> bool {
+        let hash = if let Some(h) = self.entries.get(mmid).map(|e| e.hash) {
+            self.entries.remove(mmid);
+            h
+        } else {
+            return false;
+        };
+
+        if let Some(s) = self.hashes.get_mut(&hash) {
+            s.remove(mmid);
+        }
+
+        true
+    }
+
+    /// Remove a hash from the database entirely.
+    ///
+    /// Will not remove the hash (returns [`Some(false)`]) if it still contains references.
+    pub fn remove_hash(&mut self, hash: &Hash) -> Option<bool> {
+        if let Some(s) = self.hashes.get(hash) {
+            if s.is_empty() {
+                self.hashes.remove(hash);
+                Some(true)
+            } else {
+                Some(false)
+            }
+        } else {
+            None
+        }
+    }
+
+    /// Checks whether a hash in the database has no more [`Mmid`]s referencing it.
+    pub fn is_hash_empty(&self, hash: &Hash) -> Option<bool> {
+        self.hashes.get(hash).map(|s| s.is_empty())
+    }
+
+    /// Get an entry by its [`Mmid`]. Returns [`None`] if the value does not exist.
+    pub fn get(&self, mmid: &Mmid) -> Option<&MochiFile> {
+        self.entries.get(mmid)
+    }
+
+    pub fn entries(&self) -> Values<'_, Mmid, MochiFile> {
+        self.entries.values()
+    }
 }
 
+/// An entry in the database storing metadata about a file
 #[derive(Debug, Clone, Decode, Encode, Deserialize, Serialize)]
 #[serde(crate = "rocket::serde")]
 pub struct MochiFile {
+    /// A unique identifier describing this file
+    mmid: Mmid,
+
     /// The original name of the file
     name: String,
-    /// The location on disk (for deletion and management)
-    filename: PathBuf,
-    /// The hashed contents of the file as a Blake3 hash
+
+    /// The format the file is, for serving
+    extension: String,
+
+    /// The Blake3 hash of the file
     #[bincode(with_serde)]
     hash: Hash,
@@ -81,17 +164,19 @@ pub struct MochiFile {
 impl MochiFile {
     /// Create a new file that expires in `expiry`.
     pub fn new_with_expiry(
-        name: &str,
+        mmid: Mmid,
+        name: String,
+        extension: &str,
         hash: Hash,
-        filename: PathBuf,
         expire_duration: TimeDelta,
     ) -> Self {
         let current = Utc::now();
         let expiry = current + expire_duration;
 
         Self {
-            name: name.to_string(),
-            filename,
+            mmid,
+            name,
+            extension: extension.to_string(),
             hash,
             upload_datetime: current,
             expiry_datetime: expiry,
@@ -102,22 +187,11 @@ impl MochiFile {
         &self.name
     }
 
-    pub fn path(&self) -> &PathBuf {
-        &self.filename
-    }
-
-    pub fn get_key(&self) -> MochiKey {
-        MochiKey {
-            name: self.name.clone(),
-            hash: self.hash,
-        }
-    }
-
-    pub fn get_expiry(&self) -> DateTime<Utc> {
+    pub fn expiry(&self) -> DateTime<Utc> {
         self.expiry_datetime
     }
 
-    pub fn expired(&self) -> bool {
+    pub fn is_expired(&self) -> bool {
         let datetime = Utc::now();
         datetime > self.expiry_datetime
     }
@@ -125,64 +199,58 @@ impl MochiFile {
     pub fn hash(&self) -> &Hash {
         &self.hash
     }
+
+    pub fn mmid(&self) -> &Mmid {
+        &self.mmid
+    }
+
+    pub fn extension(&self) -> &String {
+        &self.extension
+    }
 }
 
-#[derive(Debug, Clone, PartialEq, Eq, Hash, Decode, Encode, Deserialize, Serialize)]
-#[serde(crate = "rocket::serde")]
-pub struct MochiKey {
-    name: String,
-    #[bincode(with_serde)]
-    hash: Hash,
-}
-
 /// Clean the database. Removes files which are past their expiry
 /// [`chrono::DateTime`]. Also removes files which no longer exist on the disk.
-fn clean_database(db: &Arc<RwLock<Database>>) {
+fn clean_database(db: &Arc<RwLock<Database>>, file_path: &Path) {
     let mut database = db.write().unwrap();
 
+    // Add expired entries to the removal list
     let files_to_remove: Vec<_> = database
-        .files
-        .iter()
+        .entries()
         .filter_map(|e| {
-            if e.1.expired() {
-                // Check if the entry has expired
-                Some((e.0.clone(), e.1.clone()))
-            } else if !e.1.path().try_exists().is_ok_and(|r| r) {
-                // Check if the entry exists
-                Some((e.0.clone(), e.1.clone()))
+            if e.is_expired() {
+                Some((e.mmid().clone(), *e.hash()))
             } else {
                 None
             }
        })
        .collect();
 
-    let mut expired = 0;
-    let mut missing = 0;
-    for file in &files_to_remove {
-        let path = file.1.path();
-        // If the path does not exist, there's no reason to try to remove it.
-        if path.try_exists().is_ok_and(|r| r) {
-            match fs::remove_file(path) {
-                Ok(_) => (),
-                Err(e) => warn!("Failed to delete path at {:?}: {e}", path),
-            }
-            expired += 1;
-        } else {
-            missing += 1
-        }
-        database.files.remove(&file.0);
+    let mut removed_files = 0;
+    let mut removed_entries = 0;
+    for e in &files_to_remove {
+        if database.remove_mmid(&e.0) {
+            removed_entries += 1;
+        }
+        if database.is_hash_empty(&e.1).is_some_and(|b| b) {
+            database.remove_hash(&e.1);
+            if let Err(e) = fs::remove_file(file_path.join(e.1.to_string())) {
+                warn!("Failed to remove expired hash: {}", e);
+            } else {
+                removed_files += 1;
+            }
+        }
     }
 
-    info!(
-        "{} expired and {} missing items cleared from database",
-        expired, missing
-    );
+    info!("Cleaned database. Removed {removed_entries} expired entries. Removed {removed_files} no longer referenced files.");
 
     database.save();
 }
 
 /// A loop to clean the database periodically.
 pub async fn clean_loop(
     db: Arc<RwLock<Database>>,
+    file_path: PathBuf,
     mut shutdown_signal: Receiver<()>,
     interval: TimeDelta,
 ) {
@@ -190,8 +258,73 @@ pub async fn clean_loop(
     loop {
         select! {
-            _ = interval.tick() => clean_database(&db),
+            _ = interval.tick() => clean_database(&db, &file_path),
             _ = shutdown_signal.recv() => break,
         };
     }
 }
+
+/// A unique identifier for an entry in the database, 8 characters long,
+/// consisting of ASCII alphanumeric characters (`a-z`, `A-Z`, and `0-9`).
+#[derive(Debug, PartialEq, Eq, Clone, Decode, Encode, Hash, Deserialize, Serialize)]
+pub struct Mmid(String);
+
+impl Mmid {
+    /// Create a new random MMID
+    pub fn new() -> Self {
+        let string = Alphanumeric.sample_string(&mut rand::thread_rng(), 8);
+
+        Self(string)
+    }
+}
+
+impl TryFrom<&str> for Mmid {
+    type Error = ();
+
+    fn try_from(value: &str) -> Result<Self, Self::Error> {
+        if value.len() != 8 {
+            return Err(());
+        }
+
+        if value.chars().any(|c| !c.is_ascii_alphanumeric()) {
+            return Err(());
+        }
+
+        Ok(Self(value.to_owned()))
+    }
+}
+
+impl TryFrom<&Path> for Mmid {
+    type Error = ();
+
+    fn try_from(value: &Path) -> Result<Self, Self::Error> {
+        value.as_os_str().try_into()
+    }
+}
+
+impl TryFrom<&OsStr> for Mmid {
+    type Error = ();
+
+    fn try_from(value: &OsStr) -> Result<Self, Self::Error> {
+        let string = match value.to_str() {
+            Some(p) => p,
+            None => return Err(()),
+        };
+
+        if string.len() != 8 {
+            return Err(());
+        }
+
+        if string.chars().any(|c| !c.is_ascii_alphanumeric()) {
+            return Err(());
+        }
+
+        Ok(Self(string.to_owned()))
+    }
+}
+
+impl std::fmt::Display for Mmid {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.0)
+    }
+}
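The net effect of this rework: the old (name, hash) MochiKey is gone, and the database now keeps two maps, entries (Mmid to MochiFile) and hashes (Hash to the set of Mmids sharing that content), so duplicate uploads can share one deduplicated file on disk. A minimal sketch of how the new API fits together, assuming the items above are in scope; the path and file names are illustrative:

use chrono::TimeDelta;

fn database_sketch() {
    // Open (or create) a database file; "mochi.db" is a made-up path
    let mut db = Database::open(&"mochi.db");

    // Every upload gets a fresh random 8-character identifier
    let mmid = Mmid::new();
    let hash = blake3::hash(b"file contents");

    let entry = MochiFile::new_with_expiry(
        mmid.clone(),
        "photo.png".to_string(),
        "png",
        hash,
        TimeDelta::hours(24),
    );

    // insert() returns false if this exact (hash, mmid) pair already exists
    assert!(db.insert(entry));
    assert!(db.get(&mmid).is_some());

    // Removing the last Mmid for a hash leaves the hash unreferenced,
    // which is the signal that the backing file can be deleted
    db.remove_mmid(&mmid);
    assert_eq!(db.is_hash_empty(&hash), Some(true));
    assert_eq!(db.remove_hash(&hash), Some(true));
}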

src/endpoints.rs

@@ -2,14 +2,18 @@ use std::sync::{Arc, RwLock};
 use rocket::{
     get,
-    http::RawStr,
-    response::{status::NotFound, Redirect},
+    http::ContentType,
+    response::Redirect,
     serde::{self, json::Json},
-    State,
+    tokio::fs::File,
+    uri, State,
 };
 use serde::Serialize;
 
-use crate::{database::Database, get_id, settings::Settings};
+use crate::{
+    database::{Database, Mmid},
+    settings::Settings,
+};
 
 /// An endpoint to obtain information about the server's capabilities
 #[get("/info")]
@@ -38,17 +42,41 @@ pub struct ServerInfo {
     allowed_durations: Vec<u32>,
 }
 
-/// Look up the hash of a file to find it. This only returns the first
-/// hit for a hash, so different filenames may not be found.
-#[get("/f/<id>")]
-pub fn lookup(db: &State<Arc<RwLock<Database>>>, id: &str) -> Result<Redirect, NotFound<()>> {
-    for file in db.read().unwrap().files.values() {
-        if file.hash().to_hex()[0..10].to_string() == id {
-            let filename = get_id(file.name(), *file.hash());
-            let filename = RawStr::new(&filename).percent_encode().to_string();
-            return Ok(Redirect::to(format!("/files/{}", filename)));
-        }
-    }
-
-    Err(NotFound(()))
+/// Look up the [`Mmid`] of a file and redirect to its named URL.
+#[get("/f/<mmid>")]
+pub async fn lookup_mmid(db: &State<Arc<RwLock<Database>>>, mmid: &str) -> Option<Redirect> {
+    let mmid: Mmid = mmid.try_into().ok()?;
+    let entry = db.read().unwrap().get(&mmid).cloned()?;
+
+    Some(Redirect::to(uri!(lookup_mmid_name(
+        mmid.to_string(),
+        entry.name()
+    ))))
+}
+
+/// Look up the [`Mmid`] and name of a file, and serve it.
+#[get("/f/<mmid>/<name>")]
+pub async fn lookup_mmid_name(
+    db: &State<Arc<RwLock<Database>>>,
+    settings: &State<Settings>,
+    mmid: &str,
+    name: &str,
+) -> Option<(ContentType, File)> {
+    let mmid: Mmid = mmid.try_into().ok()?;
+    let entry = db.read().unwrap().get(&mmid).cloned()?;
+
+    // If the name does not match, then this is invalid
+    if name != entry.name() {
+        return None;
+    }
+
+    let file = File::open(settings.file_dir.join(entry.hash().to_string()))
+        .await
+        .ok()?;
+
+    Some((
+        ContentType::from_extension(entry.extension()).unwrap_or(ContentType::Binary),
+        file,
+    ))
 }
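Lookup is now a two-step flow: /f/<mmid> answers with a redirect to /f/<mmid>/<name>, and the named route streams the deduplicated file with a ContentType derived from the stored extension (Binary as a fallback). A hedged sketch of exercising this with Rocket's local test client; build_rocket() and the MMID/name values are hypothetical stand-ins for the setup done in src/main.rs:

use rocket::{http::Status, local::blocking::Client};

fn lookup_flow_sketch() {
    // build_rocket() is assumed to mount lookup_mmid and lookup_mmid_name
    // with the same managed Database and Settings state as main()
    let client = Client::tracked(build_rocket()).expect("valid rocket instance");

    // Step 1: the bare MMID route redirects to the named URL
    let resp = client.get("/f/abc12345").dispatch();
    assert_eq!(resp.status(), Status::SeeOther);

    // Step 2: the named route serves the file; a mismatched name or
    // unknown MMID falls through to a 404 via the Option return type
    let resp = client.get("/f/abc12345/photo.png").dispatch();
    assert_eq!(resp.status(), Status::Ok);
}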

src/main.rs

@@ -10,8 +10,8 @@ use std::{
 };
 
 use chrono::{DateTime, TimeDelta, Utc};
-use database::{clean_loop, Database, MochiFile};
-use endpoints::{lookup, server_info};
+use database::{clean_loop, Database, Mmid, MochiFile};
+use endpoints::{lookup_mmid, lookup_mmid_name, server_info};
 use log::info;
 use maud::{html, Markup, PreEscaped, DOCTYPE};
 use rocket::{
@@ -28,7 +28,7 @@ use rocket::{
 };
 use settings::Settings;
 use strings::{parse_time_string, to_pretty_time};
-use utils::{get_id, hash_file};
+use utils::hash_file;
 use uuid::Uuid;
 
 fn head(page_title: &str) -> Markup {
@@ -86,9 +86,9 @@ fn home(settings: &State<Settings>) -> Markup {
         }
         form #uploadForm {
             // It's stupid how these can't be styled so they're just hidden here...
-            input id="fileInput" type="file" name="fileUpload" multiple
+            input #fileInput type="file" name="fileUpload" multiple
                 onchange="formSubmit(this.parentNode)" data-max-filesize=(settings.max_filesize) style="display:none;";
-            input id="fileDuration" type="text" name="duration" minlength="2"
+            input #fileDuration type="text" name="duration" minlength="2"
                 maxlength="7" value=(settings.duration.default.num_seconds().to_string() + "s") style="display:none;";
         }
         hr;
@@ -102,7 +102,7 @@ fn home(settings: &State<Settings>) -> Markup {
         footer {
             p {a href="https://github.com/G2-Games/confetti-box" {"Source"}}
            p {a href="https://g2games.dev/" {"My Website"}}
-            p {a href="#" {"Links"}}
+            p {a href="api" {"API Info"}}
             p {a href="#" {"Go"}}
             p {a href="#" {"Here"}}
         }
@@ -126,27 +126,22 @@ async fn handle_upload(
     db: &State<Arc<RwLock<Database>>>,
     settings: &State<Settings>,
 ) -> Result<Json<ClientResponse>, std::io::Error> {
-    let mut temp_dir = settings.temp_dir.clone();
-    let mut out_path = settings.file_dir.clone();
-
+    // Ensure the expiry time is valid, if not return an error
     let expire_time = if let Ok(t) = parse_time_string(&file_data.expire_time) {
-        if t > settings.duration.maximum {
-            return Ok(Json(ClientResponse::failure(
-                "Duration larger than maximum",
-            )));
-        }
-
         if settings.duration.restrict_to_allowed && !settings.duration.allowed.contains(&t) {
             return Ok(Json(ClientResponse::failure("Duration not allowed")));
         }
 
+        if t > settings.duration.maximum {
+            return Ok(Json(ClientResponse::failure("Duration larger than max")));
+        }
+
         t
     } else {
         return Ok(Json(ClientResponse::failure("Duration invalid")));
     };
 
-    // TODO: Properly sanitize this...
-    let raw_name = &*file_data
+    let raw_name = file_data
         .file
         .raw_name()
         .unwrap()
@@ -154,51 +149,37 @@
         .as_str()
         .to_string();
 
-    // Get temp path and hash it
-    temp_dir.push(Uuid::new_v4().to_string());
-    let temp_filename = temp_dir;
+    // Get temp path for the file
+    let temp_filename = settings.temp_dir.join(Uuid::new_v4().to_string());
     file_data.file.persist_to(&temp_filename).await?;
-    let hash = hash_file(&temp_filename).await?;
 
-    let filename = get_id(raw_name, hash);
-    out_path.push(filename.clone());
+    // Get hash and random identifier
+    let file_mmid = Mmid::new();
+    let file_hash = hash_file(&temp_filename).await?;
 
-    let constructed_file =
-        MochiFile::new_with_expiry(raw_name, hash, out_path.clone(), expire_time);
+    // Process filetype
+    let file_type = file_format::FileFormat::from_file(&temp_filename)?;
 
-    if !settings.overwrite
-        && db
-            .read()
-            .unwrap()
-            .files
-            .contains_key(&constructed_file.get_key())
-    {
-        info!("Key already in DB, NOT ADDING");
-        return Ok(Json(ClientResponse {
-            status: true,
-            response: "File already exists",
-            name: constructed_file.name().clone(),
-            url: filename,
-            hash: hash.to_hex()[0..10].to_string(),
-            expires: Some(constructed_file.get_expiry()),
-        }));
-    }
+    let constructed_file = MochiFile::new_with_expiry(
+        file_mmid.clone(),
+        raw_name,
+        file_type.extension(),
+        file_hash,
+        expire_time,
+    );
 
     // Move it to the new proper place
-    std::fs::rename(temp_filename, out_path)?;
+    std::fs::rename(temp_filename, settings.file_dir.join(file_hash.to_string()))?;
 
-    db.write()
-        .unwrap()
-        .files
-        .insert(constructed_file.get_key(), constructed_file.clone());
+    db.write().unwrap().insert(constructed_file.clone());
    db.write().unwrap().save();
 
     Ok(Json(ClientResponse {
         status: true,
         name: constructed_file.name().clone(),
-        url: filename,
-        hash: hash.to_hex()[0..10].to_string(),
-        expires: Some(constructed_file.get_expiry()),
+        mmid: Some(file_mmid),
+        hash: file_hash.to_string(),
+        expires: Some(constructed_file.expiry()),
         ..Default::default()
     }))
 }
@@ -214,8 +195,8 @@ struct ClientResponse {
     #[serde(skip_serializing_if = "str::is_empty")]
     pub name: String,
-    #[serde(skip_serializing_if = "str::is_empty")]
-    pub url: String,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub mmid: Option<Mmid>,
     #[serde(skip_serializing_if = "str::is_empty")]
     pub hash: String,
     #[serde(skip_serializing_if = "Option::is_none")]
@@ -263,7 +244,8 @@ async fn main() {
     let (shutdown, rx) = tokio::sync::mpsc::channel(1);
     tokio::spawn({
         let cleaner_db = database.clone();
-        async move { clean_loop(cleaner_db, rx, TimeDelta::minutes(2)).await }
+        let file_path = config.file_dir.clone();
+        async move { clean_loop(cleaner_db, file_path, rx, TimeDelta::minutes(2)).await }
     });
 
     let rocket = rocket::build()
@@ -276,7 +258,8 @@ async fn main() {
             stylesheet,
             server_info,
             favicon,
-            lookup
+            lookup_mmid,
+            lookup_mmid_name,
         ],
     )
     .mount(
@@ -295,12 +278,14 @@ async fn main() {
     // Ensure the server gracefully shuts down
     rocket.expect("Server failed to shutdown gracefully");
 
-    info!("Stopping database cleaning thread");
+    info!("Stopping database cleaning thread...");
     shutdown
         .send(())
         .await
-        .expect("Failed to stop cleaner thread");
+        .expect("Failed to stop cleaner thread.");
+    info!("Stopping database cleaning thread completed successfully.");
 
     info!("Saving database on shutdown...");
     local_db.write().unwrap().save();
+    info!("Saving database completed successfully.");
 }

src/utils.rs

@@ -1,11 +1,6 @@
 use blake3::Hash;
 use std::path::Path;
 
-/// Get a filename based on the file's hashed name
-pub fn get_id(name: &str, hash: Hash) -> String {
-    hash.to_hex()[0..10].to_string() + "_" + name
-}
-
 /// Get the Blake3 hash of a file, without reading it all into memory, and also get the size
 pub async fn hash_file<P: AsRef<Path>>(input: &P) -> Result<Hash, std::io::Error> {
     let mut hasher = blake3::Hasher::new();
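The diff context cuts hash_file off right after the hasher is created. For reference, a streaming Blake3 file hash is typically written like the sketch below, using the mmap and rayon features enabled in Cargo.toml; this is an illustration, not the commit's exact body:

use blake3::{Hash, Hasher};
use std::path::Path;

// Hash a file without reading it all into memory: memory-map it and
// let rayon parallelize hashing of large inputs
pub fn hash_file_sketch<P: AsRef<Path>>(input: &P) -> Result<Hash, std::io::Error> {
    let mut hasher = Hasher::new();
    hasher.update_mmap_rayon(input)?;
    Ok(hasher.finalize())
}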

web/request.js

@@ -42,13 +42,11 @@ function getDroppedFiles(evt) {
         });
     }
 
-    console.log(files);
     return files;
 }
 
 async function fileSend(files, duration, maxSize) {
     for (const file of files) {
-        console.log(file);
         const [linkRow, progressBar, progressText] = addNewToList(file.name);
         if (file.size > maxSize) {
             makeErrored(progressBar, progressText, linkRow, TOO_LARGE_TEXT);
@@ -90,22 +88,24 @@ function makeErrored(progressBar, progressText, linkRow, errorMessage) {
     linkRow.style.background = "#ffb2ae";
 }
 
-function makeFinished(progressBar, progressText, linkRow, linkAddress, hash) {
+function makeFinished(progressBar, progressText, linkRow, response) {
     progressText.textContent = "";
 
+    const name = encodeURIComponent(response.name);
     const link = progressText.appendChild(document.createElement("a"));
-    link.textContent = hash;
-    link.href = "/files/" + linkAddress;
+    link.textContent = response.mmid;
+    link.href = "/f/" + response.mmid;
     link.target = "_blank";
 
     let button = linkRow.appendChild(document.createElement("button"));
     button.textContent = "📝";
     let buttonTimeout = null;
     button.addEventListener('click', function(_e) {
+        const mmid = response.mmid;
         if (buttonTimeout) {
            clearTimeout(buttonTimeout)
         }
         navigator.clipboard.writeText(
-            encodeURI(window.location.protocol + "//" + window.location.host + "/files/" + linkAddress)
+            window.location.protocol + "//" + window.location.host + "/f/" + mmid
         )
         button.textContent = "✅";
         buttonTimeout = setTimeout(function() {
@@ -143,7 +143,7 @@ function uploadComplete(response, progressBar, progressText, linkRow) {
     if (response.status) {
         console.log("Successfully uploaded file", response);
-        makeFinished(progressBar, progressText, linkRow, response.url, response.hash);
+        makeFinished(progressBar, progressText, linkRow, response);
     } else {
         console.error("Error uploading", response);
         makeErrored(progressBar, progressText, linkRow, response.response);
@@ -179,6 +179,8 @@ async function initEverything() {
             if (this.classList.contains("selected")) {
                 return
             }
+            document.getElementById("uploadForm").elements["duration"].value
+                = this.dataset.durationSeconds + "s";
             let selected = this.parentNode.getElementsByClassName("selected");
             selected[0].classList.remove("selected");
             this.classList.add("selected");