Compare commits


No commits in common. "46397a1f0316848c5cfc4ab8ff8c7a29e4f95d37" and "37b6838866cb58146e4215b88ecbae9660c66f19" have entirely different histories.

5 changed files with 45 additions and 86 deletions


@@ -9,7 +9,6 @@ blake3 = { version = "1.5.4", features = ["mmap", "rayon", "serde"] }
 chrono = { version = "0.4.38", features = ["serde"] }
 file-format = { version = "0.25.0", features = ["reader"] }
 log = "0.4"
-lz4_flex = "0.11.3"
 maud = { version = "0.26", features = ["rocket"] }
 rand = "0.8.5"
 rocket = { version = "0.5", features = ["json"] }
@@ -20,8 +19,6 @@ uuid = { version = "1.11.0", features = ["v4"] }
 [profile.production]
 inherits = "release"
-strip = true
 lto = true
 opt-level = "z"
 codegen-units = 1
-panic = "abort"


@@ -2,25 +2,9 @@
 A super simple file host. Inspired by [Catbox](https://catbox.moe) and [Uguu](https://uguu.se).
 
 ## Features
-### Current
-- Entirely self contained, tiny (~4MB) single binary
-- Customizable using a simple config file
-- Only stores one copy of a given hash on the backend
-- Fast (enough), runs just fine on a Raspberry Pi
-- Simple API for interfacing with it programmatically
-- No database setup required, uses self-contained in memory database
-  serialized to a small, LZ4 compressed file.
-
-### Planned
-- Chunked uploads
-- Theming
-- More mochi
-
-## Screenshot
-<p align="center">
-  <img width="500px" src="https://github.com/user-attachments/assets/2ca05da6-ce5e-4cde-a3b0-453ccdd33264">
-  <p align="center"><i>An example of a running instance</i></p>
-</p>
+- Entirely self contained, single binary
+- Customizable using a simple TOML config file
+- ??? (TODO: Add more)
 
 ## License
 Confetti-Box is licensed under the terms of the GNU AGPL-3.0 license. Do what you want


@@ -1,11 +1,15 @@
 use std::{
-    collections::{hash_map::Values, HashMap, HashSet}, ffi::OsStr, fs::{self, File}, io, path::{Path, PathBuf}, sync::{Arc, RwLock}
+    collections::{hash_map::Values, HashMap, HashSet},
+    ffi::OsStr,
+    fs::{self, File},
+    path::{Path, PathBuf},
+    sync::{Arc, RwLock},
 };
 
 use bincode::{config::Configuration, decode_from_std_read, encode_into_std_write, Decode, Encode};
 use blake3::Hash;
 use chrono::{DateTime, TimeDelta, Utc};
-use log::{error, info, warn};
+use log::{info, warn};
 use rand::distributions::{Alphanumeric, DistString};
 use rocket::{
     serde::{Deserialize, Serialize},
@@ -28,52 +32,38 @@ pub struct Database {
 }
 
 impl Database {
-    pub fn new<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
+    pub fn new<P: AsRef<Path>>(path: &P) -> Self {
+        let mut file = File::create_new(path).expect("Could not create database!");
+
         let output = Self {
             path: path.as_ref().to_path_buf(),
             entries: HashMap::new(),
             hashes: HashMap::new(),
         };
 
-        // Save the database initially after creating it
-        output.save()?;
-
-        Ok(output)
-    }
-
-    /// Open the database from a path
-    pub fn open<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
-        let file = File::open(path)?;
-        let mut lz4_file = lz4_flex::frame::FrameDecoder::new(file);
-        decode_from_std_read(&mut lz4_file, BINCODE_CFG)
-            .map_err(|e| io::Error::other(format!("failed to open database: {e}")))
+        encode_into_std_write(&output, &mut file, BINCODE_CFG).expect("Could not write database!");
+
+        output
     }
 
     /// Open the database from a path, **or create it if it does not exist**
-    pub fn open_or_new<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
+    pub fn open<P: AsRef<Path>>(path: &P) -> Self {
         if !path.as_ref().exists() {
             Self::new(path)
         } else {
-            Self::open(path)
+            let mut file = File::open(path).expect("Could not get database file!");
+            decode_from_std_read(&mut file, BINCODE_CFG).expect("Could not decode database")
         }
     }
 
     /// Save the database to its file
-    pub fn save(&self) -> Result<(), io::Error> {
-        // Create a file and write the LZ4 compressed stream into it
-        let file = File::create(&self.path.with_extension("bkp"))?;
-        let mut lz4_file = lz4_flex::frame::FrameEncoder::new(file);
-        encode_into_std_write(self, &mut lz4_file, BINCODE_CFG)
-            .map_err(|e| io::Error::other(format!("failed to save database: {e}")))?;
-        lz4_file.try_finish()?;
-
-        fs::rename(
-            self.path.with_extension("bkp"),
-            &self.path
-        ).unwrap();
-
-        Ok(())
+    pub fn save(&self) {
+        let mut out_path = self.path.clone();
+        out_path.set_extension(".bkp");
+        let mut file = File::create(&out_path).expect("Could not save!");
+        encode_into_std_write(self, &mut file, BINCODE_CFG).expect("Could not write out!");
+
+        fs::rename(out_path, &self.path).unwrap();
     }
 
     /// Insert a [`MochiFile`] into the database.
@@ -140,10 +130,6 @@ impl Database {
         self.entries.get(mmid)
     }
 
-    pub fn get_hash(&self, hash: &Hash) -> Option<&HashSet<Mmid>> {
-        self.hashes.get(hash)
-    }
-
     pub fn entries(&self) -> Values<'_, Mmid, MochiFile> {
         self.entries.values()
     }
@@ -177,20 +163,22 @@ pub struct MochiFile {
 impl MochiFile {
     /// Create a new file that expires in `expiry`.
-    pub fn new(
+    pub fn new_with_expiry(
         mmid: Mmid,
         name: String,
         extension: &str,
         hash: Hash,
-        upload: DateTime<Utc>,
-        expiry: DateTime<Utc>,
+        expire_duration: TimeDelta,
     ) -> Self {
+        let current = Utc::now();
+        let expiry = current + expire_duration;
+
         Self {
             mmid,
             name,
             extension: extension.to_string(),
             hash,
-            upload_datetime: upload,
+            upload_datetime: current,
             expiry_datetime: expiry,
         }
     }
@@ -256,9 +244,7 @@ fn clean_database(db: &Arc<RwLock<Database>>, file_path: &Path) {
     info!("Cleaned database. Removed {removed_entries} expired entries. Removed {removed_files} no longer referenced files.");
 
-    if let Err(e) = database.save() {
-        error!("Failed to save database: {e}")
-    }
+    database.save();
 
     drop(database); // Just to be sure
 }


@@ -101,7 +101,6 @@ async fn handle_upload(
     db: &State<Arc<RwLock<Database>>>,
     settings: &State<Settings>,
 ) -> Result<Json<ClientResponse>, std::io::Error> {
-    let current = Utc::now();
     // Ensure the expiry time is valid, if not return an error
     let expire_time = if let Ok(t) = parse_time_string(&file_data.expire_time) {
         if settings.duration.restrict_to_allowed && !settings.duration.allowed.contains(&t) {
@@ -129,37 +128,31 @@ async fn handle_upload(
     let temp_filename = settings.temp_dir.join(Uuid::new_v4().to_string());
     file_data.file.persist_to(&temp_filename).await?;
 
-    // Get hash and random identifier and expiry
+    // Get hash and random identifier
     let file_mmid = Mmid::new();
     let file_hash = hash_file(&temp_filename).await?;
-    let expiry = current + expire_time;
 
     // Process filetype
     let file_type = file_format::FileFormat::from_file(&temp_filename)?;
 
-    let constructed_file = MochiFile::new(
+    let constructed_file = MochiFile::new_with_expiry(
         file_mmid.clone(),
         raw_name,
         file_type.extension(),
         file_hash,
-        current,
-        expiry
+        expire_time,
     );
 
-    // If the hash does not exist in the database, move the file to the backend, else, delete it
-    if db.read().unwrap().get_hash(&file_hash).is_none() {
-        std::fs::rename(temp_filename, settings.file_dir.join(file_hash.to_string()))?;
-    } else {
-        std::fs::remove_file(temp_filename)?;
-    }
+    // Move it to the new proper place
+    std::fs::rename(temp_filename, settings.file_dir.join(file_hash.to_string()))?;
 
     db.write().unwrap().insert(&file_mmid, constructed_file.clone());
 
     Ok(Json(ClientResponse {
         status: true,
         name: constructed_file.name().clone(),
-        mmid: Some(constructed_file.mmid().clone()),
-        hash: constructed_file.hash().to_string(),
+        mmid: Some(file_mmid),
+        hash: file_hash.to_string(),
         expires: Some(constructed_file.expiry()),
         ..Default::default()
     }))
@@ -218,7 +211,7 @@ async fn main() {
         ..Default::default()
     };
 
-    let database = Arc::new(RwLock::new(Database::open_or_new(&config.database_path).expect("Failed to open or create database")));
+    let database = Arc::new(RwLock::new(Database::open(&config.database_path)));
     let local_db = database.clone();
 
     // Start monitoring thread, cleaning the database every 2 minutes
@@ -262,6 +255,6 @@ async fn main() {
     info!("Stopping database cleaning thread completed successfully.");
 
     info!("Saving database on shutdown...");
-    local_db.write().unwrap().save().expect("Failed to save database");
+    local_db.write().unwrap().save();
     info!("Saving database completed successfully.");
 }


@@ -45,12 +45,13 @@ async function pasteSubmit(evt) {
    const form = document.getElementById("uploadForm");
    const duration = form.elements.duration.value;
 
+    evt.preventDefault();
+
    const files = [];
-    const len = evt.clipboardData.files.length;
-    for (let i = 0; i < len; i++) {
-        const file = evt.clipboardData.files[i];
+    [...evt.clipboardData.files].forEach((file, _) => {
+        // If dropped items aren't files, reject them
        files.push(file);
-    }
+    });
 
    await sendFile(files, duration);
 }
@@ -203,9 +204,7 @@ document.addEventListener("DOMContentLoaded", function(_event) {
    form.addEventListener("submit", formSubmit);
 
    // Respond to file paste events
-    window.addEventListener("paste", (event) => {
-        pasteSubmit(event)
-    });
+    window.addEventListener("paste", (event) => {pasteSubmit(event)});
 
    // Respond to drag and drop stuff
    let fileButton = document.getElementById("fileButton");