mirror of
https://github.com/Dangoware/confetti-box.git
synced 2025-04-19 15:22:57 -05:00
Compare commits
No commits in common. "main" and "0.1.2" have entirely different histories.
29 changed files with 719 additions and 3142 deletions
2
.gitignore
vendored
2
.gitignore
vendored
|
@ -5,6 +5,4 @@ temp_files
|
|||
*.mochi
|
||||
settings.toml
|
||||
|
||||
test/
|
||||
|
||||
Cargo.lock
|
||||
|
|
30
Cargo.toml
30
Cargo.toml
|
@ -1,20 +1,24 @@
|
|||
[workspace]
|
||||
resolver = "2"
|
||||
members = [
|
||||
"confetti-box",
|
||||
"confetti-cli",
|
||||
]
|
||||
[package]
|
||||
name = "confetti_box"
|
||||
version = "0.1.2"
|
||||
edition = "2021"
|
||||
|
||||
[workspace.package]
|
||||
authors = ["G2-Games <ke0bhogsg@gmail.com>", "MrDulfin"]
|
||||
|
||||
[workspace.lints.rust]
|
||||
unsafe_code = "forbid"
|
||||
[dependencies]
|
||||
bincode = { version = "2.0.0-rc.3", features = ["serde"] }
|
||||
blake3 = { version = "1.5.4", features = ["mmap", "rayon", "serde"] }
|
||||
chrono = { version = "0.4.38", features = ["serde"] }
|
||||
file-format = { version = "0.25.0", features = ["reader"] }
|
||||
log = "0.4"
|
||||
maud = { version = "0.26", features = ["rocket"] }
|
||||
rand = "0.8.5"
|
||||
rocket = { version = "0.5", features = ["json"] }
|
||||
serde = { version = "1.0.213", features = ["derive"] }
|
||||
serde_with = { version = "3.11.0", features = ["chrono_0_4"] }
|
||||
toml = "0.8.19"
|
||||
uuid = { version = "1.11.0", features = ["v4"] }
|
||||
|
||||
[profile.production]
|
||||
inherits = "release"
|
||||
strip = true
|
||||
lto = true
|
||||
opt-level = "z"
|
||||
codegen-units = 1
|
||||
panic = "abort"
|
||||
|
|
30
README.md
30
README.md
|
@ -1,29 +1,11 @@
|
|||
# Confetti-Box 🎉
|
||||
A super simple file host. Inspired by [Catbox](https://catbox.moe) and
|
||||
[Uguu](https://uguu.se).
|
||||
A super simple file host. Inspired by [Catbox](https://catbox.moe) and [Uguu](https://uguu.se).
|
||||
|
||||
## Features
|
||||
### Current
|
||||
- Entirely self contained, tiny (~4MB) single binary
|
||||
- Customizable using a simple config file
|
||||
- Only stores one copy of a given hash on the backend
|
||||
- Chunked uploads of configurable size
|
||||
- Websocket uploads
|
||||
- Fast (enough), runs just fine on a Raspberry Pi
|
||||
- Simple API for interfacing with it programmatically
|
||||
- No database setup required, uses self-contained in memory database
|
||||
serialized to a small, LZ4 compressed file.
|
||||
|
||||
### Planned
|
||||
- Theming
|
||||
- More mochi
|
||||
|
||||
## Screenshot
|
||||
<p align="center">
|
||||
<img width="500px" src="./images/Confetti-Box Screenshot.png">
|
||||
<p align="center"><i>An example of a running instance</i></p>
|
||||
</p>
|
||||
- Entirely self contained, single binary
|
||||
- Customizable using a simple TOML config file
|
||||
- ??? (TODO: Add more)
|
||||
|
||||
## License
|
||||
Confetti-Box is licensed under the terms of the GNU AGPL-3.0 license. Do what
|
||||
you want with it within the terms of that license.
|
||||
Confetti-Box is licensed under the terms of the GNU AGPL-3.0 license. Do what you want
|
||||
with it within the terms of that license.
|
||||
|
|
|
@ -1,32 +0,0 @@
|
|||
[package]
|
||||
name = "confetti_box"
|
||||
version = "0.2.2"
|
||||
repository = "https://github.com/Dangoware/confetti-box"
|
||||
license = "AGPL-3.0-or-later"
|
||||
authors.workspace = true
|
||||
edition = "2024"
|
||||
build = "build.rs"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
blake3 = { version = "1.5", features = ["mmap", "rayon", "serde"] }
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
ciborium = "0.2"
|
||||
file-format = { version = "0.26", features = ["reader"] }
|
||||
log = "0.4"
|
||||
maud = { version = "0.27", features = ["rocket"] }
|
||||
rand = "0.8"
|
||||
rocket = { version = "0.5", features = ["json"] }
|
||||
rocket_ws = "0.1"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_with = { version = "3.11", features = ["chrono_0_4"] }
|
||||
toml = "0.8"
|
||||
unidecode = "0.3"
|
||||
urlencoding = "2.1"
|
||||
uuid = { version = "1.11", features = ["serde", "v4"] }
|
||||
|
||||
|
||||
[build-dependencies]
|
||||
vergen-gix = { version = "1.0", features = ["build", "cargo", "rustc", "si"] }
|
|
@ -1,23 +0,0 @@
|
|||
use vergen_gix::{BuildBuilder, CargoBuilder, Emitter, GixBuilder, RustcBuilder, SysinfoBuilder};
|
||||
|
||||
fn main() {
|
||||
let build = BuildBuilder::all_build().unwrap();
|
||||
let cargo = CargoBuilder::all_cargo().unwrap();
|
||||
let gitcl = GixBuilder::all_git().unwrap();
|
||||
let rustc = RustcBuilder::all_rustc().unwrap();
|
||||
let si = SysinfoBuilder::all_sysinfo().unwrap();
|
||||
|
||||
Emitter::default()
|
||||
.add_instructions(&build)
|
||||
.unwrap()
|
||||
.add_instructions(&cargo)
|
||||
.unwrap()
|
||||
.add_instructions(&gitcl)
|
||||
.unwrap()
|
||||
.add_instructions(&rustc)
|
||||
.unwrap()
|
||||
.add_instructions(&si)
|
||||
.unwrap()
|
||||
.emit()
|
||||
.unwrap();
|
||||
}
|
|
@ -1,354 +0,0 @@
|
|||
pub mod database;
|
||||
pub mod endpoints;
|
||||
pub mod pages;
|
||||
pub mod resources;
|
||||
pub mod settings;
|
||||
pub mod strings;
|
||||
pub mod utils;
|
||||
|
||||
use std::{
|
||||
io::{self, ErrorKind},
|
||||
sync::{Arc, RwLock},
|
||||
};
|
||||
|
||||
use crate::{
|
||||
pages::{footer, head},
|
||||
settings::Settings,
|
||||
strings::to_pretty_time,
|
||||
};
|
||||
use chrono::{TimeDelta, Utc};
|
||||
use database::{Chunkbase, ChunkedInfo, Mmid, MochiFile, Mochibase};
|
||||
use maud::{html, Markup, PreEscaped};
|
||||
use rocket::{
|
||||
data::ToByteUnit, futures::{SinkExt as _, StreamExt as _}, get, post, serde::{json::{self, Json}, Serialize}, tokio::{
|
||||
fs, io::{AsyncSeekExt, AsyncWriteExt}
|
||||
}, Data, State
|
||||
};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[get("/")]
|
||||
pub fn home(settings: &State<Settings>) -> Markup {
|
||||
html! {
|
||||
(head("Confetti-Box"))
|
||||
script src="/resources/request.js" { }
|
||||
|
||||
center {
|
||||
h1 { "Confetti-Box 🎉" }
|
||||
h2 { "Files up to " (settings.max_filesize.bytes()) " in size are allowed!" }
|
||||
noscript { "Javascript must be enabled for this site to function!" }
|
||||
hr;
|
||||
button.main_file_upload #fileButton onclick="document.getElementById('fileInput').click()" {
|
||||
h4 { "Upload File(s)" }
|
||||
p { "Click, Paste, or Drag and Drop" }
|
||||
}
|
||||
h3 { "Expire after:" }
|
||||
div id="durationBox" {
|
||||
@for d in &settings.duration.allowed {
|
||||
button.button.{@if settings.duration.default == *d { "selected" }}
|
||||
data-duration-seconds=(d.num_seconds())
|
||||
{
|
||||
(PreEscaped(to_pretty_time(d.num_seconds() as u32)))
|
||||
}
|
||||
}
|
||||
}
|
||||
form #uploadForm {
|
||||
// It's stupid how these can't be styled so they're just hidden here...
|
||||
input #fileDuration type="text" name="duration" minlength="2"
|
||||
maxlength="7" value=(settings.duration.default.num_seconds().to_string()) style="display:none;";
|
||||
input #fileInput type="file" name="fileUpload" multiple
|
||||
onchange="formSubmit(this.parentNode)" data-max-filesize=(settings.max_filesize) style="display:none;";
|
||||
}
|
||||
hr;
|
||||
|
||||
h3 { "Uploaded Files" }
|
||||
div #uploadedFilesDisplay {
|
||||
|
||||
}
|
||||
|
||||
hr;
|
||||
(footer())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Default)]
|
||||
pub struct ChunkedResponse {
|
||||
status: bool,
|
||||
message: String,
|
||||
|
||||
/// UUID used for associating the chunk with the final file
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
uuid: Option<Uuid>,
|
||||
|
||||
/// Valid max chunk size in bytes
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
chunk_size: Option<u64>,
|
||||
}
|
||||
|
||||
impl ChunkedResponse {
|
||||
fn failure(message: &str) -> Self {
|
||||
Self {
|
||||
status: false,
|
||||
message: message.to_string(),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Start a chunked upload. Response contains all the info you need to continue
|
||||
/// uploading chunks.
|
||||
#[post("/upload/chunked", data = "<file_info>")]
|
||||
pub async fn chunked_upload_start(
|
||||
db: &State<Arc<RwLock<Chunkbase>>>,
|
||||
settings: &State<Settings>,
|
||||
file_info: Json<ChunkedInfo>,
|
||||
) -> Result<Json<ChunkedResponse>, std::io::Error> {
|
||||
// Perform some sanity checks
|
||||
if file_info.size > settings.max_filesize {
|
||||
return Ok(Json(ChunkedResponse::failure("File too large")));
|
||||
}
|
||||
if settings.duration.restrict_to_allowed
|
||||
&& !settings
|
||||
.duration
|
||||
.allowed
|
||||
.contains(&file_info.expire_duration)
|
||||
{
|
||||
return Ok(Json(ChunkedResponse::failure("Duration not allowed")));
|
||||
}
|
||||
if file_info.expire_duration > settings.duration.maximum {
|
||||
return Ok(Json(ChunkedResponse::failure("Duration too large")));
|
||||
}
|
||||
|
||||
let uuid = db.write().unwrap().new_file(
|
||||
file_info.0,
|
||||
&settings.temp_dir,
|
||||
TimeDelta::seconds(30)
|
||||
)?;
|
||||
|
||||
Ok(Json(ChunkedResponse {
|
||||
status: true,
|
||||
message: "".into(),
|
||||
uuid: Some(uuid),
|
||||
chunk_size: Some(settings.chunk_size),
|
||||
}))
|
||||
}
|
||||
|
||||
#[post("/upload/chunked/<uuid>?<chunk>", data = "<data>")]
|
||||
pub async fn chunked_upload_continue(
|
||||
chunk_db: &State<Arc<RwLock<Chunkbase>>>,
|
||||
settings: &State<Settings>,
|
||||
data: Data<'_>,
|
||||
uuid: &str,
|
||||
chunk: u64,
|
||||
) -> Result<(), io::Error> {
|
||||
let uuid = Uuid::parse_str(uuid).map_err(io::Error::other)?;
|
||||
let data_stream = data.open((settings.chunk_size + 100).bytes());
|
||||
|
||||
let chunked_info = match chunk_db.read().unwrap().get_file(&uuid) {
|
||||
Some(s) => s.clone(),
|
||||
None => return Err(io::Error::other("Invalid UUID")),
|
||||
};
|
||||
|
||||
if chunked_info.1.recieved_chunks.contains(&chunk) {
|
||||
return Err(io::Error::new(ErrorKind::Other, "Chunk already uploaded"));
|
||||
}
|
||||
|
||||
let mut file = fs::File::options()
|
||||
.read(true)
|
||||
.write(true)
|
||||
.truncate(false)
|
||||
.open(&chunked_info.1.path)
|
||||
.await?;
|
||||
|
||||
let offset = chunk * settings.chunk_size;
|
||||
if (offset > chunked_info.1.size) | (offset > settings.max_filesize) {
|
||||
return Err(io::Error::new(
|
||||
ErrorKind::InvalidInput,
|
||||
"Invalid chunk number for file",
|
||||
));
|
||||
}
|
||||
|
||||
file.seek(io::SeekFrom::Start(offset)).await?;
|
||||
let written = data_stream.stream_to(&mut file).await?.written;
|
||||
file.flush().await?;
|
||||
let position = file.stream_position().await?;
|
||||
|
||||
if written > settings.chunk_size {
|
||||
chunk_db.write().unwrap().remove_file(&uuid)?;
|
||||
return Err(io::Error::other("Wrote more than one chunk"));
|
||||
}
|
||||
if position > chunked_info.1.size {
|
||||
chunk_db.write().unwrap().remove_file(&uuid)?;
|
||||
return Err(io::Error::other("File larger than expected"));
|
||||
}
|
||||
|
||||
chunk_db.write().unwrap().add_recieved_chunk(&uuid, chunk);
|
||||
chunk_db.write().unwrap().extend_timeout(&uuid, TimeDelta::seconds(30));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Finalize a chunked upload
|
||||
#[get("/upload/chunked/<uuid>?finish")]
|
||||
pub async fn chunked_upload_finish(
|
||||
main_db: &State<Arc<RwLock<Mochibase>>>,
|
||||
chunk_db: &State<Arc<RwLock<Chunkbase>>>,
|
||||
settings: &State<Settings>,
|
||||
uuid: &str,
|
||||
) -> Result<Json<MochiFile>, io::Error> {
|
||||
let now = Utc::now();
|
||||
let uuid = Uuid::parse_str(uuid).map_err(io::Error::other)?;
|
||||
let chunked_info = match chunk_db.read().unwrap().get_file(&uuid) {
|
||||
Some(s) => s.clone(),
|
||||
None => return Err(io::Error::other("Invalid UUID")),
|
||||
};
|
||||
|
||||
if !chunked_info.1.path.try_exists().is_ok_and(|e| e) {
|
||||
return Err(io::Error::other("File does not exist"));
|
||||
}
|
||||
|
||||
// Get file hash
|
||||
let mut hasher = blake3::Hasher::new();
|
||||
hasher.update_mmap_rayon(&chunked_info.1.path).unwrap();
|
||||
let hash = hasher.finalize();
|
||||
let new_filename = settings.file_dir.join(hash.to_string());
|
||||
|
||||
// If the hash does not exist in the database,
|
||||
// move the file to the backend, else, delete it
|
||||
// This also removes it from the chunk database
|
||||
if main_db.read().unwrap().get_hash(&hash).is_none() {
|
||||
chunk_db.write().unwrap().move_and_remove_file(&uuid, &new_filename)?;
|
||||
} else {
|
||||
chunk_db.write().unwrap().remove_file(&uuid)?;
|
||||
}
|
||||
|
||||
let mmid = Mmid::new_random();
|
||||
let file_type = file_format::FileFormat::from_file(&new_filename).unwrap();
|
||||
|
||||
let constructed_file = MochiFile::new(
|
||||
mmid.clone(),
|
||||
chunked_info.1.name,
|
||||
file_type.media_type().to_string(),
|
||||
hash,
|
||||
now,
|
||||
now + chunked_info.1.expire_duration,
|
||||
);
|
||||
|
||||
main_db
|
||||
.write()
|
||||
.unwrap()
|
||||
.insert(&mmid, constructed_file.clone());
|
||||
|
||||
Ok(Json(constructed_file))
|
||||
}
|
||||
|
||||
#[get("/upload/websocket?<name>&<size>&<duration>")]
|
||||
pub async fn websocket_upload(
|
||||
ws: rocket_ws::WebSocket,
|
||||
main_db: &State<Arc<RwLock<Mochibase>>>,
|
||||
chunk_db: &State<Arc<RwLock<Chunkbase>>>,
|
||||
settings: &State<Settings>,
|
||||
name: String,
|
||||
size: u64,
|
||||
duration: i64, // Duration in seconds
|
||||
) -> Result<rocket_ws::Channel<'static>, Json<ChunkedResponse>> {
|
||||
let max_filesize = settings.max_filesize;
|
||||
let expire_duration = TimeDelta::seconds(duration);
|
||||
if size > max_filesize {
|
||||
return Err(Json(ChunkedResponse::failure("File too large")));
|
||||
}
|
||||
if settings.duration.restrict_to_allowed
|
||||
&& !settings
|
||||
.duration
|
||||
.allowed
|
||||
.contains(&expire_duration)
|
||||
{
|
||||
return Err(Json(ChunkedResponse::failure("Duration not allowed")));
|
||||
}
|
||||
if expire_duration > settings.duration.maximum {
|
||||
return Err(Json(ChunkedResponse::failure("Duration too large")));
|
||||
}
|
||||
|
||||
let file_info = ChunkedInfo {
|
||||
name,
|
||||
size,
|
||||
expire_duration,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let uuid = chunk_db.write().unwrap().new_file(
|
||||
file_info,
|
||||
&settings.temp_dir,
|
||||
TimeDelta::seconds(30)
|
||||
).map_err(|e| Json(ChunkedResponse::failure(e.to_string().as_str())))?;
|
||||
let info = chunk_db.read().unwrap().get_file(&uuid).unwrap().clone();
|
||||
|
||||
let chunk_db = Arc::clone(chunk_db);
|
||||
let main_db = Arc::clone(main_db);
|
||||
let file_dir = settings.file_dir.clone();
|
||||
let mut file = fs::File::create(&info.1.path).await.unwrap();
|
||||
|
||||
Ok(ws.channel(move |mut stream| Box::pin(async move {
|
||||
let mut offset = 0;
|
||||
let mut hasher = blake3::Hasher::new();
|
||||
while let Some(message) = stream.next().await {
|
||||
if let Ok(m) = message.as_ref() {
|
||||
if m.is_empty() {
|
||||
// We're finished here
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let message = message.unwrap().into_data();
|
||||
offset += message.len() as u64;
|
||||
if (offset > info.1.size) | (offset > max_filesize) {
|
||||
break
|
||||
}
|
||||
|
||||
hasher.update(&message);
|
||||
|
||||
stream.send(rocket_ws::Message::binary(offset.to_le_bytes().as_slice())).await.unwrap();
|
||||
|
||||
file.write_all(&message).await.unwrap();
|
||||
|
||||
chunk_db.write().unwrap().extend_timeout(&uuid, TimeDelta::seconds(30));
|
||||
}
|
||||
|
||||
let now = Utc::now();
|
||||
let hash = hasher.finalize();
|
||||
let new_filename = file_dir.join(hash.to_string());
|
||||
|
||||
// If the hash does not exist in the database,
|
||||
// move the file to the backend, else, delete it
|
||||
// This also removes it from the chunk database
|
||||
if main_db.read().unwrap().get_hash(&hash).is_none() {
|
||||
chunk_db.write().unwrap().move_and_remove_file(&uuid, &new_filename)?;
|
||||
} else {
|
||||
chunk_db.write().unwrap().remove_file(&uuid)?;
|
||||
}
|
||||
|
||||
let mmid = Mmid::new_random();
|
||||
let file_type = file_format::FileFormat::from_file(&new_filename).unwrap();
|
||||
|
||||
let constructed_file = MochiFile::new(
|
||||
mmid.clone(),
|
||||
info.1.name,
|
||||
file_type.media_type().to_string(),
|
||||
hash,
|
||||
now,
|
||||
now + info.1.expire_duration,
|
||||
);
|
||||
|
||||
main_db
|
||||
.write()
|
||||
.unwrap()
|
||||
.insert(&mmid, constructed_file.clone());
|
||||
|
||||
file.flush().await.unwrap();
|
||||
|
||||
stream.send(rocket_ws::Message::Text(json::serde_json::ser::to_string(&constructed_file).unwrap())).await?;
|
||||
stream.close(None).await?;
|
||||
|
||||
Ok(())
|
||||
})))
|
||||
}
|
|
@ -1,146 +0,0 @@
|
|||
use std::{
|
||||
fs,
|
||||
path::PathBuf,
|
||||
sync::{Arc, RwLock},
|
||||
};
|
||||
|
||||
use chrono::TimeDelta;
|
||||
use confetti_box::{
|
||||
database::{clean_database, Chunkbase, Mochibase},
|
||||
endpoints, pages, resources,
|
||||
settings::Settings,
|
||||
};
|
||||
use log::info;
|
||||
use rocket::{
|
||||
data::ToByteUnit as _,
|
||||
routes,
|
||||
tokio::{self, select, sync::broadcast::Receiver, time},
|
||||
};
|
||||
|
||||
#[rocket::main]
|
||||
async fn main() {
|
||||
// Get or create config file
|
||||
let config = Settings::open(&"./settings.toml").expect("Could not open settings file");
|
||||
|
||||
if !config.temp_dir.try_exists().is_ok_and(|e| e) {
|
||||
fs::create_dir_all(config.temp_dir.clone()).expect("Failed to create temp directory");
|
||||
}
|
||||
|
||||
if !config.file_dir.try_exists().is_ok_and(|e| e) {
|
||||
fs::create_dir_all(config.file_dir.clone()).expect("Failed to create file directory");
|
||||
}
|
||||
|
||||
// Set rocket configuration settings
|
||||
let rocket_config = rocket::Config {
|
||||
address: config.server.address.parse().expect("IP address invalid"),
|
||||
port: config.server.port,
|
||||
temp_dir: config.temp_dir.clone().into(),
|
||||
limits: rocket::data::Limits::default()
|
||||
.limit("data-form", config.max_filesize.bytes())
|
||||
.limit("file", config.max_filesize.bytes()),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let database = Arc::new(RwLock::new(
|
||||
Mochibase::open_or_new(&config.database_path).expect("Failed to open or create database"),
|
||||
));
|
||||
let chunkbase = Arc::new(RwLock::new(Chunkbase::default()));
|
||||
let local_db = database.clone();
|
||||
let local_chunk = chunkbase.clone();
|
||||
|
||||
let (shutdown, rx) = tokio::sync::broadcast::channel(1);
|
||||
// Clean the database every 2 minutes
|
||||
tokio::spawn({
|
||||
let cleaner_db = database.clone();
|
||||
let file_path = config.file_dir.clone();
|
||||
async move { clean_loop(cleaner_db, file_path, rx).await }
|
||||
});
|
||||
tokio::spawn({
|
||||
let chunk_db = local_chunk.clone();
|
||||
let rx2 = shutdown.subscribe();
|
||||
async move { clean_chunks(chunk_db, rx2).await }
|
||||
});
|
||||
|
||||
let rocket = rocket::build()
|
||||
.mount(
|
||||
config.server.root_path.clone() + "/",
|
||||
routes![
|
||||
confetti_box::home,
|
||||
pages::api_info,
|
||||
pages::about,
|
||||
resources::favicon_svg,
|
||||
resources::favicon_ico,
|
||||
resources::form_handler_js,
|
||||
resources::stylesheet,
|
||||
resources::font_static,
|
||||
],
|
||||
)
|
||||
.mount(
|
||||
config.server.root_path.clone() + "/",
|
||||
routes![
|
||||
confetti_box::chunked_upload_start,
|
||||
confetti_box::chunked_upload_continue,
|
||||
confetti_box::chunked_upload_finish,
|
||||
confetti_box::websocket_upload,
|
||||
endpoints::server_info,
|
||||
endpoints::file_info,
|
||||
endpoints::lookup_mmid,
|
||||
endpoints::lookup_mmid_noredir,
|
||||
endpoints::lookup_mmid_name,
|
||||
],
|
||||
)
|
||||
.manage(database)
|
||||
.manage(chunkbase)
|
||||
.manage(config)
|
||||
.configure(rocket_config)
|
||||
.launch()
|
||||
.await;
|
||||
|
||||
// Ensure the server gracefully shuts down
|
||||
rocket.expect("Server failed to shutdown gracefully");
|
||||
|
||||
info!("Stopping database cleaning thread...");
|
||||
shutdown.send(()).expect("Failed to stop cleaner thread.");
|
||||
info!("Stopping database cleaning thread completed successfully.");
|
||||
|
||||
info!("Saving database on shutdown...");
|
||||
local_db
|
||||
.write()
|
||||
.unwrap()
|
||||
.save()
|
||||
.expect("Failed to save database");
|
||||
info!("Saving database completed successfully.");
|
||||
|
||||
info!("Deleting chunk data on shutdown...");
|
||||
local_chunk
|
||||
.write()
|
||||
.unwrap()
|
||||
.delete_all()
|
||||
.expect("Failed to delete chunks");
|
||||
info!("Deleting chunk data completed successfully.");
|
||||
}
|
||||
|
||||
/// A loop to clean the database periodically.
|
||||
pub async fn clean_loop(
|
||||
main_db: Arc<RwLock<Mochibase>>,
|
||||
file_path: PathBuf,
|
||||
mut shutdown_signal: Receiver<()>,
|
||||
) {
|
||||
let mut interval = time::interval(TimeDelta::minutes(2).to_std().unwrap());
|
||||
loop {
|
||||
select! {
|
||||
_ = interval.tick() => clean_database(&main_db, &file_path),
|
||||
_ = shutdown_signal.recv() => break,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn clean_chunks(chunk_db: Arc<RwLock<Chunkbase>>, mut shutdown_signal: Receiver<()>) {
|
||||
let mut interval = time::interval(TimeDelta::seconds(30).to_std().unwrap());
|
||||
loop {
|
||||
select! {
|
||||
_ = interval.tick() => {let _ = chunk_db.write().unwrap().delete_timed_out();},
|
||||
_ = shutdown_signal.recv() => break,
|
||||
};
|
||||
}
|
||||
}
|
|
@ -1,219 +0,0 @@
|
|||
use maud::{html, Markup, DOCTYPE};
|
||||
use rocket::{get, State};
|
||||
|
||||
use crate::settings::Settings;
|
||||
|
||||
pub fn head(page_title: &str) -> Markup {
|
||||
html! {
|
||||
(DOCTYPE)
|
||||
meta charset="UTF-8";
|
||||
meta name="viewport" content="width=device-width, initial-scale=1";
|
||||
title { (page_title) }
|
||||
link rel="icon" type="image/svg+xml" href="/favicon.svg";
|
||||
link rel="stylesheet" href="/resources/main.css";
|
||||
link rel="preload" href="/resources/fonts/Roboto.woff2" as="font" type="font/woff2" crossorigin;
|
||||
link rel="preload" href="/resources/fonts/FiraCode.woff2" as="font" type="font/woff2" crossorigin;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn footer() -> Markup {
|
||||
let hash = match option_env!("VERGEN_GIT_SHA") {
|
||||
Some(hash) => &hash[0..7],
|
||||
None => "",
|
||||
};
|
||||
|
||||
let pkg_version = env!("CARGO_PKG_VERSION");
|
||||
let hash_link = "https://github.com/Dangoware/confetti-box/commit/".to_string() + hash;
|
||||
|
||||
html! {
|
||||
footer {
|
||||
div {
|
||||
p {a href="/" {"Home"}}
|
||||
p {a href="/about" {"About"}}
|
||||
p {a href="/api" {"API"}}
|
||||
p {a href="https://github.com/Dangoware/confetti-box" {"Source"}}
|
||||
p {a href="https://github.com/Dangoware/" {"Dangoware"}}
|
||||
}
|
||||
p.version { "Running Confetti-Box v" (pkg_version) " " }
|
||||
|
||||
@if !hash.is_empty() {
|
||||
p.version style="font-size: 0.8em" { "(" a style="font-family:'Fira Code'" href=(hash_link) {(hash)} " - " (env!("VERGEN_BUILD_DATE")) ")" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/api")]
|
||||
pub fn api_info(settings: &State<Settings>) -> Markup {
|
||||
let domain = &settings.server.domain;
|
||||
let root = &settings.server.root_path;
|
||||
html! {
|
||||
(head("Confetti-Box | API"))
|
||||
|
||||
center {
|
||||
h1 { "API Information" }
|
||||
hr;
|
||||
|
||||
div style="text-align: left;" {
|
||||
p {
|
||||
"Confetti-Box is designed to be simple to access using its
|
||||
API. All endpoints are accessed following "
|
||||
code{"https://"(domain) (root)} ". All responses are encoded
|
||||
in JSON. MMIDs are a unique identifier for a file returned
|
||||
by the server after a successful upload. All datetimes are
|
||||
in UTC."
|
||||
}
|
||||
p {
|
||||
"The following endpoints are supported:"
|
||||
}
|
||||
|
||||
hr;
|
||||
h2 { code {"/upload/chunked"} }
|
||||
pre { r#"POST JSON{"name":string, "size":int, "expire_duration":int} -> JSON"# }
|
||||
p {
|
||||
"Start here to upload a file. POST some JSON containing the
|
||||
required variables to this endpoint, and you will recieve a
|
||||
UUID and a few other items which you can use to send the
|
||||
follow up requests to actually complete the upload."
|
||||
}
|
||||
p {
|
||||
"Example successful response:"
|
||||
}
|
||||
pre {
|
||||
"{\n\t\"status\": true,\n\t\"message\": \"\",\n\t\"uuid\": \"ca4614b1-04d5-457b-89af-a4e00576f701\",\n\t\"chunk_size\": 20000000\n}"
|
||||
}
|
||||
p {"Example failure response:"}
|
||||
pre {
|
||||
"{\n\t\"status\": false,\n\t\"message\": \"Duration invalid\",\n}"
|
||||
}
|
||||
|
||||
hr;
|
||||
h2 { code {"/upload/chunked/<uuid>?chunk=<chunk>"} }
|
||||
pre { r#"POST <file data> -> ()"# }
|
||||
p {
|
||||
"After completing the " code {"/upload/chunked"} " request,
|
||||
upload data in " code {"chunk_size"} " chunks to this
|
||||
endpoint using the UUID obtained from the initial request.
|
||||
The chunk number is the position in the file in chunks.
|
||||
The client MUST perform as many of these transfers as it
|
||||
takes to upload the entire file. Any duplicated chunks will
|
||||
be rejected. Any rejection means that the file will be
|
||||
deleted and the client SHOULD restart the transaction from
|
||||
the beginning. The client SHOULD retry sending the chunk on
|
||||
network errors."
|
||||
}
|
||||
|
||||
hr;
|
||||
h2 { code {"/upload/chunked/<uuid>?finish"} }
|
||||
pre { r#"GET -> JSON"# }
|
||||
p {
|
||||
"Once all the chunks have been uploaded, finish the upload
|
||||
by sending a GET request to this endpoint."
|
||||
}
|
||||
p {"Example successful response:"}
|
||||
pre {
|
||||
"{\n\t\"mmid\": \"uVFNeajm\",\n\t\"name\": \"1600-1200.jpg\",\n\t\"mime_type\": \"image/jpeg\",\n\t\"hash\": \"8f92924d52e796a82fd7709b43f5e907949e7098f5b4bc94b314c0bd831e7719\",\n\t\"upload_datetime\": \"2024-11-04T13:23:20.592090428Z\",\n\t\"expiry_datetime\": \"2024-11-04T19:23:20.592090428Z\"\n}"
|
||||
}
|
||||
|
||||
|
||||
hr;
|
||||
h2 { code {"/info"} }
|
||||
pre { r#"GET -> JSON"# }
|
||||
p {
|
||||
"Returns the capabilities of the server."
|
||||
}
|
||||
p {"Example response:"}
|
||||
pre {
|
||||
"{\n\t\"max_filesize\": 5000000000,\n\t\"max_duration\": 259200,\n\t\"default_duration\": 21600,\n\t\"allowed_durations\": [\n\t\t3600,\n\t\t21600,\n\t\t86400,\n\t\t172800\n\t]\n}"
|
||||
}
|
||||
|
||||
hr;
|
||||
h2 { code {"/info/<mmid>"} }
|
||||
pre { r#"GET -> JSON"# }
|
||||
p {
|
||||
"Returns information about a file by its MMID."
|
||||
}
|
||||
p {"Example response:"}
|
||||
pre {
|
||||
"{\n\t\"mmid\": \"xNLF6ogx\",\n\t\"name\": \"1600-1200.jpg\",\n\t\"mime_type\": \"image/png\",\n\t\"hash\": \"2e8e0a493ef99dfd950e870e319213d33573f64ba32b5a5399dd6c79c7d5cf00\",\n\t\"upload_datetime\": \"2024-10-29T22:09:48.648562311Z\",\n\t\"expiry_datetime\": \"2024-10-30T04:09:48.648562311Z\"\n}"
|
||||
}
|
||||
|
||||
hr;
|
||||
h2 { code {"/f/<mmid>"} }
|
||||
pre { r#"GET mmid=MMID -> Redirect or File"# }
|
||||
p {
|
||||
"By default issues a redirect to the full URL for a file. This
|
||||
behavior can be modified by appending " code{"?noredir"} " to
|
||||
the end of this request, like " code{"/f/<mmid>?noredir"} ",
|
||||
in which case it behaves just like " code{"/f/<mmid>/<filename>"}
|
||||
". Appending " code{"download"} " forces the browser to download
|
||||
the file regardless of MIME type."
|
||||
}
|
||||
p {"Example default response:"}
|
||||
pre {"303: /f/xNLF6ogx/1600-1200.jpg"}
|
||||
|
||||
p {"Example modified response:"}
|
||||
pre {"<File Bytes>"}
|
||||
|
||||
hr;
|
||||
h2 { code {"/f/<mmid>/<filename>"} }
|
||||
pre { r#"GET mmid=MMID filename=String -> File"# }
|
||||
p {
|
||||
"Returns the contents of the file corresponding to the
|
||||
requested MMID, but with the corresponding filename so as
|
||||
to preserve it for downloads. Mostly for use by browsers."
|
||||
}
|
||||
p {"Example response:"}
|
||||
pre {
|
||||
"<File Bytes>"
|
||||
}
|
||||
}
|
||||
|
||||
hr;
|
||||
(footer())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/about")]
|
||||
pub fn about() -> Markup {
|
||||
html! {
|
||||
(head("Confetti-Box | About"))
|
||||
|
||||
center {
|
||||
h1 { "What's this?" }
|
||||
hr;
|
||||
|
||||
div style="text-align: left;" {
|
||||
p {
|
||||
"Confetti-Box is a temporary file host, inspired by "
|
||||
a target="_blank" href="//litterbox.catbox.moe" {"Litterbox"}
|
||||
" and " a target="_blank" href="//uguu.se" {"Uguu"} ".
|
||||
It is designed to be simple to use and host! Files are stored
|
||||
until they expire, at which point they are deleted to free up
|
||||
space on the server."
|
||||
}
|
||||
|
||||
p {
|
||||
"Confetti-Box was created by and is maintained by "
|
||||
a target="_blank" href="#dangowaresite" {"Dangoware"} " and is open-source
|
||||
software available under the terms of the "
|
||||
a target="_blank" href="//www.gnu.org/licenses/agpl-3.0.txt" {"AGPL-3.0 license"}
|
||||
". The source code is available on "
|
||||
a target="_blank" href="//github.com/Dangoware/confetti-box" {"GitHub"}
|
||||
". The AGPL is very restrictive when it comes to use on
|
||||
servers, so if you would like to use Confetti-Box for a
|
||||
commercial purpose, please contact Dangoware."
|
||||
}
|
||||
|
||||
p {
|
||||
"If you upload files which are disallowed either legally or
|
||||
by the terms of this particular service, they will be removed."
|
||||
}
|
||||
}
|
||||
|
||||
hr;
|
||||
(footer())
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,42 +0,0 @@
|
|||
use rocket::{
|
||||
get,
|
||||
http::ContentType,
|
||||
response::content::{RawCss, RawJavaScript},
|
||||
};
|
||||
|
||||
#[get("/resources/fonts/<font>")]
|
||||
pub fn font_static(font: &str) -> Option<(ContentType, &'static [u8])> {
|
||||
match font {
|
||||
"Roboto.woff2" => Some((
|
||||
ContentType::WOFF2,
|
||||
include_bytes!("../web/fonts/roboto.woff2"),
|
||||
)),
|
||||
"FiraCode.woff2" => Some((
|
||||
ContentType::WOFF2,
|
||||
include_bytes!("../web/fonts/fira-code.woff2"),
|
||||
)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Stylesheet
|
||||
#[get("/resources/main.css")]
|
||||
pub fn stylesheet() -> RawCss<&'static str> {
|
||||
RawCss(include_str!("../web/main.css"))
|
||||
}
|
||||
|
||||
/// Upload handler javascript
|
||||
#[get("/resources/request.js")]
|
||||
pub fn form_handler_js() -> RawJavaScript<&'static str> {
|
||||
RawJavaScript(include_str!("../web/request.js"))
|
||||
}
|
||||
|
||||
#[get("/favicon.svg")]
|
||||
pub fn favicon_svg() -> (ContentType, &'static str) {
|
||||
(ContentType::SVG, include_str!("../web/favicon.svg"))
|
||||
}
|
||||
|
||||
#[get("/favicon.ico")]
|
||||
pub fn favicon_ico() -> (ContentType, &'static [u8]) {
|
||||
(ContentType::Icon, include_bytes!("../web/favicon.ico"))
|
||||
}
|
Binary file not shown.
Before Width: | Height: | Size: 17 KiB |
Binary file not shown.
Before Width: | Height: | Size: 5.7 KiB |
Binary file not shown.
Binary file not shown.
|
@ -1,402 +0,0 @@
|
|||
/*jshint esversion: 11 */
|
||||
|
||||
const TOO_LARGE_TEXT = "Too large!";
|
||||
const ZERO_TEXT = "File is blank!";
|
||||
const ERROR_TEXT = "Error!";
|
||||
const USERAGENT = navigator.userAgent;
|
||||
const USE_CHUNKS_COMPAT = /Ladybird/.test(USERAGENT);
|
||||
|
||||
async function formSubmit() {
|
||||
const form = document.getElementById("uploadForm");
|
||||
const files = form.elements.fileUpload.files;
|
||||
const duration = form.elements.duration.value;
|
||||
const maxSize = form.elements.fileUpload.dataset.maxFilesize;
|
||||
|
||||
await sendFiles(files, duration, maxSize);
|
||||
|
||||
// Reset the form file data since we've successfully submitted it
|
||||
form.elements.fileUpload.value = "";
|
||||
}
|
||||
|
||||
async function dragDropSubmit(evt) {
|
||||
fileButton.style.backgroundColor = "#84E5FF";
|
||||
fileButton.style.removeProperty("transitionDuration");
|
||||
fileButton.style.removeProperty("scale");
|
||||
fileButton.style.removeProperty("transitionTimingFunction");
|
||||
|
||||
const form = document.getElementById("uploadForm");
|
||||
const duration = form.elements.duration.value;
|
||||
const maxSize = form.elements.fileUpload.dataset.maxFilesize;
|
||||
|
||||
evt.preventDefault();
|
||||
|
||||
const files = [];
|
||||
if (evt.dataTransfer.items) {
|
||||
// Use DataTransferItemList interface to access the file(s)
|
||||
[...evt.dataTransfer.items].forEach((item, _) => {
|
||||
// If dropped items aren't files, reject them
|
||||
if (item.kind === "file") {
|
||||
files.push(item.getAsFile());
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// Use DataTransfer interface to access the file(s)
|
||||
[...evt.dataTransfer.files].forEach((file, _) => {
|
||||
files.push(file.name);
|
||||
});
|
||||
}
|
||||
|
||||
await sendFiles(files, duration, maxSize);
|
||||
}
|
||||
|
||||
async function pasteSubmit(evt) {
|
||||
const form = document.getElementById("uploadForm");
|
||||
const duration = form.elements.duration.value;
|
||||
const maxSize = form.elements.fileUpload.dataset.maxFilesize;
|
||||
|
||||
const files = [];
|
||||
const len = evt.clipboardData.files.length;
|
||||
for (let i = 0; i < len; i++) {
|
||||
const file = evt.clipboardData.files[i];
|
||||
files.push(file);
|
||||
}
|
||||
|
||||
await sendFiles(files, duration, maxSize);
|
||||
}
|
||||
|
||||
async function sendFiles(files, duration, maxSize) {
|
||||
if (USE_CHUNKS_COMPAT) {
|
||||
console.warn("This browser is known to have problems with WebSockets, falling back to chunked upload");
|
||||
}
|
||||
|
||||
const inProgressUploads = new Set();
|
||||
const concurrencyLimit = 10;
|
||||
|
||||
// Try to get a wake-lock
|
||||
let wakeLock = null;
|
||||
try {
|
||||
wakeLock = await navigator.wakeLock.request("screen");
|
||||
} catch (err) {
|
||||
console.warn("Failed to set wake-lock!");
|
||||
}
|
||||
|
||||
let start = performance.now();
|
||||
for (const file of files) {
|
||||
console.log("Started upload for", file.name);
|
||||
|
||||
// Start the upload and add it to the set of in-progress uploads
|
||||
let uploadPromise;
|
||||
if ('WebSocket' in window && window.WebSocket.CLOSING === 2 && !USE_CHUNKS_COMPAT) {
|
||||
console.log("Uploading file using Websockets");
|
||||
uploadPromise = uploadFileWebsocket(file, duration, maxSize);
|
||||
} else {
|
||||
console.log("Uploading file using Chunks");
|
||||
uploadPromise = uploadFileChunked(file, duration, maxSize);
|
||||
}
|
||||
inProgressUploads.add(uploadPromise);
|
||||
|
||||
// Once an upload finishes, remove it from the set
|
||||
uploadPromise.finally(() => inProgressUploads.delete(uploadPromise));
|
||||
|
||||
// If we reached the concurrency limit, wait for one of the uploads to complete
|
||||
if (inProgressUploads.size >= concurrencyLimit) {
|
||||
await Promise.race(inProgressUploads);
|
||||
}
|
||||
}
|
||||
|
||||
// Wait for any remaining uploads to complete
|
||||
await Promise.allSettled(inProgressUploads);
|
||||
let end = performance.now();
|
||||
console.log(end - start);
|
||||
|
||||
try {
|
||||
wakeLock.release().then(() => {
|
||||
wakeLock = null;
|
||||
});
|
||||
} catch (err) {
|
||||
console.warn("Failed to modify wake-lock!");
|
||||
}
|
||||
}
|
||||
|
||||
async function uploadFileChunked(file, duration, maxSize) {
|
||||
const [linkRow, progressBar, progressText] = await addNewToList(file.name);
|
||||
if (file.size > maxSize) {
|
||||
console.error("Provided file is too large", file.size, "bytes; max", maxSize, "bytes");
|
||||
makeErrored(progressBar, progressText, linkRow, TOO_LARGE_TEXT);
|
||||
return;
|
||||
} else if (file.size == 0) {
|
||||
console.error("Provided file has 0 bytes");
|
||||
makeErrored(progressBar, progressText, linkRow, ZERO_TEXT);
|
||||
return;
|
||||
}
|
||||
|
||||
// Get preliminary upload information
|
||||
let chunkedResponse;
|
||||
try {
|
||||
const response = await fetch("/upload/chunked", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
"name": file.name,
|
||||
"size": file.size,
|
||||
"expire_duration": parseInt(duration),
|
||||
}),
|
||||
});
|
||||
if (!response.ok) {
|
||||
throw new Error(`Response status: ${response.status}`);
|
||||
}
|
||||
chunkedResponse = await response.json();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
makeErrored(progressBar, progressText, linkRow, ERROR_TEXT);
|
||||
}
|
||||
|
||||
// Upload the file in `chunk_size` chunks
|
||||
const chunkUploads = new Set();
|
||||
const progressValues = [];
|
||||
const concurrencyLimit = 5;
|
||||
for (let chunk_num = 0; chunk_num < Math.floor(file.size / chunkedResponse.chunk_size) + 1; chunk_num ++) {
|
||||
const offset = Math.floor(chunk_num * chunkedResponse.chunk_size);
|
||||
const chunk = file.slice(offset, offset + chunkedResponse.chunk_size);
|
||||
const url = "/upload/chunked/" + chunkedResponse.uuid + "?chunk=" + chunk_num;
|
||||
const ID = progressValues.push(0);
|
||||
|
||||
let upload = new Promise(function (resolve, reject) {
|
||||
let request = new XMLHttpRequest();
|
||||
request.open("POST", url, true);
|
||||
request.upload.addEventListener('progress',
|
||||
(p) => {uploadProgress(p, progressBar, progressText, progressValues, file.size, ID);}, true
|
||||
);
|
||||
|
||||
request.onload = (e) => {
|
||||
if (e.target.status >= 200 && e.target.status < 300) {
|
||||
resolve(request.response);
|
||||
} else {
|
||||
reject({status: e.target.status, statusText: request.statusText});
|
||||
}
|
||||
};
|
||||
request.onerror = (e) => {
|
||||
reject({status: e.target.status, statusText: request.statusText})
|
||||
};
|
||||
request.send(chunk);
|
||||
});
|
||||
|
||||
chunkUploads.add(upload);
|
||||
upload.finally(() => chunkUploads.delete(upload));
|
||||
if (chunkUploads.size >= concurrencyLimit) {
|
||||
await Promise.race(chunkUploads);
|
||||
}
|
||||
}
|
||||
await Promise.allSettled(chunkUploads);
|
||||
|
||||
// Finish the request and update the progress box
|
||||
const result = await fetch("/upload/chunked/" + chunkedResponse.uuid + "?finish");
|
||||
let responseJson = null;
|
||||
if (result.status == 200) {
|
||||
responseJson = await result.json()
|
||||
}
|
||||
uploadComplete(responseJson, result.status, progressBar, progressText, linkRow);
|
||||
}
|
||||
|
||||
async function uploadFileWebsocket(file, duration, maxSize) {
|
||||
|
||||
const [linkRow, progressBar, progressText] = await addNewToList(file.name);
|
||||
if (file.size > maxSize) {
|
||||
console.error("Provided file is too large", file.size, "bytes; max", maxSize, "bytes");
|
||||
makeErrored(progressBar, progressText, linkRow, TOO_LARGE_TEXT);
|
||||
return;
|
||||
} else if (file.size == 0) {
|
||||
console.error("Provided file has 0 bytes");
|
||||
makeErrored(progressBar, progressText, linkRow, ZERO_TEXT);
|
||||
return;
|
||||
}
|
||||
|
||||
// Open the websocket connection
|
||||
let loc = window.location, new_uri;
|
||||
if (loc.protocol === "https:") {
|
||||
new_uri = "wss:";
|
||||
} else {
|
||||
new_uri = "ws:";
|
||||
}
|
||||
new_uri += "//" + loc.host;
|
||||
new_uri += "/upload/websocket?name=" + file.name +"&size=" + file.size + "&duration=" + parseInt(duration);
|
||||
const socket = new WebSocket(new_uri);
|
||||
socket.binaryType = "arraybuffer";
|
||||
|
||||
// Ensure that the websocket gets closed if the page is unloaded
|
||||
window.onbeforeunload = function() {
|
||||
socket.onclose = function () {};
|
||||
socket.close();
|
||||
};
|
||||
|
||||
const chunkSize = 5_000_000;
|
||||
socket.addEventListener("open", (_event) => {
|
||||
for (let chunk_num = 0; chunk_num < Math.floor(file.size / chunkSize) + 1; chunk_num ++) {
|
||||
const offset = Math.floor(chunk_num * chunkSize);
|
||||
const chunk = file.slice(offset, offset + chunkSize);
|
||||
|
||||
socket.send(chunk);
|
||||
}
|
||||
|
||||
socket.send("");
|
||||
});
|
||||
|
||||
return new Promise(function(resolve, _reject) {
|
||||
socket.addEventListener("message", (event) => {
|
||||
if (event.data instanceof ArrayBuffer) {
|
||||
const view = new DataView(event.data);
|
||||
console.log(view.getBigUint64(0, true));
|
||||
const progress = parseInt(view.getBigUint64(0, true));
|
||||
uploadProgressWebsocket(progress, progressBar, progressText, file.size);
|
||||
} else {
|
||||
// It's so over
|
||||
if (!socket.CLOSED) {
|
||||
socket.close();
|
||||
}
|
||||
|
||||
const response = JSON.parse(event.data);
|
||||
uploadComplete(response, 200, progressBar, progressText, linkRow);
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function addNewToList(origFileName) {
|
||||
const uploadedFilesDisplay = document.getElementById("uploadedFilesDisplay");
|
||||
const linkRow = uploadedFilesDisplay.appendChild(document.createElement("div"));
|
||||
const fileName = linkRow.appendChild(document.createElement("p"));
|
||||
const progressBar = linkRow.appendChild(document.createElement("progress"));
|
||||
const progressTxt = linkRow.appendChild(document.createElement("p"));
|
||||
|
||||
fileName.textContent = origFileName;
|
||||
fileName.classList.add("file_name");
|
||||
progressTxt.classList.add("status");
|
||||
progressTxt.textContent = "⏳";
|
||||
progressBar.max="100";
|
||||
|
||||
return [linkRow, progressBar, progressTxt];
|
||||
}
|
||||
|
||||
function uploadProgress(progress, progressBar, progressText, progressValues, fileSize, ID) {
|
||||
if (progress.lengthComputable) {
|
||||
progressValues[ID] = progress.loaded;
|
||||
const progressTotal = progressValues.reduce((a, b) => a + b, 0);
|
||||
|
||||
const progressPercent = Math.floor((progressTotal / fileSize) * 100);
|
||||
if (progressPercent == 100) {
|
||||
progressBar.removeAttribute("value");
|
||||
progressText.textContent = "⏳";
|
||||
} else {
|
||||
progressBar.value = progressTotal;
|
||||
progressBar.max = fileSize;
|
||||
progressText.textContent = progressPercent + "%";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function uploadProgressWebsocket(bytesFinished, progressBar, progressText, fileSize) {
|
||||
const progressPercent = Math.floor((bytesFinished / fileSize) * 100);
|
||||
if (progressPercent == 100) {
|
||||
progressBar.removeAttribute("value");
|
||||
progressText.textContent = "⏳";
|
||||
} else {
|
||||
progressBar.value = bytesFinished;
|
||||
progressBar.max = fileSize;
|
||||
progressText.textContent = progressPercent + "%";
|
||||
}
|
||||
}
|
||||
|
||||
async function uploadComplete(responseJson, status, progressBar, progressText, linkRow) {
|
||||
if (status === 200) {
|
||||
console.log("Successfully uploaded file", responseJson);
|
||||
makeFinished(progressBar, progressText, linkRow, responseJson);
|
||||
} else if (status === 413) {
|
||||
makeErrored(progressBar, progressText, linkRow, TOO_LARGE_TEXT);
|
||||
} else {
|
||||
makeErrored(progressBar, progressText, linkRow, ERROR_TEXT);
|
||||
}
|
||||
}
|
||||
|
||||
function makeErrored(progressBar, progressText, linkRow, errorMessage) {
|
||||
progressText.textContent = errorMessage;
|
||||
progressBar.style.display = "none";
|
||||
linkRow.classList.add("upload_failed");
|
||||
}
|
||||
|
||||
function makeFinished(progressBar, progressText, linkRow, response) {
|
||||
progressText.textContent = "";
|
||||
const link = progressText.appendChild(document.createElement("a"));
|
||||
link.textContent = response.mmid;
|
||||
link.href = "/f/" + response.mmid;
|
||||
link.target = "_blank";
|
||||
|
||||
let button = linkRow.appendChild(document.createElement("button"));
|
||||
button.textContent = "📝";
|
||||
let buttonTimeout = null;
|
||||
button.addEventListener('click', function(_e) {
|
||||
const mmid = response.mmid;
|
||||
if (buttonTimeout) {
|
||||
clearTimeout(buttonTimeout);
|
||||
}
|
||||
navigator.clipboard.writeText(
|
||||
window.location.protocol + "//" + window.location.host + "/f/" + mmid
|
||||
);
|
||||
button.textContent = "✅";
|
||||
buttonTimeout = setTimeout(function() {
|
||||
button.textContent = "📝";
|
||||
}, 750);
|
||||
});
|
||||
|
||||
progressBar.style.display = "none";
|
||||
linkRow.classList.add("upload_done");
|
||||
}
|
||||
|
||||
async function initEverything() {
|
||||
const durationBox = document.getElementById("durationBox");
|
||||
const durationButtons = durationBox.getElementsByTagName("button");
|
||||
for (const b of durationButtons) {
|
||||
b.addEventListener("click", function (_e) {
|
||||
if (this.classList.contains("selected")) {
|
||||
return;
|
||||
}
|
||||
document.getElementById("uploadForm").elements.duration.value = this.dataset.durationSeconds;
|
||||
let selected = this.parentNode.getElementsByClassName("selected");
|
||||
selected[0].classList.remove("selected");
|
||||
this.classList.add("selected");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// This is the entrypoint for everything basically
|
||||
document.addEventListener("DOMContentLoaded", function(_event) {
|
||||
// Respond to form submissions
|
||||
const form = document.getElementById("uploadForm");
|
||||
form.addEventListener("submit", formSubmit);
|
||||
|
||||
// Respond to file paste events
|
||||
window.addEventListener("paste", (event) => {
|
||||
pasteSubmit(event)
|
||||
});
|
||||
|
||||
// Respond to drag and drop stuff
|
||||
let fileButton = document.getElementById("fileButton");
|
||||
document.addEventListener("drop", (e) => {e.preventDefault();}, false);
|
||||
document.addEventListener("dragover", (e) => {e.preventDefault()}, false);
|
||||
fileButton.addEventListener("dragover", (e) => {
|
||||
e.preventDefault();
|
||||
fileButton.style.backgroundColor = "#9cff7e";
|
||||
fileButton.style.transitionDuration = "0.5s";
|
||||
fileButton.style.scale = "1.1";
|
||||
fileButton.style.transitionTimingFunction = "cubic-bezier(.23,-0.09,.52,1.62)";
|
||||
}, false);
|
||||
fileButton.addEventListener("dragleave", (e) => {
|
||||
e.preventDefault();
|
||||
fileButton.style.backgroundColor = "#84E5FF";
|
||||
fileButton.style.removeProperty("transitionDuration");
|
||||
fileButton.style.removeProperty("scale");
|
||||
fileButton.style.removeProperty("transitionTimingFunction");
|
||||
}, false);
|
||||
fileButton.addEventListener("drop", dragDropSubmit, false);
|
||||
|
||||
initEverything();
|
||||
});
|
|
@ -1,37 +0,0 @@
|
|||
[package]
|
||||
name = "confetti_cli"
|
||||
version = "0.1.1"
|
||||
description = "A simple command line interface to interact with a Confetti-Box instance."
|
||||
repository = "https://github.com/Dangoware/confetti-box"
|
||||
keywords = ["selfhost", "upload", "command_line"]
|
||||
categories = ["command-line-utilities"]
|
||||
authors.workspace = true
|
||||
license = "AGPL-3.0-or-later"
|
||||
edition = "2024"
|
||||
|
||||
[[bin]]
|
||||
name = "imu"
|
||||
path = "src/main.rs"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
base64 = "0.22.1"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
clap = { version = "4.5", features = ["derive", "unicode"] }
|
||||
directories = "6.0"
|
||||
futures-util = "0.3.31"
|
||||
indicatif = { version = "0.17", features = ["improved_unicode"] }
|
||||
owo-colors = { version = "4.1", features = ["supports-colors"] }
|
||||
reqwest = { version = "0.12", features = ["json", "stream"] }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
thiserror = "1.0"
|
||||
tokio = { version = "1.41", features = ["fs", "macros", "rt-multi-thread"] }
|
||||
tokio-tungstenite = { version = "0.26.2", features = ["native-tls"] }
|
||||
tokio-util = { version = "0.7", features = ["codec"] }
|
||||
toml = "0.8"
|
||||
url = { version = "2.5.4", features = ["serde"] }
|
||||
uuid = { version = "1.11", features = ["serde", "v4"] }
|
|
@ -1,662 +0,0 @@
|
|||
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||
Version 3, 19 November 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU Affero General Public License is a free, copyleft license for
|
||||
software and other kinds of works, specifically designed to ensure
|
||||
cooperation with the community in the case of network server software.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
our General Public Licenses are intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
Developers that use our General Public Licenses protect your rights
|
||||
with two steps: (1) assert copyright on the software, and (2) offer
|
||||
you this License which gives you legal permission to copy, distribute
|
||||
and/or modify the software.
|
||||
|
||||
A secondary benefit of defending all users' freedom is that
|
||||
improvements made in alternate versions of the program, if they
|
||||
receive widespread use, become available for other developers to
|
||||
incorporate. Many developers of free software are heartened and
|
||||
encouraged by the resulting cooperation. However, in the case of
|
||||
software used on network servers, this result may fail to come about.
|
||||
The GNU General Public License permits making a modified version and
|
||||
letting the public access it on a server without ever releasing its
|
||||
source code to the public.
|
||||
|
||||
The GNU Affero General Public License is designed specifically to
|
||||
ensure that, in such cases, the modified source code becomes available
|
||||
to the community. It requires the operator of a network server to
|
||||
provide the source code of the modified version running there to the
|
||||
users of that server. Therefore, public use of a modified version, on
|
||||
a publicly accessible server, gives the public access to the source
|
||||
code of the modified version.
|
||||
|
||||
An older license, called the Affero General Public License and
|
||||
published by Affero, was designed to accomplish similar goals. This is
|
||||
a different license, not a version of the Affero GPL, but Affero has
|
||||
released a new version of the Affero GPL which permits relicensing under
|
||||
this license.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU Affero General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, if you modify the
|
||||
Program, your modified version must prominently offer all users
|
||||
interacting with it remotely through a computer network (if your version
|
||||
supports such interaction) an opportunity to receive the Corresponding
|
||||
Source of your version by providing access to the Corresponding Source
|
||||
from a network server at no charge, through some standard or customary
|
||||
means of facilitating copying of software. This Corresponding Source
|
||||
shall include the Corresponding Source for any work covered by version 3
|
||||
of the GNU General Public License that is incorporated pursuant to the
|
||||
following paragraph.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the work with which it is combined will remain governed by version
|
||||
3 of the GNU General Public License.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU Affero General Public License from time to time. Such new versions
|
||||
will be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU Affero General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU Affero General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU Affero General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU Affero General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU Affero General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Affero General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If your software can interact with users remotely through a computer
|
||||
network, you should also make sure that it provides a way for users to
|
||||
get its source. For example, if your program is a web application, its
|
||||
interface could display a "Source" link that leads users to an archive
|
||||
of the code. There are many ways you could offer source, and different
|
||||
solutions will be better for different programs; see section 13 for the
|
||||
specific requirements.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU AGPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
|
@ -1,824 +0,0 @@
|
|||
use std::{error::Error, fs, io::{self, Read, Write}, path::{Path, PathBuf}};
|
||||
|
||||
use base64::{prelude::BASE64_URL_SAFE, Engine};
|
||||
use chrono::{DateTime, Datelike, Local, Month, TimeDelta, Timelike, Utc};
|
||||
|
||||
use futures_util::{stream::FusedStream as _, SinkExt as _, StreamExt as _};
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
use owo_colors::OwoColorize;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use thiserror::Error;
|
||||
use tokio::{fs::File, io::{AsyncReadExt, AsyncWriteExt}, join, task::JoinSet};
|
||||
use tokio_tungstenite::{connect_async, tungstenite::{client::IntoClientRequest as _, Message}};
|
||||
use url::Url;
|
||||
use uuid::Uuid;
|
||||
use clap::{arg, builder::{styling::RgbColor, Styles}, Parser, Subcommand};
|
||||
use anyhow::{anyhow, bail, Context as _, Result};
|
||||
|
||||
const CLAP_STYLE: Styles = Styles::styled()
|
||||
.header(RgbColor::on_default(RgbColor(197,229,207)).italic())
|
||||
.usage(RgbColor::on_default(RgbColor(174,196,223)))
|
||||
.literal(RgbColor::on_default(RgbColor(246,199,219)))
|
||||
.placeholder(RgbColor::on_default(RgbColor(117,182,194)))
|
||||
.error(RgbColor::on_default(RgbColor(181,66,127)).underline());
|
||||
|
||||
// Paths used when running against a local test instance.
// NOTE(review): these appear to be debug-only defaults — confirm whether they
// are referenced outside debug builds before relying on them.
const DEBUG_CONFIG: &str = "test/config.toml";
const DEBUG_DOWNLOAD_DIR: &str = "test/downloads/";
||||
|
||||
// Top-level argument parser for the confetti-box CLI.
// Deliberately commented with `//` rather than `///`: clap derives user-visible
// help text from doc comments, and adding any would change `--help` output.
#[derive(Parser)]
#[command(name = "confetti_cli")]
#[command(version, about, long_about = None)]
#[command(styles = CLAP_STYLE)]
struct Cli {
    // The subcommand to execute (Upload / Set / Info / Download).
    #[command(subcommand)]
    command: Commands,
}
|
||||
|
||||
// Subcommands understood by the CLI.
// The `///` doc comments below are user-visible: clap uses them verbatim as
// help text for each command and argument, so they must not be edited.
#[derive(Subcommand)]
enum Commands {
    /// Upload files
    #[command(visible_alias="u")]
    Upload {
        /// Filename(s) to upload
        #[arg(value_name = "file(s)", required = true)]
        files: Vec<PathBuf>,

        /// Expiration length of the uploaded file
        // Parsed later by parse_time_string; must match one of the server's
        // allowed durations.
        #[arg(short, long, default_value = "6h")]
        duration: String,
    },

    /// Set config options
    Set {
        /// Set the username for a server which requires login
        #[arg(short, long, required = false)]
        username: Option<String>,
        /// Set the password for a server which requires login
        #[arg(short, long, required = false)]
        password: Option<String>,
        /// Set the URL of the server to connect to (assumes https://)
        #[arg(long, required = false)]
        url: Option<String>,
        /// Set the directory to download into by default
        // Accepts the literal value "default" to mean the platform's
        // Downloads directory (resolved in main).
        #[arg(value_name="directory", short_alias='d', long, required = false)]
        dl_dir: Option<String>,
    },

    /// Get server information manually
    Info,

    /// Download files
    #[command(visible_alias="d")]
    Download {
        /// MMID to download
        // Accepts either a bare 8-character MMID or a full "<url>/f/<mmid>" link.
        #[arg(value_name = "mmid(s)", required = true)]
        mmids: Vec<String>,
        // Optional output directory; falls back to the configured default
        // download directory when omitted.
        #[arg(short, long, value_name = "out", required = false)]
        out_directory: Option<PathBuf>
    },
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<()> {
|
||||
let cli = Cli::parse();
|
||||
let mut config = Config::open().unwrap();
|
||||
|
||||
match &cli.command {
|
||||
Commands::Upload { files, duration } => {
|
||||
let Some(url) = config.url.clone() else {
|
||||
exit_error(
|
||||
format!("URL is empty"),
|
||||
Some(format!("Please set it using the {} command", "set".truecolor(246,199,219).bold())),
|
||||
None,
|
||||
);
|
||||
};
|
||||
|
||||
get_info_if_expired(&mut config).await?;
|
||||
|
||||
let duration = match parse_time_string(&duration) {
|
||||
Ok(d) => d,
|
||||
Err(e) => return Err(anyhow!("Invalid duration: {e}")),
|
||||
};
|
||||
|
||||
if !config.info.as_ref().unwrap().allowed_durations.contains(&duration.num_seconds()) {
|
||||
let pretty_durations: Vec<String> = config.info.as_ref()
|
||||
.unwrap()
|
||||
.allowed_durations
|
||||
.clone()
|
||||
.iter()
|
||||
.map(|d| pretty_time_short(*d))
|
||||
.collect();
|
||||
|
||||
exit_error(
|
||||
format!("Duration not allowed."),
|
||||
Some(format!("Please choose from:")),
|
||||
Some(pretty_durations)
|
||||
);
|
||||
}
|
||||
|
||||
println!("Uploading...");
|
||||
for path in files {
|
||||
if !path.try_exists().is_ok_and(|t| t) {
|
||||
print_error_line(format!("The file {:#?} does not exist", path.truecolor(234, 129, 100)));
|
||||
continue;
|
||||
}
|
||||
|
||||
let name = path.file_name().unwrap().to_string_lossy();
|
||||
let response = upload_file(
|
||||
name.into_owned(),
|
||||
&path,
|
||||
&url,
|
||||
duration,
|
||||
&config.login
|
||||
).await.with_context(|| "Failed to upload").unwrap();
|
||||
|
||||
let datetime: DateTime<Local> = DateTime::from(response.expiry_datetime);
|
||||
let date = format!(
|
||||
"{} {}",
|
||||
Month::try_from(u8::try_from(datetime.month()).unwrap()).unwrap().name(),
|
||||
datetime.day(),
|
||||
);
|
||||
let time = format!("{:02}:{:02}", datetime.hour(), datetime.minute());
|
||||
println!(
|
||||
"{:>8} {}, {} (in {})\n{:>8} {}",
|
||||
"Expires:".truecolor(174,196,223).bold(), date, time, pretty_time_long(duration.num_seconds()),
|
||||
"URL:".truecolor(174,196,223).bold(), (url.to_string() + "/f/" + &response.mmid.0).underline()
|
||||
);
|
||||
}
|
||||
}
|
||||
Commands::Download { mmids, out_directory } => {
|
||||
let Some(url) = config.url else {
|
||||
exit_error(
|
||||
format!("URL is empty"),
|
||||
Some(format!("Please set it using the {} command", "set".truecolor(246,199,219).bold())),
|
||||
None,
|
||||
);
|
||||
};
|
||||
|
||||
let out_directory = if let Some(dir) = out_directory {
|
||||
dir
|
||||
} else {
|
||||
let ddir = &config.download_directory;
|
||||
if ddir.as_os_str().is_empty() {
|
||||
exit_error(
|
||||
"Default download directory is empty".into(),
|
||||
Some(format!("Please set it using the {} command", "set".truecolor(246,199,219).bold())),
|
||||
None,
|
||||
);
|
||||
} else if !ddir.exists() {
|
||||
exit_error(
|
||||
format!("Default download directory {} does not exist", ddir.display()),
|
||||
Some(format!("Please set it using the {} command", "set".truecolor(246,199,219).bold())),
|
||||
None,
|
||||
)
|
||||
} else {
|
||||
ddir
|
||||
}
|
||||
};
|
||||
|
||||
for mmid in mmids {
|
||||
let mmid = if mmid.len() != 8 {
|
||||
if mmid.contains(format!("{url}/f/").as_str()) {
|
||||
let mmid = mmid.replace(format!("{url}/f/").as_str(), "");
|
||||
if mmid.len() != 8 {
|
||||
exit_error("{mmid} is not a valid MMID".into(), Some("MMID must be 8 characters long".into()), None)
|
||||
} else {
|
||||
mmid
|
||||
}
|
||||
} else {
|
||||
exit_error("{mmid} is not a valid MMID".into(), Some("MMID must be 8 characters long".into()), None)
|
||||
}
|
||||
} else {
|
||||
unimplemented!();
|
||||
};
|
||||
|
||||
let client = Client::new();
|
||||
|
||||
let info = if let Ok(file) = if let Some(login) = &config.login {
|
||||
client.get(format!("{}/info/{mmid}", url))
|
||||
.basic_auth(&login.user, Some(&login.pass))
|
||||
} else {
|
||||
client.get(format!("{}/info/{mmid}", url))
|
||||
}
|
||||
.send()
|
||||
.await
|
||||
.unwrap()
|
||||
.json::<MochiFile>()
|
||||
.await {
|
||||
file
|
||||
} else {
|
||||
exit_error(format!("File with MMID {mmid} was not found"), None, None)
|
||||
};
|
||||
|
||||
let mut file_res = if let Some(login) = &config.login {
|
||||
client.get(format!("{}/f/{mmid}", url))
|
||||
.basic_auth(&login.user, Some(&login.pass))
|
||||
} else {
|
||||
client.get(format!("{}/f/{mmid}", url))
|
||||
}
|
||||
.send()
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let out_directory = out_directory.join(info.name);
|
||||
let mut out_file: File = tokio::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.read(true)
|
||||
.open(&out_directory).await
|
||||
.unwrap();
|
||||
|
||||
let progress_bar = ProgressBar::new(100);
|
||||
|
||||
progress_bar.set_style(ProgressStyle::with_template(
|
||||
&format!("{} {} {{bar:40.cyan/blue}} {{pos:>3}}% {{msg}}","Saving".bold(), &out_directory.file_name().unwrap().to_string_lossy().truecolor(246,199,219))
|
||||
).unwrap());
|
||||
|
||||
let mut chunk_size = 0;
|
||||
let file_size = file_res.content_length().unwrap();
|
||||
let mut first = true;
|
||||
|
||||
let mut i = 0;
|
||||
while let Some(next) = file_res.chunk().await.unwrap() {
|
||||
i+=1;
|
||||
if first {
|
||||
chunk_size = next.len() as u64;
|
||||
first = false
|
||||
}
|
||||
out_file.write(&next).await.unwrap();
|
||||
|
||||
progress_bar.set_position(f64::trunc(((i as f64 * chunk_size as f64) / file_size as f64) * 200.0) as u64);
|
||||
}
|
||||
progress_bar.finish_and_clear();
|
||||
|
||||
println!("Downloaded to \"{}\"", out_directory.display());
|
||||
}
|
||||
}
|
||||
Commands::Set {
|
||||
username,
|
||||
password,
|
||||
url,
|
||||
dl_dir
|
||||
} => {
|
||||
if username.is_none() && password.is_none() && url.is_none() && dl_dir.is_none() {
|
||||
exit_error(
|
||||
format!("Please provide an option to set"),
|
||||
Some(format!("Allowed options:")),
|
||||
Some(vec!["--username".into(), "--password".into(), "--url".into(), "--dl-dir".into()]),
|
||||
);
|
||||
}
|
||||
|
||||
if let Some(u) = username {
|
||||
if u.is_empty() {
|
||||
exit_error(format!("Username cannot be blank!"), None, None);
|
||||
}
|
||||
|
||||
if let Some(l) = config.login.as_mut() {
|
||||
l.user = u.clone();
|
||||
} else {
|
||||
config.login = Login {
|
||||
user: u.clone(),
|
||||
pass: "".into()
|
||||
}.into();
|
||||
}
|
||||
|
||||
config.save().unwrap();
|
||||
println!("Username set to \"{u}\"")
|
||||
}
|
||||
if let Some(p) = password {
|
||||
if p.is_empty() {
|
||||
exit_error(format!("Password cannot be blank"), None, None);
|
||||
}
|
||||
|
||||
if let Some(l) = config.login.as_mut() {
|
||||
l.pass = p.clone();
|
||||
} else {
|
||||
config.login = Login {
|
||||
user: "".into(),
|
||||
pass: p.clone()
|
||||
}.into();
|
||||
}
|
||||
|
||||
config.save().unwrap();
|
||||
println!("Password set")
|
||||
}
|
||||
if let Some(url) = url {
|
||||
if url.is_empty() {
|
||||
exit_error(format!("URL cannot be blank"), None, None);
|
||||
}
|
||||
|
||||
let url = if url.chars().last() == Some('/') {
|
||||
url.split_at(url.len() - 1).0
|
||||
} else {
|
||||
url
|
||||
};
|
||||
|
||||
let new_url = if !url.starts_with("https://") && !url.starts_with("http://") {
|
||||
("https://".to_owned() + url).to_string()
|
||||
} else {
|
||||
url.to_string()
|
||||
};
|
||||
|
||||
config.url = Some(Url::parse(&new_url)?);
|
||||
|
||||
config.save().unwrap();
|
||||
println!("URL set to \"{url}\"");
|
||||
}
|
||||
if let Some(mut dir) = dl_dir.clone() {
|
||||
if dir.is_empty() {
|
||||
exit_error(format!("Download directory cannot be blank"), None, None);
|
||||
}
|
||||
if dir.as_str() == "default" {
|
||||
dir = directories::UserDirs::new()
|
||||
.unwrap()
|
||||
.download_dir()
|
||||
.unwrap_or_else(|| exit_error("No Default directory available".into(), None, None))
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
}
|
||||
if dir.chars().last() != Some('/') {
|
||||
dir.push('/');
|
||||
}
|
||||
|
||||
let _dir = PathBuf::from(dir.clone());
|
||||
if !_dir.exists() {
|
||||
exit_error(format!("Directory {dir} does not exist"), None, None)
|
||||
}
|
||||
|
||||
config.download_directory = _dir;
|
||||
config.save().unwrap();
|
||||
println!("Download directory set to \"{dir}\"");
|
||||
}
|
||||
}
|
||||
Commands::Info => {
|
||||
let info = match get_info(&config).await {
|
||||
Ok(i) => i,
|
||||
Err(e) => exit_error(format!("Failed to get server information!"), Some(e.to_string()), None),
|
||||
};
|
||||
config.info = Some(info);
|
||||
config.save().unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Errors that can occur while uploading a file to the server.
#[derive(Error, Debug)]
enum UploadError {
    // NOTE(review): despite the message text, this variant is produced when
    // the websocket connection itself fails (see `upload_file`) — consider
    // rewording the message
    #[error("request provided was invalid: {0}")]
    WebSocketFailed(String),

    // Transport-level failure from the HTTP client
    #[error("error on reqwest transaction: {0}")]
    Reqwest(#[from] reqwest::Error),
}
|
||||
|
||||
/// Upload a single file to the server over a websocket connection.
///
/// Runs two concurrent tasks: one streams the file's contents up in binary
/// chunks, the other consumes the server's progress frames to drive a
/// progress bar. The server's final *text* frame is expected to contain the
/// JSON-encoded [`MochiFile`] record describing the stored file, which is
/// returned on success.
async fn upload_file<P: AsRef<Path>>(
    name: String,
    path: &P,
    url: &Url,
    duration: TimeDelta,
    login: &Option<Login>,
) -> Result<MochiFile, UploadError> {
    let mut file = File::open(path).await.unwrap();
    let file_size = file.metadata().await.unwrap().len();

    // Construct the URL, switching to the matching websocket scheme
    let mut url = url.clone();
    if url.scheme() == "http" {
        url.set_scheme("ws").unwrap();
    } else if url.scheme() == "https" {
        url.set_scheme("wss").unwrap();
    }

    url.set_path("/upload/websocket");
    // Upload parameters travel in the query string; duration is sent in seconds
    url.set_query(Some(&format!("name={}&size={}&duration={}", name, file_size, duration.num_seconds())));

    let mut request = url.to_string().into_client_request().unwrap();

    // Attach HTTP basic auth to the websocket handshake when credentials are set
    if let Some(l) = login {
        request.headers_mut().insert(
            "Authorization",
            format!("Basic {}", BASE64_URL_SAFE.encode(format!("{}:{}", l.user, l.pass))).parse().unwrap()
        );
    }

    let (stream, _response) = connect_async(request).await.map_err(|e| UploadError::WebSocketFailed(e.to_string()))?;
    let (mut write, mut read) = stream.split();

    // Upload the file in chunks
    let upload_task = async move {
        // 20 kB scratch buffer; only the bytes actually read are sent
        let mut chunk = vec![0u8; 20_000];
        loop {
            let read_len = file.read(&mut chunk).await.unwrap();
            if read_len == 0 {
                break
            }

            write.send(Message::binary(chunk[..read_len].to_vec())).await.unwrap();
        }

        // Close the stream because sending is over (an empty binary frame
        // signals end-of-file to the server)
        write.send(Message::binary(b"".as_slice())).await.unwrap();
        write.flush().await.unwrap();

        // Hand the write half back so the stream can be reunited below
        write
    };

    let bar = ProgressBar::new(100);
    bar.set_style(ProgressStyle::with_template(
        &format!("{} {{bar:40.cyan/blue}} {{pos:>3}}% {{msg}}", name)
    ).unwrap());

    // Get the progress of the file upload
    let progress_task = async move {
        let final_json = loop {
            let Some(p) = read.next().await else {
                // Stream ended without a final text frame
                break String::new()
            };

            let p = p.unwrap();

            // Got the final json information, return that
            if p.is_text() {
                break p.into_text().unwrap().to_string()
            }

            // Get the progress information — binary frames carry the byte
            // count received so far as a little-endian u64
            let prog = p.into_data();
            let prog = u64::from_le_bytes(prog.to_vec().try_into().unwrap());
            let percent = f64::trunc((prog as f64 / file_size as f64) * 100.0);
            if percent <= 100. {
                bar.set_position(percent as u64);
            }
        };

        (read, final_json, bar)
    };

    // Wait for both of the tasks to finish
    let (read, write) = join!(progress_task, upload_task);
    let (read, final_json, bar) = read;
    let mut stream = write.reunite(read).unwrap();

    let file_info: MochiFile = serde_json::from_str(&final_json).unwrap();

    // If the websocket isn't closed, do that
    if !stream.is_terminated() {
        stream.close(None).await.unwrap();
    }

    bar.finish_and_clear();

    Ok(file_info)
}
|
||||
|
||||
async fn get_info_if_expired(config: &mut Config) -> Result<()> {
|
||||
let now = Utc::now();
|
||||
if !config.info_fetch.is_none() && !config.info_fetch.is_some_and(|e| e <= now) {
|
||||
// Not yet ready to get a new batch of info
|
||||
return Ok(())
|
||||
}
|
||||
println!("{}", "Getting new server info...".truecolor(255,249,184));
|
||||
|
||||
let info = get_info(&config).await?;
|
||||
config.info = Some(info);
|
||||
config.info_fetch = Some(now + TimeDelta::days(2));
|
||||
config.save().unwrap();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_info(config: &Config) -> Result<ServerInfo> {
|
||||
let Some(url) = config.url.clone() else {
|
||||
exit_error(
|
||||
format!("URL is empty"),
|
||||
Some(format!("Please set it using the {} command", "set".truecolor(246,199,219).bold())),
|
||||
None,
|
||||
);
|
||||
};
|
||||
let client = Client::new();
|
||||
|
||||
let get_info = client.get(format!("{url}/info"));
|
||||
let get_info = if let Some(l) = &config.login {
|
||||
get_info.basic_auth(&l.user, l.pass.clone().into())
|
||||
} else {
|
||||
get_info
|
||||
};
|
||||
|
||||
let info = get_info.send().await.unwrap();
|
||||
if info.status() == 401 {
|
||||
let err = info.error_for_status().unwrap_err();
|
||||
bail!(
|
||||
"Got access denied! Maybe you need a username and password? ({} - {})",
|
||||
err.status().unwrap().as_str(),
|
||||
err.status().unwrap().canonical_reason().unwrap_or_default()
|
||||
)
|
||||
}
|
||||
let info = match info.error_for_status() {
|
||||
Ok(i) => i.json::<ServerInfo>().await?,
|
||||
Err(e) => bail!(
|
||||
"Network error: ({} - {})",
|
||||
e.status().unwrap().as_str(),
|
||||
e.status().unwrap().canonical_reason().unwrap_or_default()
|
||||
),
|
||||
};
|
||||
|
||||
Ok(info)
|
||||
}
|
||||
|
||||
/// Attempts to fill a buffer completely from a stream, but if it cannot do so,
|
||||
/// it will only fill what it can read. If it has reached the end of a file, 0
|
||||
/// bytes will be read into the buffer.
|
||||
async fn fill_buffer<S: AsyncReadExt + Unpin>(buffer: &mut [u8], mut stream: S) -> Result<usize, io::Error> {
|
||||
let mut bytes_read = 0;
|
||||
while bytes_read < buffer.len() {
|
||||
let len = stream.read(&mut buffer[bytes_read..]).await?;
|
||||
|
||||
if len == 0 {
|
||||
break;
|
||||
}
|
||||
|
||||
bytes_read += len;
|
||||
}
|
||||
Ok(bytes_read)
|
||||
}
|
||||
|
||||
/// A single local file queued for upload.
#[derive(Debug)]
struct Upload {
    // Open handle to the file's contents
    file: File,
    // Name the file will be uploaded under
    name: String,
    // NOTE(review): appears to be the expiry duration; units are not visible
    // at this definition — confirm against the upload call sites
    duration: i64,
}
|
||||
|
||||
/// Limits and defaults reported by the server's `/info` endpoint.
#[derive(Deserialize, Serialize, Debug)]
struct ServerInfo {
    // Maximum accepted file size, in bytes
    max_filesize: u64,
    // NOTE(review): the duration fields below are presumably seconds
    // (uploads send `duration.num_seconds()`) — confirm against the server
    max_duration: i64,
    // Duration used when the user does not specify one
    default_duration: i64,
    // Expiry durations the server will accept
    allowed_durations: Vec<i64>,
}
|
||||
|
||||
/// Request body describing a file when setting up a chunked upload.
#[derive(Serialize, Debug)]
pub struct ChunkedInfo {
    // Original name of the file
    pub name: String,
    // Total file size in bytes
    pub size: u64,
    // NOTE(review): presumably seconds, matching the websocket upload's
    // `duration` query parameter — confirm against the server API
    pub expire_duration: u64,
}
|
||||
|
||||
/// Response returned by the server when setting up a chunked upload.
#[derive(Serialize, Deserialize, Default, Debug)]
pub struct ChunkedResponse {
    // Whether the setup request succeeded
    status: bool,
    // Status or error message from the server
    message: String,

    /// UUID used for associating the chunk with the final file
    uuid: Option<Uuid>,

    /// Valid max chunk size in bytes
    chunk_size: Option<u64>,
}
|
||||
|
||||
/// Metadata record for a stored file, as returned by the server's JSON API.
// NOTE(review): client-side mirror of the server's `MochiFile` type; here
// `hash` remains a plain string rather than a parsed hash value
#[derive(Deserialize, Debug)]
pub struct MochiFile {
    /// A unique identifier describing this file
    mmid: Mmid,

    /// The original name of the file
    name: String,

    /// The MIME type of the file
    mime_type: String,

    /// The Blake3 hash of the file
    hash: String,

    /// The datetime when the file was uploaded
    upload_datetime: DateTime<Utc>,

    /// The datetime when the file is set to expire
    expiry_datetime: DateTime<Utc>,
}
|
||||
|
||||
/// A unique identifier for a file hosted on the server.
// NOTE(review): client-side mirror of the server's `Mmid` newtype; the inner
// string is treated as opaque here
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
#[derive(Deserialize, Serialize)]
pub struct Mmid(String);
|
||||
|
||||
/// Basic-auth credentials persisted in the user's config file.
#[derive(Deserialize, Serialize, Debug, Clone)]
struct Login {
    // Username for HTTP basic auth
    user: String,
    // Password; stored in plain text in the config file
    pass: String
}
|
||||
|
||||
/// Persistent CLI configuration, serialized to `config.toml`.
#[derive(Deserialize, Serialize, Debug, Default)]
#[serde(default)]
struct Config {
    // Base URL of the confetti-box server
    url: Option<Url>,
    // Optional basic-auth credentials for the server
    login: Option<Login>,
    /// The time when the info was last fetched
    // NOTE(review): the value actually stored is fetch time + 2 days, i.e.
    // the moment the cached info *expires* (see `get_info_if_expired`) —
    // the doc above is misleading
    info_fetch: Option<DateTime<Utc>>,
    // Cached server information from the `/info` endpoint
    info: Option<ServerInfo>,
    // Directory downloads are saved into
    download_directory: PathBuf,
}
|
||||
|
||||
impl Config {
    /// Load the config from disk, creating and saving a default one when no
    /// usable config exists yet.
    ///
    /// In debug builds the config lives at `DEBUG_CONFIG`; in release builds
    /// it is stored as `config.toml` in the platform config directory for
    /// "confetti_cli" (via the `directories` crate). Panics if no project
    /// directory can be determined, and on malformed TOML.
    fn open() -> Result<Self, Box<dyn std::error::Error>> {
        let c = if cfg!(debug_assertions) {
            if let Ok(str) = fs::read_to_string(DEBUG_CONFIG) {
                str
            } else {
                // No debug config yet: write out a fresh default and return it
                let c = Config {
                    url: None,
                    login: None,
                    info_fetch: None,
                    info: None,
                    download_directory: PathBuf::from(DEBUG_DOWNLOAD_DIR)
                };
                c.save().unwrap();
                return Ok(c);
            }
        } else {
            if let Some(dir) = directories::ProjectDirs::from("", "Dangoware", "confetti_cli") {
                let path = dir.config_dir();
                // Create the config directory, tolerating it already existing
                fs::create_dir(path).or_else(|err| {
                    if err.kind() == std::io::ErrorKind::AlreadyExists {
                        Ok(())
                    } else {
                        Err(err)
                    }
                })?;

                let mut buf: String = String::new();

                // Open (or create) the config file and read its contents
                fs::OpenOptions::new()
                    .create(true)
                    .write(true)
                    .read(true)
                    .open(path.join("config.toml"))
                    .unwrap()
                    .read_to_string(&mut buf)
                    .unwrap();

                if buf.is_empty() {
                    // Freshly created (or empty) file: persist defaults,
                    // with the download directory pointing at the user's
                    // Downloads folder when one is available
                    let c = Config {
                        url: None,
                        login: None,
                        info: None,
                        info_fetch: None,
                        download_directory: PathBuf::from(directories::UserDirs::new().unwrap().download_dir().unwrap_or(Path::new("")))
                    };
                    c.save().unwrap();

                    // dbg!(path);
                    return Ok(c);
                } else {
                    buf
                }
            } else {
                panic!("no project dir?")
            }
        };

        Ok(toml::from_str::<Config>(c.as_str()).unwrap())
    }

    /// Serialize the config to TOML and write it to its on-disk location,
    /// truncating any existing file.
    ///
    /// NOTE(review): despite the `Result` return, every failure path panics
    /// via `unwrap()`; the `Err(())` variant is never produced.
    fn save(&self) -> Result<(), ()> {
        let path = if cfg!(debug_assertions) {
            DEBUG_CONFIG.to_string()
        } else {
            if let Some(dir) = directories::ProjectDirs::from("", "Dangoware", "confetti_cli") {
                let path = dir.config_dir();
                // Create the config directory, tolerating it already existing
                fs::create_dir(path).or_else(|err| {
                    if err.kind() == std::io::ErrorKind::AlreadyExists {
                        Ok(())
                    } else {
                        Err(err)
                    }
                }).unwrap();
                let x = path.join("config.toml");
                x.clone().to_str().unwrap().to_string()
            } else {
                panic!("no project dir?")
            }
        };

        fs::OpenOptions::new().create(true).write(true).truncate(true).open(path).unwrap().write_all(toml::to_string(self).unwrap().as_bytes()).unwrap();
        Ok(())
    }
}
|
||||
|
||||
fn parse_time_string(string: &str) -> Result<TimeDelta, Box<dyn Error>> {
|
||||
if string.len() > 7 {
|
||||
return Err("Not valid time string".into());
|
||||
}
|
||||
|
||||
let unit = string.chars().last();
|
||||
let multiplier = if let Some(u) = unit {
|
||||
if !u.is_ascii_alphabetic() {
|
||||
return Err("Not valid time string".into());
|
||||
}
|
||||
|
||||
match u {
|
||||
'D' | 'd' => TimeDelta::days(1),
|
||||
'H' | 'h' => TimeDelta::hours(1),
|
||||
'M' | 'm' => TimeDelta::minutes(1),
|
||||
'S' | 's' => TimeDelta::seconds(1),
|
||||
_ => return Err("Not valid time string".into()),
|
||||
}
|
||||
} else {
|
||||
return Err("Not valid time string".into());
|
||||
};
|
||||
|
||||
let time = if let Ok(n) = string[..string.len() - 1].parse::<i32>() {
|
||||
n
|
||||
} else {
|
||||
return Err("Not valid time string".into());
|
||||
};
|
||||
|
||||
let final_time = multiplier * time;
|
||||
|
||||
Ok(final_time)
|
||||
}
|
||||
|
||||
/// Render a number of seconds as a compact duration like `"1d 2h 3m 4s"`,
/// omitting zero-valued units. Returns an empty string for `0`.
fn pretty_time_short(seconds: i64) -> String {
    let total = seconds as f32;
    let days = (total / 86400.0).floor();
    let hours = ((total - days * 86400.0) / 3600.0).floor();
    let minutes = ((total - hours * 3600.0 - days * 86400.0) / 60.0).floor();
    let secs = total - hours * 3600.0 - minutes * 60.0 - days * 86400.0;

    // A unit contributes its value + suffix only when non-zero
    let part = |value: f32, suffix: &str| -> String {
        if value > 0. {
            value.to_string() + suffix
        } else {
            String::new()
        }
    };

    // Join with single spaces, then trim the ends (interior gaps from
    // omitted units are preserved, matching the original output)
    format!(
        "{} {} {} {}",
        part(days, "d"),
        part(hours, "h"),
        part(minutes, "m"),
        part(secs, "s"),
    )
    .trim()
    .to_string()
}
|
||||
|
||||
/// Render a number of seconds as a long-form duration like
/// `"1 day 2 hours 3 minutes 4 seconds"`, with singular/plural unit names
/// and zero-valued units omitted. Returns an empty string for `0`.
fn pretty_time_long(seconds: i64) -> String {
    let total = seconds as f32;
    let days = (total / 86400.0).floor();
    let hours = ((total - days * 86400.0) / 3600.0).floor();
    let minutes = ((total - hours * 3600.0 - days * 86400.0) / 60.0).floor();
    let secs = total - hours * 3600.0 - minutes * 60.0 - days * 86400.0;

    // "1 day" / "2 days" / "" for zero
    let phrase = |value: f32, unit: &str| -> String {
        if value == 0.0 {
            String::new()
        } else if value == 1.0 {
            format!("{value} {unit}")
        } else {
            format!("{value} {unit}s")
        }
    };

    // Join with single spaces, then trim the ends (interior gaps from
    // omitted units are preserved, matching the original output)
    format!(
        "{} {} {} {}",
        phrase(days, "day"),
        phrase(hours, "hour"),
        phrase(minutes, "minute"),
        phrase(secs, "second"),
    )
    .trim()
    .to_string()
}
|
||||
|
||||
fn exit_error(main_message: String, fix: Option<String>, fix_values: Option<Vec<String>>) -> ! {
|
||||
print_error_line(main_message);
|
||||
|
||||
if let Some(f) = fix {
|
||||
eprint!("{f} ");
|
||||
if let Some(v) = fix_values {
|
||||
let len = v.len() - 1;
|
||||
for (i, value) in v.iter().enumerate() {
|
||||
eprint!("{}", value.truecolor(234, 129, 100));
|
||||
if i != len {
|
||||
eprint!(", ");
|
||||
}
|
||||
}
|
||||
}
|
||||
eprintln!("\n");
|
||||
}
|
||||
|
||||
eprintln!("For more information, try '{}'", "--help".truecolor(246,199,219));
|
||||
std::process::exit(1)
|
||||
}
|
||||
|
||||
/// Print a colored `Error: <message>` line to stderr.
fn print_error_line(message: String) {
    eprintln!("{}: {message}", "Error".truecolor(181,66,127).italic().underline());
}
|
Binary file not shown.
Before Width: | Height: | Size: 102 KiB |
|
@ -2,78 +2,68 @@ use std::{
|
|||
collections::{hash_map::Values, HashMap, HashSet},
|
||||
ffi::OsStr,
|
||||
fs::{self, File},
|
||||
io::{self, Write},
|
||||
path::{Path, PathBuf},
|
||||
sync::{Arc, RwLock},
|
||||
};
|
||||
|
||||
use bincode::{config::Configuration, decode_from_std_read, encode_into_std_write, Decode, Encode};
|
||||
use blake3::Hash;
|
||||
use chrono::{DateTime, TimeDelta, Utc};
|
||||
use ciborium::{from_reader, into_writer};
|
||||
use log::{error, info, warn};
|
||||
use log::{info, warn};
|
||||
use rand::distributions::{Alphanumeric, DistString};
|
||||
use rocket::{
|
||||
form::{self, FromFormField, ValueField},
|
||||
serde::{Deserialize, Serialize},
|
||||
tokio::{select, sync::mpsc::Receiver, time},
|
||||
};
|
||||
use serde_with::{serde_as, DisplayFromStr};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct Mochibase {
|
||||
const BINCODE_CFG: Configuration = bincode::config::standard();
|
||||
|
||||
#[derive(Debug, Clone, Decode, Encode)]
|
||||
pub struct Database {
|
||||
path: PathBuf,
|
||||
|
||||
/// Every hash in the database along with the [`Mmid`]s associated with them
|
||||
#[bincode(with_serde)]
|
||||
hashes: HashMap<Hash, HashSet<Mmid>>,
|
||||
|
||||
/// All entries in the database
|
||||
#[bincode(with_serde)]
|
||||
entries: HashMap<Mmid, MochiFile>,
|
||||
}
|
||||
|
||||
impl Mochibase {
|
||||
/// Create a new database initialized with no data, and save it to the
|
||||
/// provided path
|
||||
pub fn new<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
|
||||
impl Database {
|
||||
pub fn new<P: AsRef<Path>>(path: &P) -> Self {
|
||||
let mut file = File::create_new(path).expect("Could not create database!");
|
||||
|
||||
let output = Self {
|
||||
path: path.as_ref().to_path_buf(),
|
||||
entries: HashMap::new(),
|
||||
hashes: HashMap::new(),
|
||||
};
|
||||
|
||||
// Save the database initially after creating it
|
||||
output.save()?;
|
||||
encode_into_std_write(&output, &mut file, BINCODE_CFG).expect("Could not write database!");
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
/// Open the database from a path
|
||||
pub fn open<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
|
||||
let mut file = File::open(path)?;
|
||||
|
||||
from_reader(&mut file)
|
||||
.map_err(|e| io::Error::other(format!("failed to open database: {e}")))
|
||||
output
|
||||
}
|
||||
|
||||
/// Open the database from a path, **or create it if it does not exist**
|
||||
pub fn open_or_new<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
|
||||
pub fn open<P: AsRef<Path>>(path: &P) -> Self {
|
||||
if !path.as_ref().exists() {
|
||||
Self::new(path)
|
||||
} else {
|
||||
Self::open(path)
|
||||
let mut file = File::open(path).expect("Could not get database file!");
|
||||
decode_from_std_read(&mut file, BINCODE_CFG).expect("Could not decode database")
|
||||
}
|
||||
}
|
||||
|
||||
/// Save the database to its file
|
||||
pub fn save(&self) -> Result<(), io::Error> {
|
||||
// Create a file and write the LZ4 compressed stream into it
|
||||
let mut file = File::create(self.path.with_extension("bkp"))?;
|
||||
into_writer(self, &mut file)
|
||||
.map_err(|e| io::Error::other(format!("failed to save database: {e}")))?;
|
||||
file.flush()?;
|
||||
pub fn save(&self) {
|
||||
let mut out_path = self.path.clone();
|
||||
out_path.set_extension(".bkp");
|
||||
let mut file = File::create(&out_path).expect("Could not save!");
|
||||
encode_into_std_write(self, &mut file, BINCODE_CFG).expect("Could not write out!");
|
||||
|
||||
fs::rename(self.path.with_extension("bkp"), &self.path).unwrap();
|
||||
|
||||
Ok(())
|
||||
fs::rename(out_path, &self.path).unwrap();
|
||||
}
|
||||
|
||||
/// Insert a [`MochiFile`] into the database.
|
||||
|
@ -140,18 +130,14 @@ impl Mochibase {
|
|||
self.entries.get(mmid)
|
||||
}
|
||||
|
||||
pub fn get_hash(&self, hash: &Hash) -> Option<&HashSet<Mmid>> {
|
||||
self.hashes.get(hash)
|
||||
}
|
||||
|
||||
pub fn entries(&self) -> Values<'_, Mmid, MochiFile> {
|
||||
self.entries.values()
|
||||
}
|
||||
}
|
||||
|
||||
/// An entry in the database storing metadata about a file
|
||||
#[serde_as]
|
||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||
#[derive(Debug, Clone, Decode, Encode, Deserialize, Serialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub struct MochiFile {
|
||||
/// A unique identifier describing this file
|
||||
mmid: Mmid,
|
||||
|
@ -159,36 +145,40 @@ pub struct MochiFile {
|
|||
/// The original name of the file
|
||||
name: String,
|
||||
|
||||
/// The MIME type of the file
|
||||
mime_type: String,
|
||||
/// The format the file is, for serving
|
||||
extension: String,
|
||||
|
||||
/// The Blake3 hash of the file
|
||||
#[serde_as(as = "DisplayFromStr")]
|
||||
#[bincode(with_serde)]
|
||||
hash: Hash,
|
||||
|
||||
/// The datetime when the file was uploaded
|
||||
#[bincode(with_serde)]
|
||||
upload_datetime: DateTime<Utc>,
|
||||
|
||||
/// The datetime when the file is set to expire
|
||||
#[bincode(with_serde)]
|
||||
expiry_datetime: DateTime<Utc>,
|
||||
}
|
||||
|
||||
impl MochiFile {
|
||||
/// Create a new file that expires in `expiry`.
|
||||
pub fn new(
|
||||
pub fn new_with_expiry(
|
||||
mmid: Mmid,
|
||||
name: String,
|
||||
mime_type: String,
|
||||
extension: &str,
|
||||
hash: Hash,
|
||||
upload: DateTime<Utc>,
|
||||
expiry: DateTime<Utc>,
|
||||
expire_duration: TimeDelta,
|
||||
) -> Self {
|
||||
let current = Utc::now();
|
||||
let expiry = current + expire_duration;
|
||||
|
||||
Self {
|
||||
mmid,
|
||||
name,
|
||||
mime_type,
|
||||
extension: extension.to_string(),
|
||||
hash,
|
||||
upload_datetime: upload,
|
||||
upload_datetime: current,
|
||||
expiry_datetime: expiry,
|
||||
}
|
||||
}
|
||||
|
@ -214,14 +204,14 @@ impl MochiFile {
|
|||
&self.mmid
|
||||
}
|
||||
|
||||
pub fn mime_type(&self) -> &String {
|
||||
&self.mime_type
|
||||
pub fn extension(&self) -> &String {
|
||||
&self.extension
|
||||
}
|
||||
}
|
||||
|
||||
/// Clean the database. Removes files which are past their expiry
|
||||
/// [`chrono::DateTime`]. Also removes files which no longer exist on the disk.
|
||||
pub fn clean_database(db: &Arc<RwLock<Mochibase>>, file_path: &Path) {
|
||||
fn clean_database(db: &Arc<RwLock<Database>>, file_path: &Path) {
|
||||
let mut database = db.write().unwrap();
|
||||
|
||||
// Add expired entries to the removal list
|
||||
|
@ -252,22 +242,38 @@ pub fn clean_database(db: &Arc<RwLock<Mochibase>>, file_path: &Path) {
|
|||
}
|
||||
}
|
||||
|
||||
info!("Cleaned database.\n\t| Removed {removed_entries} expired entries.\n\t| Removed {removed_files} no longer referenced files.");
|
||||
info!("Cleaned database. Removed {removed_entries} expired entries. Removed {removed_files} no longer referenced files.");
|
||||
|
||||
if let Err(e) = database.save() {
|
||||
error!("Failed to save database: {e}")
|
||||
database.save();
|
||||
}
|
||||
|
||||
/// A loop to clean the database periodically.
|
||||
pub async fn clean_loop(
|
||||
db: Arc<RwLock<Database>>,
|
||||
file_path: PathBuf,
|
||||
mut shutdown_signal: Receiver<()>,
|
||||
interval: TimeDelta,
|
||||
) {
|
||||
let mut interval = time::interval(interval.to_std().unwrap());
|
||||
|
||||
loop {
|
||||
select! {
|
||||
_ = interval.tick() => clean_database(&db, &file_path),
|
||||
_ = shutdown_signal.recv() => break,
|
||||
};
|
||||
}
|
||||
drop(database); // Just to be sure
|
||||
}
|
||||
|
||||
/// A unique identifier for an entry in the database, 8 characters long,
|
||||
/// consists of ASCII alphanumeric characters (`a-z`, `A-Z`, and `0-9`).
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Hash, Deserialize, Serialize)]
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
|
||||
#[derive(Decode, Encode)]
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub struct Mmid(String);
|
||||
|
||||
impl Mmid {
|
||||
/// Create a new random MMID
|
||||
pub fn new_random() -> Self {
|
||||
pub fn new() -> Self {
|
||||
let string = Alphanumeric.sample_string(&mut rand::thread_rng(), 8);
|
||||
|
||||
Self(string)
|
||||
|
@ -324,120 +330,3 @@ impl std::fmt::Display for Mmid {
|
|||
write!(f, "{}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
#[rocket::async_trait]
|
||||
impl<'r> FromFormField<'r> for Mmid {
|
||||
fn from_value(field: ValueField<'r>) -> form::Result<'r, Self> {
|
||||
Ok(Self::try_from(field.value).map_err(|_| form::Error::validation("Invalid MMID"))?)
|
||||
}
|
||||
}
|
||||
|
||||
/// An in-memory database for partially uploaded chunks of files
|
||||
#[derive(Default, Debug)]
|
||||
pub struct Chunkbase {
|
||||
chunks: HashMap<Uuid, (DateTime<Utc>, ChunkedInfo)>,
|
||||
}
|
||||
|
||||
impl Chunkbase {
|
||||
/// Delete all temporary chunk files
|
||||
pub fn delete_all(&mut self) -> Result<(), io::Error> {
|
||||
for (_timeout, chunk) in self.chunks.values() {
|
||||
fs::remove_file(&chunk.path)?;
|
||||
}
|
||||
|
||||
self.chunks.clear();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn delete_timed_out(&mut self) -> Result<(), io::Error> {
|
||||
let now = Utc::now();
|
||||
self.chunks.retain(|_u, (t, c)| {
|
||||
if *t <= now {
|
||||
let _ = fs::remove_file(&c.path);
|
||||
false
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn new_file<P: AsRef<Path>>(&mut self, mut info: ChunkedInfo, temp_dir: &P, timeout: TimeDelta) -> Result<Uuid, io::Error> {
|
||||
let uuid = Uuid::new_v4();
|
||||
let expire = Utc::now() + timeout;
|
||||
info.path = temp_dir.as_ref().join(uuid.to_string());
|
||||
|
||||
self.chunks.insert(uuid, (expire, info.clone()));
|
||||
|
||||
fs::File::create_new(&info.path)?;
|
||||
|
||||
Ok(uuid)
|
||||
}
|
||||
|
||||
pub fn get_file(&self, uuid: &Uuid) -> Option<&(DateTime<Utc>, ChunkedInfo)> {
|
||||
self.chunks.get(uuid)
|
||||
}
|
||||
|
||||
pub fn remove_file(&mut self, uuid: &Uuid) -> Result<bool, io::Error> {
|
||||
let item = match self.chunks.remove(uuid) {
|
||||
Some(i) => i,
|
||||
None => return Ok(false),
|
||||
};
|
||||
|
||||
fs::remove_file(item.1.path)?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
pub fn move_and_remove_file<P: AsRef<Path>>(&mut self, uuid: &Uuid, new_location: &P) -> Result<bool, io::Error> {
|
||||
let item = match self.chunks.remove(uuid) {
|
||||
Some(i) => i,
|
||||
None => return Ok(false),
|
||||
};
|
||||
|
||||
fs::rename(item.1.path, new_location)?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
pub fn extend_timeout(&mut self, uuid: &Uuid, timeout: TimeDelta) -> bool {
|
||||
let item = match self.chunks.get_mut(uuid) {
|
||||
Some(i) => i,
|
||||
None => return false,
|
||||
};
|
||||
|
||||
item.0 = Utc::now() + timeout;
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
pub fn add_recieved_chunk(&mut self, uuid: &Uuid, chunk: u64) -> bool {
|
||||
let item = match self.chunks.get_mut(uuid) {
|
||||
Some(i) => i,
|
||||
None => return false,
|
||||
};
|
||||
|
||||
item.1.recieved_chunks.insert(chunk)
|
||||
}
|
||||
}
|
||||
|
||||
/// Information about how to manage partially uploaded chunks of files
|
||||
#[serde_as]
|
||||
#[derive(Default, Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct ChunkedInfo {
|
||||
pub name: String,
|
||||
pub size: u64,
|
||||
#[serde_as(as = "serde_with::DurationSeconds<i64>")]
|
||||
pub expire_duration: TimeDelta,
|
||||
|
||||
/// Tracks which chunks have already been recieved, so you can't overwrite
|
||||
/// some wrong part of a file
|
||||
#[serde(skip)]
|
||||
pub recieved_chunks: HashSet<u64>,
|
||||
#[serde(skip)]
|
||||
pub path: PathBuf,
|
||||
#[serde(skip)]
|
||||
pub offset: u64,
|
||||
}
|
|
@ -1,15 +1,17 @@
|
|||
use std::{
|
||||
str::FromStr,
|
||||
sync::{Arc, RwLock},
|
||||
};
|
||||
use std::sync::{Arc, RwLock};
|
||||
|
||||
use rocket::{
|
||||
get, http::ContentType, response::{self, Redirect, Responder, Response}, serde::{self, json::Json}, tokio::{self, fs::File}, uri, Request, State
|
||||
get,
|
||||
http::ContentType,
|
||||
response::Redirect,
|
||||
serde::{self, json::Json},
|
||||
tokio::fs::File,
|
||||
uri, State,
|
||||
};
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::{
|
||||
database::{Mmid, MochiFile, Mochibase},
|
||||
database::{Database, Mmid},
|
||||
settings::Settings,
|
||||
};
|
||||
|
||||
|
@ -30,15 +32,6 @@ pub fn server_info(settings: &State<Settings>) -> Json<ServerInfo> {
|
|||
})
|
||||
}
|
||||
|
||||
/// Get information about a file
|
||||
#[get("/info/<mmid>")]
|
||||
pub async fn file_info(db: &State<Arc<RwLock<Mochibase>>>, mmid: &str) -> Option<Json<MochiFile>> {
|
||||
let mmid: Mmid = mmid.try_into().ok()?;
|
||||
let entry = db.read().unwrap().get(&mmid).cloned()?;
|
||||
|
||||
Some(Json(entry))
|
||||
}
|
||||
|
||||
#[derive(Serialize, Debug)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub struct ServerInfo {
|
||||
|
@ -50,7 +43,7 @@ pub struct ServerInfo {
|
|||
}
|
||||
|
||||
#[get("/f/<mmid>")]
|
||||
pub async fn lookup_mmid(db: &State<Arc<RwLock<Mochibase>>>, mmid: &str) -> Option<Redirect> {
|
||||
pub async fn lookup_mmid(db: &State<Arc<RwLock<Database>>>, mmid: &str) -> Option<Redirect> {
|
||||
let mmid: Mmid = mmid.try_into().ok()?;
|
||||
let entry = db.read().unwrap().get(&mmid).cloned()?;
|
||||
|
||||
|
@ -60,65 +53,37 @@ pub async fn lookup_mmid(db: &State<Arc<RwLock<Mochibase>>>, mmid: &str) -> Opti
|
|||
))))
|
||||
}
|
||||
|
||||
#[get("/f/<mmid>?noredir&<download>")]
|
||||
#[get("/f/<mmid>?noredir")]
|
||||
pub async fn lookup_mmid_noredir(
|
||||
db: &State<Arc<RwLock<Mochibase>>>,
|
||||
db: &State<Arc<RwLock<Database>>>,
|
||||
settings: &State<Settings>,
|
||||
mmid: &str,
|
||||
download: bool,
|
||||
) -> Option<FileDownloader> {
|
||||
) -> Option<(ContentType, File)> {
|
||||
let mmid: Mmid = mmid.try_into().ok()?;
|
||||
|
||||
let entry = db.read().unwrap().get(&mmid).cloned()?;
|
||||
|
||||
let file = File::open(settings.file_dir.join(entry.hash().to_string()))
|
||||
.await
|
||||
.ok()?;
|
||||
|
||||
Some(FileDownloader {
|
||||
inner: file,
|
||||
filename: entry.name().clone(),
|
||||
content_type: ContentType::from_str(entry.mime_type()).unwrap_or(ContentType::Binary),
|
||||
disposition: download
|
||||
})
|
||||
dbg!(entry.extension());
|
||||
|
||||
Some((
|
||||
ContentType::from_extension(entry.extension()).unwrap_or(ContentType::Binary),
|
||||
file,
|
||||
))
|
||||
}
|
||||
|
||||
pub struct FileDownloader {
|
||||
inner: tokio::fs::File,
|
||||
filename: String,
|
||||
content_type: ContentType,
|
||||
disposition: bool,
|
||||
}
|
||||
|
||||
impl<'r> Responder<'r, 'r> for FileDownloader {
|
||||
fn respond_to(self, _: &'r Request<'_>) -> response::Result<'r> {
|
||||
let mut resp = Response::build();
|
||||
resp.streamed_body(self.inner)
|
||||
.header(self.content_type);
|
||||
|
||||
if self.disposition {
|
||||
resp.raw_header(
|
||||
"Content-Disposition",
|
||||
format!(
|
||||
"attachment; filename=\"{}\"; filename*=UTF-8''{}",
|
||||
unidecode::unidecode(&self.filename),
|
||||
urlencoding::encode(&self.filename)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
resp.ok()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#[get("/f/<mmid>/<name>")]
|
||||
pub async fn lookup_mmid_name(
|
||||
db: &State<Arc<RwLock<Mochibase>>>,
|
||||
db: &State<Arc<RwLock<Database>>>,
|
||||
settings: &State<Settings>,
|
||||
mmid: &str,
|
||||
name: &str,
|
||||
) -> Option<(ContentType, File)> {
|
||||
let mmid: Mmid = mmid.try_into().ok()?;
|
||||
|
||||
let entry = db.read().unwrap().get(&mmid).cloned()?;
|
||||
|
||||
// If the name does not match, then this is invalid
|
||||
|
@ -131,7 +96,7 @@ pub async fn lookup_mmid_name(
|
|||
.ok()?;
|
||||
|
||||
Some((
|
||||
ContentType::from_str(entry.mime_type()).unwrap_or(ContentType::Binary),
|
||||
ContentType::from_extension(entry.extension()).unwrap_or(ContentType::Binary),
|
||||
file,
|
||||
))
|
||||
}
|
261
src/main.rs
Normal file
261
src/main.rs
Normal file
|
@ -0,0 +1,261 @@
|
|||
mod database;
|
||||
mod endpoints;
|
||||
mod settings;
|
||||
mod strings;
|
||||
mod utils;
|
||||
mod pages;
|
||||
|
||||
use std::{
|
||||
fs,
|
||||
sync::{Arc, RwLock},
|
||||
};
|
||||
|
||||
use chrono::{DateTime, TimeDelta, Utc};
|
||||
use database::{clean_loop, Database, Mmid, MochiFile};
|
||||
use endpoints::{lookup_mmid, lookup_mmid_name, lookup_mmid_noredir, server_info};
|
||||
use log::info;
|
||||
use maud::{html, Markup, PreEscaped};
|
||||
use pages::{api_info, footer, head};
|
||||
use rocket::{
|
||||
data::{Limits, ToByteUnit}, form::Form, fs::TempFile, get, http::ContentType, post, response::content::{RawCss, RawJavaScript}, routes, serde::{json::Json, Serialize}, tokio, Config, FromForm, State
|
||||
};
|
||||
use settings::Settings;
|
||||
use strings::{parse_time_string, to_pretty_time};
|
||||
use utils::hash_file;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Stylesheet
|
||||
#[get("/main.css")]
|
||||
fn stylesheet() -> RawCss<&'static str> {
|
||||
RawCss(include_str!("../web/main.css"))
|
||||
}
|
||||
|
||||
/// Upload handler javascript
|
||||
#[get("/request.js")]
|
||||
fn form_handler_js() -> RawJavaScript<&'static str> {
|
||||
RawJavaScript(include_str!("../web/request.js"))
|
||||
}
|
||||
|
||||
#[get("/favicon.svg")]
|
||||
fn favicon() -> (ContentType, &'static str) {
|
||||
(ContentType::SVG, include_str!("../web/favicon.svg"))
|
||||
}
|
||||
|
||||
#[get("/")]
|
||||
fn home(settings: &State<Settings>) -> Markup {
|
||||
html! {
|
||||
(head("Confetti-Box"))
|
||||
script src="./request.js" { }
|
||||
|
||||
center {
|
||||
h1 { "Confetti-Box 🎉" }
|
||||
h2 { "Files up to " (settings.max_filesize.bytes()) " in size are allowed!" }
|
||||
hr;
|
||||
button.main_file_upload #fileButton onclick="document.getElementById('fileInput').click()" {
|
||||
h4 { "Upload File(s)" }
|
||||
p { "Click or Drag and Drop" }
|
||||
}
|
||||
h3 { "Expire after:" }
|
||||
div id="durationBox" {
|
||||
@for d in &settings.duration.allowed {
|
||||
button.button.{@if settings.duration.default == *d { "selected" }}
|
||||
data-duration-seconds=(d.num_seconds())
|
||||
{
|
||||
(PreEscaped(to_pretty_time(d.num_seconds() as u32)))
|
||||
}
|
||||
}
|
||||
}
|
||||
form #uploadForm {
|
||||
// It's stupid how these can't be styled so they're just hidden here...
|
||||
input #fileDuration type="text" name="duration" minlength="2"
|
||||
maxlength="7" value=(settings.duration.default.num_seconds().to_string() + "s") style="display:none;";
|
||||
input #fileInput type="file" name="fileUpload" multiple
|
||||
onchange="formSubmit(this.parentNode)" data-max-filesize=(settings.max_filesize) style="display:none;";
|
||||
}
|
||||
hr;
|
||||
|
||||
h3 { "Uploaded Files" }
|
||||
div #uploadedFilesDisplay {
|
||||
|
||||
}
|
||||
|
||||
hr;
|
||||
(footer())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, FromForm)]
|
||||
struct Upload<'r> {
|
||||
#[field(name = "duration")]
|
||||
expire_time: String,
|
||||
|
||||
#[field(name = "fileUpload")]
|
||||
file: TempFile<'r>,
|
||||
}
|
||||
|
||||
/// Handle a file upload and store it
|
||||
#[post("/upload", data = "<file_data>")]
|
||||
async fn handle_upload(
|
||||
mut file_data: Form<Upload<'_>>,
|
||||
db: &State<Arc<RwLock<Database>>>,
|
||||
settings: &State<Settings>,
|
||||
) -> Result<Json<ClientResponse>, std::io::Error> {
|
||||
// Ensure the expiry time is valid, if not return an error
|
||||
let expire_time = if let Ok(t) = parse_time_string(&file_data.expire_time) {
|
||||
if settings.duration.restrict_to_allowed && !settings.duration.allowed.contains(&t) {
|
||||
return Ok(Json(ClientResponse::failure("Duration not allowed")));
|
||||
}
|
||||
|
||||
if t > settings.duration.maximum {
|
||||
return Ok(Json(ClientResponse::failure("Duration larger than max")));
|
||||
}
|
||||
|
||||
t
|
||||
} else {
|
||||
return Ok(Json(ClientResponse::failure("Duration invalid")));
|
||||
};
|
||||
|
||||
let raw_name = file_data
|
||||
.file
|
||||
.raw_name()
|
||||
.unwrap()
|
||||
.dangerous_unsafe_unsanitized_raw()
|
||||
.as_str()
|
||||
.to_string();
|
||||
|
||||
// Get temp path for the file
|
||||
let temp_filename = settings.temp_dir.join(Uuid::new_v4().to_string());
|
||||
file_data.file.persist_to(&temp_filename).await?;
|
||||
|
||||
// Get hash and random identifier
|
||||
let file_mmid = Mmid::new();
|
||||
let file_hash = hash_file(&temp_filename).await?;
|
||||
|
||||
// Process filetype
|
||||
let file_type = file_format::FileFormat::from_file(&temp_filename)?;
|
||||
|
||||
let constructed_file = MochiFile::new_with_expiry(
|
||||
file_mmid.clone(),
|
||||
raw_name,
|
||||
file_type.extension(),
|
||||
file_hash,
|
||||
expire_time,
|
||||
);
|
||||
|
||||
// Move it to the new proper place
|
||||
std::fs::rename(temp_filename, settings.file_dir.join(file_hash.to_string()))?;
|
||||
|
||||
db.write().unwrap().insert(&file_mmid, constructed_file.clone());
|
||||
db.write().unwrap().save();
|
||||
|
||||
Ok(Json(ClientResponse {
|
||||
status: true,
|
||||
name: constructed_file.name().clone(),
|
||||
mmid: Some(file_mmid),
|
||||
hash: file_hash.to_string(),
|
||||
expires: Some(constructed_file.expiry()),
|
||||
..Default::default()
|
||||
}))
|
||||
}
|
||||
|
||||
/// A response to the client from the server
|
||||
#[derive(Serialize, Default, Debug)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
struct ClientResponse {
|
||||
/// Success or failure
|
||||
pub status: bool,
|
||||
|
||||
pub response: &'static str,
|
||||
|
||||
#[serde(skip_serializing_if = "str::is_empty")]
|
||||
pub name: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub mmid: Option<Mmid>,
|
||||
#[serde(skip_serializing_if = "str::is_empty")]
|
||||
pub hash: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub expires: Option<DateTime<Utc>>,
|
||||
}
|
||||
|
||||
impl ClientResponse {
|
||||
fn failure(response: &'static str) -> Self {
|
||||
Self {
|
||||
status: false,
|
||||
response,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[rocket::main]
|
||||
async fn main() {
|
||||
// Get or create config file
|
||||
let config = Settings::open(&"./settings.toml").expect("Could not open settings file");
|
||||
|
||||
if !config.temp_dir.try_exists().is_ok_and(|e| e) {
|
||||
fs::create_dir_all(config.temp_dir.clone()).expect("Failed to create temp directory");
|
||||
}
|
||||
|
||||
if !config.file_dir.try_exists().is_ok_and(|e| e) {
|
||||
fs::create_dir_all(config.file_dir.clone()).expect("Failed to create file directory");
|
||||
}
|
||||
|
||||
// Set rocket configuration settings
|
||||
let rocket_config = Config {
|
||||
address: config.server.address.parse().expect("IP address invalid"),
|
||||
port: config.server.port,
|
||||
temp_dir: config.temp_dir.clone().into(),
|
||||
limits: Limits::default()
|
||||
.limit("data-form", config.max_filesize.bytes())
|
||||
.limit("file", config.max_filesize.bytes()),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let database = Arc::new(RwLock::new(Database::open(&config.database_path)));
|
||||
let local_db = database.clone();
|
||||
|
||||
// Start monitoring thread, cleaning the database every 2 minutes
|
||||
let (shutdown, rx) = tokio::sync::mpsc::channel(1);
|
||||
tokio::spawn({
|
||||
let cleaner_db = database.clone();
|
||||
let file_path = config.file_dir.clone();
|
||||
async move { clean_loop(cleaner_db, file_path, rx, TimeDelta::minutes(2)).await }
|
||||
});
|
||||
|
||||
let rocket = rocket::build()
|
||||
.mount(
|
||||
config.server.root_path.clone() + "/",
|
||||
routes![
|
||||
home,
|
||||
api_info,
|
||||
handle_upload,
|
||||
form_handler_js,
|
||||
stylesheet,
|
||||
server_info,
|
||||
favicon,
|
||||
lookup_mmid,
|
||||
lookup_mmid_noredir,
|
||||
lookup_mmid_name,
|
||||
],
|
||||
)
|
||||
.manage(database)
|
||||
.manage(config)
|
||||
.configure(rocket_config)
|
||||
.launch()
|
||||
.await;
|
||||
|
||||
// Ensure the server gracefully shuts down
|
||||
rocket.expect("Server failed to shutdown gracefully");
|
||||
|
||||
info!("Stopping database cleaning thread...");
|
||||
shutdown
|
||||
.send(())
|
||||
.await
|
||||
.expect("Failed to stop cleaner thread.");
|
||||
info!("Stopping database cleaning thread completed successfully.");
|
||||
|
||||
info!("Saving database on shutdown...");
|
||||
local_db.write().unwrap().save();
|
||||
info!("Saving database completed successfully.");
|
||||
}
|
119
src/pages.rs
Normal file
119
src/pages.rs
Normal file
|
@ -0,0 +1,119 @@
|
|||
use maud::{html, Markup, DOCTYPE};
|
||||
use rocket::{get, State};
|
||||
|
||||
use crate::settings::Settings;
|
||||
|
||||
pub fn head(page_title: &str) -> Markup {
|
||||
html! {
|
||||
(DOCTYPE)
|
||||
meta charset="UTF-8";
|
||||
meta name="viewport" content="width=device-width, initial-scale=1";
|
||||
title { (page_title) }
|
||||
link rel="icon" type="image/svg+xml" href="favicon.svg";
|
||||
link rel="stylesheet" href="./main.css";
|
||||
}
|
||||
}
|
||||
|
||||
pub fn footer() -> Markup {
|
||||
html! {
|
||||
footer {
|
||||
p {a href="/" {"Home"}}
|
||||
p {a href="https://github.com/G2-Games/confetti-box" {"Source"}}
|
||||
p {a href="https://g2games.dev/" {"My Website"}}
|
||||
p {a href="api" {"API"}}
|
||||
p {a href="https://ko-fi.com/g2_games" {"Donate"}}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/api")]
|
||||
pub fn api_info(settings: &State<Settings>) -> Markup {
|
||||
let domain = &settings.server.domain;
|
||||
let root = &settings.server.root_path;
|
||||
html! {
|
||||
(head("Confetti-Box | API"))
|
||||
|
||||
center {
|
||||
h1 { "API Information" }
|
||||
hr;
|
||||
|
||||
div style="text-align: left;" {
|
||||
p {
|
||||
"Confetti-Box is designed to be simple to access using its
|
||||
API. All endpoints are accessed following "
|
||||
code{"https://"(domain) (root)} ". All responses are encoded
|
||||
in JSON. MMIDs are a unique identifier for a file returned by
|
||||
the server after a successful " code{"/upload"} " request."
|
||||
}
|
||||
p {
|
||||
"The following endpoints are supported:"
|
||||
}
|
||||
|
||||
h2 { code {"/upload"} }
|
||||
pre { r#"POST duration=String fileUpload=Bytes -> JSON"# }
|
||||
p {
|
||||
"To upload files, " code{"POST"} " a multipart form
|
||||
containing the fields " code{"duration"} " and "
|
||||
code{"fileData"} " to this endpoint. " code{"duration"}
|
||||
" MUST be a string formatted like " code{"1H"}", where
|
||||
the number MUST be a valid number and the letter MUST be
|
||||
one of " b{"S"} "(econd), " b{"M"}"(inute), " b{"H"}"(our), "
|
||||
b{"D"}"(ay). The " code{"/info"} " endpoint returns valid
|
||||
durations and maximum file sizes."
|
||||
}
|
||||
p {
|
||||
"Example successful response:"
|
||||
}
|
||||
pre {
|
||||
"{\n\t\"status\": true,\n\t\"response\": \"\",\n\t\"name\": \"1600-1200.jpg\",\n\t\"mmid\": \"xNLF6ogx\",\n\t\"hash\": \"1f12137f2c263d9e6d686e90c687a55d46d064fe6eeda7e4c39158d20ce1f071\",\n\t\"expires\": \"2024-10-28T11:59:25.024373438Z\"\n}"
|
||||
}
|
||||
p {"Example failure response:"}
|
||||
pre {
|
||||
"{\n\t\"status\": false,\n\t\"response\": \"Duration invalid\",\n}"
|
||||
}
|
||||
|
||||
hr;
|
||||
h2 { code {"/info"} }
|
||||
pre { r#"GET -> JSON"# }
|
||||
p {
|
||||
"Returns the capabilities of the server."
|
||||
}
|
||||
p {"Example response:"}
|
||||
pre {
|
||||
"{\n\t\"max_filesize\": 5000000000,\n\t\"max_duration\": 259200,\n\t\"default_duration\": 21600,\n\t\"allowed_durations\": [\n\t\t3600,\n\t\t21600,\n\t\t86400,\n\t\t172800\n\t]\n}"
|
||||
}
|
||||
|
||||
hr;
|
||||
h2 { code {"/f/<mmid>"} }
|
||||
pre { r#"GET mmid=MMID -> Redirect or File"# }
|
||||
p {
|
||||
"By default issues a redirect to the full URL for a file. This
|
||||
behavior can be modified by appending " code{"?noredir"} " to
|
||||
the end of this request, like " code{"/f/<mmid>?noredir"} ",
|
||||
in which case it behaves just like " code{"/f/<mmid>/<filename>"}
|
||||
}
|
||||
p {"Example default response:"}
|
||||
pre {"303: /f/xNLF6ogx/1600-1200.jpg"}
|
||||
|
||||
p {"Example modified response:"}
|
||||
pre {"<File Bytes>"}
|
||||
|
||||
hr;
|
||||
h2 { code {"/f/<mmid>/<filename>"} }
|
||||
pre { r#"GET mmid=MMID filename=String -> File"# }
|
||||
p {
|
||||
"Returns the contents of the file corresponding to the
|
||||
requested MMID, but with the corresponding filename so as
|
||||
to preserve it for downloads. Mostly for use by browsers."
|
||||
}
|
||||
p {"Example response:"}
|
||||
pre {
|
||||
"<File Bytes>"
|
||||
}
|
||||
}
|
||||
|
||||
hr;
|
||||
(footer())
|
||||
}
|
||||
}
|
||||
}
|
|
@ -11,31 +11,34 @@ use serde_with::serde_as;
|
|||
|
||||
/// A response to the client from the server
|
||||
#[derive(Deserialize, Serialize, Debug)]
|
||||
#[serde(default)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub struct Settings {
|
||||
/// Maximum filesize in bytes
|
||||
#[serde(default)]
|
||||
pub max_filesize: u64,
|
||||
|
||||
/// Maximum filesize in bytes
|
||||
pub chunk_size: u64,
|
||||
|
||||
/// Is overwiting already uploaded files with the same hash allowed, or is
|
||||
/// this a no-op?
|
||||
#[serde(default)]
|
||||
pub overwrite: bool,
|
||||
|
||||
/// Settings pertaining to duration information
|
||||
pub duration: DurationSettings,
|
||||
|
||||
/// The path to the database file
|
||||
#[serde(default)]
|
||||
pub database_path: PathBuf,
|
||||
|
||||
/// Temporary directory for stuff
|
||||
#[serde(default)]
|
||||
pub temp_dir: PathBuf,
|
||||
|
||||
/// Directory in which to store hosted files
|
||||
#[serde(default)]
|
||||
pub file_dir: PathBuf,
|
||||
|
||||
/// Settings pertaining to the server configuration
|
||||
#[serde(default)]
|
||||
pub server: ServerSettings,
|
||||
|
||||
#[serde(skip)]
|
||||
|
@ -45,8 +48,7 @@ pub struct Settings {
|
|||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
max_filesize: 25.megabytes().into(), // 1 MB
|
||||
chunk_size: 10.megabytes().into(),
|
||||
max_filesize: 1.megabytes().into(), // 128 MB
|
||||
overwrite: true,
|
||||
duration: DurationSettings::default(),
|
||||
server: ServerSettings::default(),
|
||||
|
@ -79,11 +81,11 @@ impl Settings {
|
|||
}
|
||||
|
||||
pub fn save(&self) -> Result<(), io::Error> {
|
||||
let out_path = &self.path.with_extension("new");
|
||||
let mut file = File::create(out_path)?;
|
||||
let mut out_path = self.path.clone();
|
||||
out_path.set_extension(".bkp");
|
||||
let mut file = File::create(&out_path).expect("Could not save!");
|
||||
file.write_all(&toml::to_string_pretty(self).unwrap().into_bytes())?;
|
||||
|
||||
// Overwrite the original DB with
|
||||
fs::rename(out_path, &self.path).unwrap();
|
||||
|
||||
Ok(())
|
|
@ -1,7 +1,7 @@
|
|||
use blake3::Hash;
|
||||
use std::path::Path;
|
||||
|
||||
/// Get the Blake3 hash of a file, without reading it all into memory
|
||||
/// Get the Blake3 hash of a file, without reading it all into memory, and also get the size
|
||||
pub async fn hash_file<P: AsRef<Path>>(input: &P) -> Result<Hash, std::io::Error> {
|
||||
let mut hasher = blake3::Hasher::new();
|
||||
hasher.update_mmap_rayon(input)?;
|
Before Width: | Height: | Size: 3.1 KiB After Width: | Height: | Size: 3.1 KiB |
|
@ -1,21 +1,5 @@
|
|||
@font-face {
|
||||
font-family: "Roboto";
|
||||
src:
|
||||
local("Roboto"),
|
||||
url("/resources/fonts/Roboto.woff2");
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: "Fira Code";
|
||||
src:
|
||||
local("Fira Code"),
|
||||
url("/resources/fonts/FiraCode.woff2");
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: "Roboto", sans-serif;
|
||||
font-size: 12pt;
|
||||
font-optical-sizing: auto;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
|
||||
center {
|
||||
|
@ -24,38 +8,22 @@ center {
|
|||
}
|
||||
|
||||
footer {
|
||||
div {
|
||||
display: flex;
|
||||
width: fit-content;
|
||||
display: flex;
|
||||
width: fit-content;
|
||||
|
||||
p {
|
||||
border-right: 2px dotted grey;
|
||||
padding: 0 10px;
|
||||
}
|
||||
|
||||
p:last-child {
|
||||
border-right: none;
|
||||
}
|
||||
p {
|
||||
border-right: 1px dotted grey;
|
||||
padding: 0 10px;
|
||||
}
|
||||
|
||||
p.version {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
opacity: 45%;
|
||||
p:last-child {
|
||||
border-right: none;
|
||||
}
|
||||
}
|
||||
|
||||
hr {
|
||||
background-color: gray;
|
||||
width: 100%;
|
||||
height: 2px;
|
||||
display: block;
|
||||
border: none;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 3em;
|
||||
font-weight: bold;
|
||||
font-weight: bolder;
|
||||
}
|
||||
|
||||
p {
|
||||
|
@ -75,7 +43,6 @@ button {
|
|||
cursor: pointer;
|
||||
margin: 5px;
|
||||
border-radius: 5px;
|
||||
color: black;
|
||||
}
|
||||
|
||||
button.button {
|
||||
|
@ -110,24 +77,21 @@ button.main_file_upload {
|
|||
}
|
||||
|
||||
pre {
|
||||
font-family: "Fira Code", monospace;
|
||||
color: white;
|
||||
background-color: #161b22;
|
||||
font-size: 11pt;
|
||||
padding: 10px;
|
||||
overflow: auto;
|
||||
overflow: scroll;
|
||||
tab-size: 4;
|
||||
}
|
||||
|
||||
p code {
|
||||
font-family: "Fira Code", monospace;
|
||||
background-color: lightgray;
|
||||
font-size: 12pt;
|
||||
padding: 2px;
|
||||
}
|
||||
|
||||
h2 code {
|
||||
font-family: "Fira Code", monospace;
|
||||
font-size: 15pt;
|
||||
}
|
||||
|
||||
|
@ -152,19 +116,15 @@ h2 code {
|
|||
}
|
||||
|
||||
#uploadedFilesDisplay p.file_name {
|
||||
width: 50%;
|
||||
overflow: clip;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
display: block;
|
||||
flex-shrink: 2;
|
||||
flex-basis: 100%;
|
||||
min-width: 0;
|
||||
max-width: 70%;
|
||||
}
|
||||
|
||||
#uploadedFilesDisplay p.status {
|
||||
font-family: "Fira Code", monospace;
|
||||
font-weight: 500;
|
||||
font-family: monospace;
|
||||
font-size: 11pt;
|
||||
overflow: clip;
|
||||
text-overflow: ellipsis;
|
||||
|
@ -173,7 +133,6 @@ h2 code {
|
|||
margin: auto 0;
|
||||
margin-left: auto;
|
||||
width: min-content;
|
||||
min-width: 32px;
|
||||
flex-shrink: 2;
|
||||
display: block;
|
||||
}
|
||||
|
@ -190,10 +149,8 @@ h2 code {
|
|||
#uploadedFilesDisplay > div > progress {
|
||||
height: 20px;
|
||||
margin: auto;
|
||||
flex-grow: 2;
|
||||
display: block;
|
||||
width: 100%;
|
||||
flex-shrink: 2;
|
||||
max-width: 35%;
|
||||
}
|
||||
|
||||
#uploadedFilesDisplay button {
|
||||
|
@ -203,41 +160,6 @@ h2 code {
|
|||
background-color: white;
|
||||
}
|
||||
|
||||
.upload_inprogress {
|
||||
|
||||
}
|
||||
|
||||
.upload_failed {
|
||||
color: black;
|
||||
background-color: #ffb2ae;
|
||||
|
||||
a:link {
|
||||
all: revert;
|
||||
}
|
||||
|
||||
a:visited {
|
||||
all: revert;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
all: revert;
|
||||
}
|
||||
}
|
||||
|
||||
.upload_done {
|
||||
color: black;
|
||||
background-color: #a4ffbb;
|
||||
a:link {
|
||||
all: revert;
|
||||
}
|
||||
a:visited {
|
||||
all: revert;
|
||||
}
|
||||
a:hover {
|
||||
all: revert;
|
||||
}
|
||||
}
|
||||
|
||||
progress {
|
||||
--color: #84FFAE; /* the progress color */
|
||||
--background: lightgrey; /* the background color */
|
||||
|
@ -285,41 +207,14 @@ progress:not([value])::-moz-progress-bar {
|
|||
border-radius: 5px;
|
||||
box-shadow: 0 2px 5px rgba(0, 0, 0, 0.25) inset;
|
||||
width: 20%;
|
||||
animation-name: bounce;
|
||||
animation-name: example;
|
||||
animation-duration: 1s;
|
||||
animation-iteration-count: infinite;
|
||||
animation-direction: alternate;
|
||||
animation-timing-function: cubic-bezier(.17,.67,.83,.67);
|
||||
}
|
||||
|
||||
@keyframes bounce {
|
||||
@keyframes example {
|
||||
from {margin-left: 0%}
|
||||
to {margin-left: 80%}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
body {
|
||||
background-color: #131316;
|
||||
color: #ededed;
|
||||
}
|
||||
|
||||
a:link {
|
||||
color: #3c9fe5;
|
||||
}
|
||||
|
||||
a:visited {
|
||||
color: #37d6a7;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
color: #79d646;
|
||||
}
|
||||
|
||||
p code {
|
||||
color: black;
|
||||
white-space: pre;
|
||||
background-color: lightgray;
|
||||
font-size: 12pt;
|
||||
padding: 2px;
|
||||
}
|
||||
}
|
203
web/request.js
Normal file
203
web/request.js
Normal file
|
@ -0,0 +1,203 @@
|
|||
/*jshint esversion: 11 */
|
||||
|
||||
const TOO_LARGE_TEXT = "Too large!";
|
||||
const ZERO_TEXT = "File is blank!";
|
||||
const ERROR_TEXT = "Error!";
|
||||
|
||||
async function formSubmit() {
|
||||
const form = document.getElementById("uploadForm");
|
||||
const files = form.elements.fileUpload.files;
|
||||
const duration = form.elements.duration.value;
|
||||
const maxSize = form.elements.fileUpload.dataset.maxFilesize;
|
||||
|
||||
await fileSend(files, duration, maxSize);
|
||||
|
||||
// Reset the form file data since we've successfully submitted it
|
||||
form.elements.fileUpload.value = "";
|
||||
}
|
||||
|
||||
async function dragDropSubmit(evt) {
|
||||
const form = document.getElementById("uploadForm");
|
||||
const duration = form.elements.duration.value;
|
||||
|
||||
const files = getDroppedFiles(evt);
|
||||
|
||||
await fileSend(files, duration);
|
||||
}
|
||||
|
||||
function getDroppedFiles(evt) {
|
||||
evt.preventDefault();
|
||||
|
||||
const files = [];
|
||||
if (evt.dataTransfer.items) {
|
||||
// Use DataTransferItemList interface to access the file(s)
|
||||
[...evt.dataTransfer.items].forEach((item, _) => {
|
||||
// If dropped items aren't files, reject them
|
||||
if (item.kind === "file") {
|
||||
files.push(item.getAsFile());
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// Use DataTransfer interface to access the file(s)
|
||||
[...evt.dataTransfer.files].forEach((file, _) => {
|
||||
files.push(file.name);
|
||||
});
|
||||
}
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
async function fileSend(files, duration, maxSize) {
|
||||
for (const file of files) {
|
||||
const [linkRow, progressBar, progressText] = addNewToList(file.name);
|
||||
if (file.size > maxSize) {
|
||||
makeErrored(progressBar, progressText, linkRow, TOO_LARGE_TEXT);
|
||||
console.error("Provided file is too large", file.size, "bytes; max", maxSize, "bytes");
|
||||
continue;
|
||||
} else if (file.size == 0) {
|
||||
makeErrored(progressBar, progressText, linkRow, ZERO_TEXT);
|
||||
console.error("Provided file has 0 bytes");
|
||||
continue;
|
||||
}
|
||||
|
||||
const request = new XMLHttpRequest();
|
||||
request.open('POST', "./upload", true);
|
||||
|
||||
// Set up event listeners
|
||||
request.upload.addEventListener('progress',
|
||||
(p) => {uploadProgress(p, progressBar, progressText, linkRow);}, false);
|
||||
request.addEventListener('load',
|
||||
(c) => {uploadComplete(c, progressBar, progressText, linkRow);}, false);
|
||||
request.addEventListener('error',
|
||||
(e) => {networkErrorHandler(e, progressBar, progressText, linkRow);}, false);
|
||||
|
||||
// Create and send FormData
|
||||
try {
|
||||
const formData = new FormData();
|
||||
formData.append("duration", duration);
|
||||
formData.append("fileUpload", file);
|
||||
request.send(formData);
|
||||
} catch (e) {
|
||||
makeErrored(progressBar, progressText, linkRow, ERROR_TEXT);
|
||||
console.error("An error occured while uploading", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function makeErrored(progressBar, progressText, linkRow, errorMessage) {
|
||||
progressText.textContent = errorMessage;
|
||||
progressBar.style.display = "none";
|
||||
linkRow.style.background = "#ffb2ae";
|
||||
}
|
||||
|
||||
function makeFinished(progressBar, progressText, linkRow, response) {
|
||||
progressText.textContent = "";
|
||||
const _name = encodeURIComponent(response.name);
|
||||
const link = progressText.appendChild(document.createElement("a"));
|
||||
link.textContent = response.mmid;
|
||||
link.href = "/f/" + response.mmid;
|
||||
link.target = "_blank";
|
||||
|
||||
let button = linkRow.appendChild(document.createElement("button"));
|
||||
button.textContent = "📝";
|
||||
let buttonTimeout = null;
|
||||
button.addEventListener('click', function(_e) {
|
||||
const mmid = response.mmid;
|
||||
if (buttonTimeout) {
|
||||
clearTimeout(buttonTimeout);
|
||||
}
|
||||
navigator.clipboard.writeText(
|
||||
window.location.protocol + "//" + window.location.host + "/f/" + mmid
|
||||
);
|
||||
button.textContent = "✅";
|
||||
buttonTimeout = setTimeout(function() {
|
||||
button.textContent = "📝";
|
||||
}, 750);
|
||||
});
|
||||
|
||||
progressBar.style.display = "none";
|
||||
linkRow.style.background = "#a4ffbb";
|
||||
}
|
||||
|
||||
function networkErrorHandler(err, progressBar, progressText, linkRow) {
|
||||
makeErrored(progressBar, progressText, linkRow, "A network error occured");
|
||||
console.error("A network error occured while uploading", err);
|
||||
}
|
||||
|
||||
function uploadProgress(progress, progressBar, progressText, _linkRow) {
|
||||
if (progress.lengthComputable) {
|
||||
const progressPercent = Math.floor((progress.loaded / progress.total) * 100);
|
||||
if (progressPercent == 100) {
|
||||
progressBar.removeAttribute("value");
|
||||
progressText.textContent = "⏳";
|
||||
} else {
|
||||
progressBar.value = progressPercent;
|
||||
progressText.textContent = progressPercent + "%";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function uploadComplete(response, progressBar, progressText, linkRow) {
|
||||
let target = response.target;
|
||||
|
||||
if (target.status === 200) {
|
||||
const response = JSON.parse(target.responseText);
|
||||
|
||||
if (response.status) {
|
||||
console.log("Successfully uploaded file", response);
|
||||
makeFinished(progressBar, progressText, linkRow, response);
|
||||
} else {
|
||||
console.error("Error uploading", response);
|
||||
makeErrored(progressBar, progressText, linkRow, response.response);
|
||||
}
|
||||
} else if (target.status === 413) {
|
||||
makeErrored(progressBar, progressText, linkRow, TOO_LARGE_TEXT);
|
||||
} else {
|
||||
makeErrored(progressBar, progressText, linkRow, ERROR_TEXT);
|
||||
}
|
||||
}
|
||||
|
||||
function addNewToList(origFileName) {
|
||||
const uploadedFilesDisplay = document.getElementById("uploadedFilesDisplay");
|
||||
const linkRow = uploadedFilesDisplay.appendChild(document.createElement("div"));
|
||||
const fileName = linkRow.appendChild(document.createElement("p"));
|
||||
const progressBar = linkRow.appendChild(document.createElement("progress"));
|
||||
const progressTxt = linkRow.appendChild(document.createElement("p"));
|
||||
|
||||
fileName.textContent = origFileName;
|
||||
fileName.classList.add("file_name");
|
||||
progressTxt.classList.add("status");
|
||||
progressBar.max="100";
|
||||
progressBar.value="0";
|
||||
|
||||
return [linkRow, progressBar, progressTxt];
|
||||
}
|
||||
|
||||
async function initEverything() {
|
||||
const durationBox = document.getElementById("durationBox");
|
||||
const durationButtons = durationBox.getElementsByTagName("button");
|
||||
for (const b of durationButtons) {
|
||||
b.addEventListener("click", function (_e) {
|
||||
if (this.classList.contains("selected")) {
|
||||
return;
|
||||
}
|
||||
document.getElementById("uploadForm").elements.duration.value = this.dataset.durationSeconds + "s";
|
||||
let selected = this.parentNode.getElementsByClassName("selected");
|
||||
selected[0].classList.remove("selected");
|
||||
this.classList.add("selected");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// This is the entrypoint for everything basically
|
||||
document.addEventListener("DOMContentLoaded", function(_event) {
|
||||
const form = document.getElementById("uploadForm");
|
||||
form.addEventListener("submit", formSubmit);
|
||||
let fileButton = document.getElementById("fileButton");
|
||||
|
||||
document.addEventListener("drop", (e) => {e.preventDefault();}, false);
|
||||
fileButton.addEventListener("dragover", (e) => {e.preventDefault();}, false);
|
||||
fileButton.addEventListener("drop", dragDropSubmit, false);
|
||||
|
||||
initEverything();
|
||||
});
|
Loading…
Reference in a new issue