From 9b80670961d9a88bcd37f3eb511acf00cb499d47 Mon Sep 17 00:00:00 2001
From: G2-Games
Date: Thu, 31 Oct 2024 01:44:16 -0500
Subject: [PATCH] Chunked uploads work, no feedback to client yet

---
 src/database.rs |  37 +++++++++-
 src/lib.rs      | 183 +++++++++++++++++++++++++++++++++++++++++-------
 src/main.rs     |  10 ++-
 src/utils.rs    |   2 +-
 web/request.js  | 139 ++++++++----------------------------
 5 files changed, 231 insertions(+), 140 deletions(-)

diff --git a/src/database.rs b/src/database.rs
index 5d1cc9f..6d4e6e7 100644
--- a/src/database.rs
+++ b/src/database.rs
@@ -28,17 +28,16 @@ pub struct Mochibase {
     /// All entries in the database
     entries: HashMap<Mmid, MochiFile>,
-
-    chunks: HashMap<Uuid, ChunkedInfo>,
 }
 
 impl Mochibase {
+    /// Create a new database initialized with no data, and save it to the
+    /// provided path
     pub fn new<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
         let output = Self {
             path: path.as_ref().to_path_buf(),
             entries: HashMap::new(),
             hashes: HashMap::new(),
-            chunks: HashMap::new(),
         };
 
         // Save the database initially after creating it
@@ -355,3 +354,35 @@ impl<'r> FromFormField<'r> for Mmid {
         )
     }
 }
+
+/// An in-memory database for partially uploaded chunks of files
+#[derive(Default, Debug)]
+pub struct Chunkbase {
+    chunks: HashMap<Uuid, ChunkedInfo>,
+}
+
+impl Chunkbase {
+    pub fn chunks(&self) -> &HashMap<Uuid, ChunkedInfo> {
+        &self.chunks
+    }
+
+    pub fn mut_chunks(&mut self) -> &mut HashMap<Uuid, ChunkedInfo> {
+        &mut self.chunks
+    }
+}
+
+/// Information about how to manage partially uploaded chunks of files
+#[serde_as]
+#[derive(Default, Debug, Clone)]
+#[derive(Deserialize, Serialize)]
+pub struct ChunkedInfo {
+    pub name: String,
+    pub size: u64,
+    #[serde_as(as = "serde_with::DurationSeconds<i64>")]
+    pub expire_duration: TimeDelta,
+
+    #[serde(skip)]
+    pub path: PathBuf,
+    #[serde(skip)]
+    pub offset: u64,
+}
diff --git a/src/lib.rs b/src/lib.rs
index b57bc0a..6dcff88 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -6,20 +6,20 @@ pub mod settings;
 pub mod strings;
 pub mod utils;
 
-use std::sync::{Arc, RwLock};
+use std::{io, sync::{Arc, RwLock}};
 
 use crate::{
-    database::{Mmid, MochiFile, Mochibase},
     pages::{footer, head},
     settings::Settings,
-    strings::{parse_time_string, to_pretty_time},
-    utils::hash_file,
+    strings::to_pretty_time,
 };
-use chrono::{DateTime, Utc};
+use chrono::Utc;
+use database::{Chunkbase, ChunkedInfo, Mmid, MochiFile, Mochibase};
 use maud::{html, Markup, PreEscaped};
 use rocket::{
-    data::ToByteUnit, form::Form, fs::TempFile, get, post, serde::{json::Json, Serialize}, FromForm, State
+    data::{ByteUnit, ToByteUnit}, get, post, serde::{json::Json, Serialize}, tokio::{fs, io::{AsyncSeekExt, AsyncWriteExt}}, Data, State
 };
+use utils::hash_file;
 use uuid::Uuid;
 
 #[get("/")]
@@ -49,7 +49,7 @@ pub fn home(settings: &State<Settings>) -> Markup {
             form #uploadForm {
                 // It's stupid how these can't be styled so they're just hidden here...
                 input #fileDuration type="text" name="duration" minlength="2"
-                    maxlength="7" value=(settings.duration.default.num_seconds().to_string() + "s") style="display:none;";
+                    maxlength="7" value=(settings.duration.default.num_seconds().to_string()) style="display:none;";
                 input #fileInput type="file" name="fileUpload" multiple
                     onchange="formSubmit(this.parentNode)" data-max-filesize=(settings.max_filesize) style="display:none;";
             }
@@ -66,11 +66,9 @@ pub fn home(settings: &State<Settings>) -> Markup {
     }
 }
 
+/*
 #[derive(Debug, FromForm)]
 pub struct Upload<'r> {
-    #[field(name = "duration")]
-    expire_time: String,
-
     #[field(name = "fileUpload")]
     file: TempFile<'r>,
 }
@@ -104,15 +102,16 @@ impl ClientResponse {
 }
 
 /// Handle a file upload and store it
-#[post("/upload", data = "<file_data>")]
+#[post("/upload?<expire_time>", data = "<file_data>")]
 pub async fn handle_upload(
+    expire_time: String,
     mut file_data: Form<Upload<'_>>,
     db: &State<Arc<RwLock<Mochibase>>>,
     settings: &State<Settings>,
 ) -> Result<Json<ClientResponse>, std::io::Error> {
     let current = Utc::now();
 
     // Ensure the expiry time is valid, if not return an error
-    let expire_time = if let Ok(t) = parse_time_string(&file_data.expire_time) {
+    let expire_time = if let Ok(t) = parse_time_string(&expire_time) {
         if settings.duration.restrict_to_allowed && !settings.duration.allowed.contains(&t) {
             return Ok(Json(ClientResponse::failure("Duration not allowed")));
         }
@@ -176,29 +175,165 @@ pub async fn handle_upload(
         ..Default::default()
     }))
 }
+*/
 
+#[derive(Serialize, Default)]
 pub struct ChunkedResponse {
+    status: bool,
+    message: String,
+
     /// UUID used for associating the chunk with the final file
-    uuid: Uuid,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    uuid: Option<Uuid>,
 
     /// Valid max chunk size in bytes
-    chunk_size: u64,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    chunk_size: Option<ByteUnit>,
+}
 
-    /// The datetime at which the upload will be invalidated unless new
-    /// chunks have come in
-    timeout: DateTime<Utc>,
-
-    /// The datetime at which the upload will be invalidated even if new
-    /// chunks have come in
-    hard_timeout: DateTime<Utc>,
+impl ChunkedResponse {
+    fn failure(message: &str) -> Self {
+        Self {
+            status: false,
+            message: message.to_string(),
+            ..Default::default()
+        }
+    }
 }
 
 /// Start a chunked upload. Response contains all the info you need to continue
 /// uploading chunks.
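+///
+/// The request body is the JSON form of [`ChunkedInfo`] (for example
+/// `{"name": "file.txt", "size": 1048576, "expire_duration": 3600}`),
+/// and the response is a [`ChunkedResponse`] carrying the upload UUID
+/// and the maximum allowed chunk size.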
-#[get("/upload/chunked")] -pub async fn chunked_start() -> Result, std::io::Error> { +#[post("/upload/chunked", data = "")] +pub async fn chunked_upload_start( + db: &State>>, + settings: &State, + mut file_info: Json, +) -> Result, std::io::Error> { + let uuid = Uuid::new_v4(); + file_info.path = settings + .temp_dir + .join(uuid.to_string()); + // Perform some sanity checks + if file_info.size > settings.max_filesize { + return Ok(Json(ChunkedResponse::failure("File too large"))); + } + if settings.duration.restrict_to_allowed && !settings.duration.allowed.contains(&file_info.expire_duration) { + return Ok(Json(ChunkedResponse::failure("Duration not allowed"))); + } + if file_info.expire_duration > settings.duration.maximum { + return Ok(Json(ChunkedResponse::failure("Duration too large"))); + } + db.write() + .unwrap() + .mut_chunks() + .insert(uuid, file_info.into_inner()); - todo!() + Ok(Json(ChunkedResponse { + status: true, + message: "".into(), + uuid: Some(uuid), + chunk_size: Some(100.megabytes()), + })) +} + +#[post("/upload/chunked?", data = "")] +pub async fn chunked_upload_continue( + chunk_db: &State>>, + data: Data<'_>, + uuid: String, +) -> Result<(), io::Error> { + let uuid = Uuid::parse_str(&uuid).map_err(|e| io::Error::other(e))?; + let data_stream = data.open(101.megabytes()); + + let chunked_info = match chunk_db.read().unwrap().chunks().get(&uuid) { + Some(s) => s.clone(), + None => return Err(io::Error::other("Invalid UUID")), + }; + + let mut file = if !chunked_info.path.try_exists().is_ok_and(|e| e) { + fs::File::create_new(&chunked_info.path).await? + } else { + fs::File::options() + .read(true) + .write(true) + .truncate(false) + .open(&chunked_info.path) + .await? + }; + + file.seek(io::SeekFrom::Start(chunked_info.offset)).await?; + data_stream.stream_to(&mut file).await?.written; + file.flush().await?; + let position = file.stream_position().await?; + + if position > chunked_info.size { + chunk_db.write() + .unwrap() + .mut_chunks() + .remove(&uuid); + return Err(io::Error::other("File larger than expected")) + } + + chunk_db.write() + .unwrap() + .mut_chunks() + .get_mut(&uuid) + .unwrap() + .offset = position; + + Ok(()) +} + +/// Finalize a chunked upload +#[post("/upload/chunked?&finish")] +pub async fn chunked_upload_finish( + main_db: &State>>, + chunk_db: &State>>, + settings: &State, + uuid: String, +) -> Result, io::Error> { + let now = Utc::now(); + let uuid = Uuid::parse_str(&uuid).map_err(|e| io::Error::other(e))?; + let chunked_info = match chunk_db.read().unwrap().chunks().get(&uuid) { + Some(s) => s.clone(), + None => return Err(io::Error::other("Invalid UUID")), + }; + + // Remove the finished chunk from the db + chunk_db.write() + .unwrap() + .mut_chunks() + .remove(&uuid) + .unwrap(); + + if !chunked_info.path.try_exists().is_ok_and(|e| e) { + return Err(io::Error::other("File does not exist")) + } + + let hash = hash_file(&chunked_info.path).await?; + let mmid = Mmid::new_random(); + let file_type = file_format::FileFormat::from_file(&chunked_info.path)?; + + // If the hash does not exist in the database, + // move the file to the backend, else, delete it + if main_db.read().unwrap().get_hash(&hash).is_none() { + std::fs::rename(chunked_info.path, settings.file_dir.join(hash.to_string()))?; + } else { + std::fs::remove_file(chunked_info.path)?; + } + + let constructed_file = MochiFile::new( + mmid.clone(), + chunked_info.name, + file_type.media_type().to_string(), + hash, + now, + now + chunked_info.expire_duration + ); + + 
+    main_db.write().unwrap().insert(&mmid, constructed_file.clone());
+
+    Ok(Json(constructed_file))
+}
diff --git a/src/main.rs b/src/main.rs
index 860239c..c01523e 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -5,7 +5,7 @@ use std::{
 
 use chrono::TimeDelta;
 use confetti_box::{
-    database::{clean_loop, Mochibase},
+    database::{clean_loop, Chunkbase, Mochibase},
     endpoints, pages, resources,
     settings::Settings,
 };
@@ -39,6 +39,9 @@ async fn main() {
     let database = Arc::new(RwLock::new(
         Mochibase::open_or_new(&config.database_path).expect("Failed to open or create database"),
     ));
+    let chunkbase = Arc::new(RwLock::new(
+        Chunkbase::default(),
+    ));
     let local_db = database.clone();
 
     // Start monitoring thread, cleaning the database every 2 minutes
@@ -65,7 +68,9 @@ async fn main() {
         .mount(
             config.server.root_path.clone() + "/",
             routes![
-                confetti_box::handle_upload,
+                confetti_box::chunked_upload_start,
+                confetti_box::chunked_upload_continue,
+                confetti_box::chunked_upload_finish,
                 endpoints::server_info,
                 endpoints::file_info,
                 endpoints::lookup_mmid,
@@ -74,6 +79,7 @@ async fn main() {
             ],
         )
         .manage(database)
+        .manage(chunkbase)
         .manage(config)
         .configure(rocket_config)
         .launch()
diff --git a/src/utils.rs b/src/utils.rs
index dfb202b..565d6e0 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,7 +1,7 @@
 use blake3::Hash;
 use std::path::Path;
 
-/// Get the Blake3 hash of a file, without reading it all into memory, and also get the size
+/// Get the Blake3 hash of a file, without reading it all into memory
 pub async fn hash_file<P: AsRef<Path>>(input: &P) -> Result<Hash, std::io::Error> {
     let mut hasher = blake3::Hasher::new();
     hasher.update_mmap_rayon(input)?;
diff --git a/web/request.js b/web/request.js
index 5e2629c..12baf54 100644
--- a/web/request.js
+++ b/web/request.js
@@ -19,6 +19,7 @@ async function formSubmit() {
 async function dragDropSubmit(evt) {
     const form = document.getElementById("uploadForm");
     const duration = form.elements.duration.value;
+    const maxSize = form.elements.fileUpload.dataset.maxFilesize;
 
     evt.preventDefault();
 
@@ -38,12 +39,13 @@ async function dragDropSubmit(evt) {
         });
     }
 
-    await sendFile(files, duration);
+    await sendFile(files, duration, maxSize);
 }
 
 async function pasteSubmit(evt) {
     const form = document.getElementById("uploadForm");
     const duration = form.elements.duration.value;
+    const maxSize = form.elements.fileUpload.dataset.maxFilesize;
 
     const files = [];
     const len = evt.clipboardData.files.length;
@@ -52,136 +54,53 @@ async function pasteSubmit(evt) {
         files.push(file);
     }
 
-    await sendFile(files, duration);
+    await sendFile(files, duration, maxSize);
 }
 
 async function sendFile(files, duration, maxSize) {
     for (const file of files) {
-        const [linkRow, progressBar, progressText] = addNewToList(file.name);
         if (file.size > maxSize) {
-            makeErrored(progressBar, progressText, linkRow, TOO_LARGE_TEXT);
             console.error("Provided file is too large", file.size, "bytes; max", maxSize, "bytes");
             continue;
         } else if (file.size == 0) {
-            makeErrored(progressBar, progressText, linkRow, ZERO_TEXT);
             console.error("Provided file has 0 bytes");
             continue;
         }
 
-        const request = new XMLHttpRequest();
-        request.open('POST', "./upload", true);
-
-        // Set up event listeners
-        request.upload.addEventListener('progress',
-            (p) => {uploadProgress(p, progressBar, progressText, linkRow);}, false);
-        request.addEventListener('load',
-            (c) => {uploadComplete(c, progressBar, progressText, linkRow);}, false);
-        request.addEventListener('error',
-            (e) => {networkErrorHandler(e, progressBar, progressText, linkRow);}, false);
-
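+        // Ask the server to begin a new chunked upload session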
-        linkRow.classList.add("upload_inprogress");
-
-        // Create and send FormData
+        let chunkedResponse;
         try {
-            const formData = new FormData();
-            formData.append("duration", duration);
-            formData.append("fileUpload", file);
-            request.send(formData);
-        } catch (e) {
-            makeErrored(progressBar, progressText, linkRow, ERROR_TEXT);
-            console.error("An error occured while uploading", e);
+            const response = await fetch("/upload/chunked", {
+                method: "POST",
+                body: JSON.stringify({
+                    "name": file.name,
+                    "size": file.size,
+                    "expire_duration": parseInt(duration),
+                }),
+            });
+            if (!response.ok) {
+                throw new Error(`Response status: ${response.status}`);
+            }
+            chunkedResponse = await response.json();
+        } catch (error) {
+            console.error(error);
         }
+
+        // Upload the file in `chunk_size` chunks
+        for (let start = 0; start < file.size; start += chunkedResponse.chunk_size) {
+            const chunk = file.slice(start, start + chunkedResponse.chunk_size)
+
+            await fetch("/upload/chunked?uuid=" + chunkedResponse.uuid, { method: 'post', body: chunk }).then(res => res.text())
+        }
+
+        console.log(await fetch("/upload/chunked?uuid=" + chunkedResponse.uuid + "&finish", { method: 'post' }).then(res => res.json()))
     }
 }
 
-function makeErrored(progressBar, progressText, linkRow, errorMessage) {
-    progressText.textContent = errorMessage;
-    progressBar.style.display = "none";
-    linkRow.classList.add("upload_failed");
-}
-
-function makeFinished(progressBar, progressText, linkRow, response) {
-    progressText.textContent = "";
-    const link = progressText.appendChild(document.createElement("a"));
-    link.textContent = response.mmid;
-    link.href = "/f/" + response.mmid;
-    link.target = "_blank";
-
-    let button = linkRow.appendChild(document.createElement("button"));
-    button.textContent = "📝";
-    let buttonTimeout = null;
-    button.addEventListener('click', function(_e) {
-        const mmid = response.mmid;
-        if (buttonTimeout) {
-            clearTimeout(buttonTimeout);
-        }
-        navigator.clipboard.writeText(
-            window.location.protocol + "//" + window.location.host + "/f/" + mmid
-        );
-        button.textContent = "✅";
-        buttonTimeout = setTimeout(function() {
-            button.textContent = "📝";
-        }, 750);
-    });
-
-    progressBar.style.display = "none";
-    linkRow.classList.add("upload_done");
-}
-
 function networkErrorHandler(err, progressBar, progressText, linkRow) {
     makeErrored(progressBar, progressText, linkRow, "A network error occured");
     console.error("A network error occured while uploading", err);
 }
 
-function uploadProgress(progress, progressBar, progressText, _linkRow) {
-    if (progress.lengthComputable) {
-        const progressPercent = Math.floor((progress.loaded / progress.total) * 100);
-        if (progressPercent == 100) {
-            progressBar.removeAttribute("value");
-            progressText.textContent = "⏳";
-        } else {
-            progressBar.value = progressPercent;
-            progressText.textContent = progressPercent + "%";
-        }
-    }
-}
-
-function uploadComplete(response, progressBar, progressText, linkRow) {
-    let target = response.target;
-
-    if (target.status === 200) {
-        const response = JSON.parse(target.responseText);
-
-        if (response.status) {
-            console.log("Successfully uploaded file", response);
-            makeFinished(progressBar, progressText, linkRow, response);
-        } else {
-            console.error("Error uploading", response);
-            makeErrored(progressBar, progressText, linkRow, response.response);
-        }
-    } else if (target.status === 413) {
-        makeErrored(progressBar, progressText, linkRow, TOO_LARGE_TEXT);
-    } else {
-        makeErrored(progressBar, progressText, linkRow, ERROR_TEXT);
-    }
-}
-
-function addNewToList(origFileName) {
-    const uploadedFilesDisplay = document.getElementById("uploadedFilesDisplay");
-    const linkRow = uploadedFilesDisplay.appendChild(document.createElement("div"));
-    const fileName = linkRow.appendChild(document.createElement("p"));
-    const progressBar = linkRow.appendChild(document.createElement("progress"));
-    const progressTxt = linkRow.appendChild(document.createElement("p"));
-
-    fileName.textContent = origFileName;
-    fileName.classList.add("file_name");
-    progressTxt.classList.add("status");
-    progressBar.max="100";
-    progressBar.value="0";
-
-    return [linkRow, progressBar, progressTxt];
-}
-
 async function initEverything() {
     const durationBox = document.getElementById("durationBox");
     const durationButtons = durationBox.getElementsByTagName("button");
@@ -190,7 +109,7 @@ async function initEverything() {
             if (this.classList.contains("selected")) {
                 return;
             }
-            document.getElementById("uploadForm").elements.duration.value = this.dataset.durationSeconds + "s";
+            document.getElementById("uploadForm").elements.duration.value = this.dataset.durationSeconds;
             let selected = this.parentNode.getElementsByClassName("selected");
             selected[0].classList.remove("selected");
             this.classList.add("selected");
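
A quick way to exercise the new endpoints by hand, e.g. from the browser
console (a sketch only, not part of the patch: `file` is assumed to be a
File object taken from an <input type="file"> element, and the 3600-second
duration is a placeholder; `uuid` and `chunk_size` come back from the first
response):

    // 1. Register the upload, receiving a UUID and the max chunk size
    const info = await fetch("/upload/chunked", {
        method: "POST",
        body: JSON.stringify({
            name: file.name,
            size: file.size,
            expire_duration: 3600, // seconds
        }),
    }).then(res => res.json());

    // 2. Send the file body in server-approved chunk_size pieces
    for (let start = 0; start < file.size; start += info.chunk_size) {
        await fetch(`/upload/chunked?uuid=${info.uuid}`, {
            method: "POST",
            body: file.slice(start, start + info.chunk_size),
        });
    }

    // 3. Finalize; the response is the completed MochiFile record
    console.log(await fetch(`/upload/chunked?uuid=${info.uuid}&finish`, {
        method: "POST",
    }).then(res => res.json()));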