Chunked uploads work, no feedback to client yet

Author:  G2-Games
Date:    2024-10-31 01:44:16 -05:00
Parent:  2b8d7255b8
Commit:  9b80670961

5 changed files with 231 additions and 140 deletions

File: src/database.rs

@@ -28,17 +28,16 @@ pub struct Mochibase {
     /// All entries in the database
     entries: HashMap<Mmid, MochiFile>,
-
-    chunks: HashMap<Uuid, DateTime<Utc>>,
 }
 
 impl Mochibase {
+    /// Create a new database initialized with no data, and save it to the
+    /// provided path
     pub fn new<P: AsRef<Path>>(path: &P) -> Result<Self, io::Error> {
         let output = Self {
             path: path.as_ref().to_path_buf(),
             entries: HashMap::new(),
             hashes: HashMap::new(),
-            chunks: HashMap::new(),
         };
 
         // Save the database initially after creating it
@@ -355,3 +354,35 @@ impl<'r> FromFormField<'r> for Mmid {
         )
     }
 }
+
+/// An in-memory database for partially uploaded chunks of files
+#[derive(Default, Debug)]
+pub struct Chunkbase {
+    chunks: HashMap<Uuid, ChunkedInfo>,
+}
+
+impl Chunkbase {
+    pub fn chunks(&self) -> &HashMap<Uuid, ChunkedInfo> {
+        &self.chunks
+    }
+
+    pub fn mut_chunks(&mut self) -> &mut HashMap<Uuid, ChunkedInfo> {
+        &mut self.chunks
+    }
+}
+
+/// Information about how to manage partially uploaded chunks of files
+#[serde_as]
+#[derive(Default, Debug, Clone)]
+#[derive(Deserialize, Serialize)]
+pub struct ChunkedInfo {
+    pub name: String,
+    pub size: u64,
+    #[serde_as(as = "serde_with::DurationSeconds<i64>")]
+    pub expire_duration: TimeDelta,
+
+    #[serde(skip)]
+    pub path: PathBuf,
+    #[serde(skip)]
+    pub offset: u64,
+}
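Note: ChunkedInfo is deserialized directly from the request body of the new
chunked_upload_start endpoint (below), so only name, size, and expire_duration
cross the wire; path and offset are #[serde(skip)] and start at their Default
values. A minimal round-trip sketch, assuming serde_json is available; the
payload values here are hypothetical:

    fn parse_chunked_info() -> Result<(), serde_json::Error> {
        // Hypothetical body for POST /upload/chunked; expire_duration is an
        // integer number of seconds via serde_with::DurationSeconds<i64>.
        let body = r#"{ "name": "photo.png", "size": 1048576, "expire_duration": 3600 }"#;
        let info: ChunkedInfo = serde_json::from_str(body)?;

        assert_eq!(info.name, "photo.png");
        assert_eq!(info.size, 1048576);
        assert_eq!(info.expire_duration.num_seconds(), 3600);
        assert_eq!(info.offset, 0); // skipped field, defaulted
        Ok(())
    }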

File: src/lib.rs

@@ -6,20 +6,20 @@ pub mod settings;
 pub mod strings;
 pub mod utils;
 
-use std::sync::{Arc, RwLock};
+use std::{io, sync::{Arc, RwLock}};
 
 use crate::{
-    database::{Mmid, MochiFile, Mochibase},
     pages::{footer, head},
     settings::Settings,
-    strings::{parse_time_string, to_pretty_time},
-    utils::hash_file,
+    strings::to_pretty_time,
 };
-use chrono::{DateTime, Utc};
+use chrono::Utc;
+use database::{Chunkbase, ChunkedInfo, Mmid, MochiFile, Mochibase};
 use maud::{html, Markup, PreEscaped};
 use rocket::{
-    data::ToByteUnit, form::Form, fs::TempFile, get, post, serde::{json::Json, Serialize}, FromForm, State
+    data::{ByteUnit, ToByteUnit}, get, post, serde::{json::Json, Serialize}, tokio::{fs, io::{AsyncSeekExt, AsyncWriteExt}}, Data, State
 };
+use utils::hash_file;
 use uuid::Uuid;
 
 #[get("/")]
@@ -49,7 +49,7 @@ pub fn home(settings: &State<Settings>) -> Markup {
         form #uploadForm {
             // It's stupid how these can't be styled so they're just hidden here...
             input #fileDuration type="text" name="duration" minlength="2"
-                maxlength="7" value=(settings.duration.default.num_seconds().to_string() + "s") style="display:none;";
+                maxlength="7" value=(settings.duration.default.num_seconds().to_string()) style="display:none;";
             input #fileInput type="file" name="fileUpload" multiple
                 onchange="formSubmit(this.parentNode)" data-max-filesize=(settings.max_filesize) style="display:none;";
         }
@@ -66,11 +66,9 @@ pub fn home(settings: &State<Settings>) -> Markup {
     }
 }
 
+/*
 #[derive(Debug, FromForm)]
 pub struct Upload<'r> {
-    #[field(name = "duration")]
-    expire_time: String,
-
     #[field(name = "fileUpload")]
     file: TempFile<'r>,
 }
@@ -104,15 +102,16 @@ impl ClientResponse {
 }
 
 /// Handle a file upload and store it
-#[post("/upload", data = "<file_data>")]
+#[post("/upload?<expire_time>", data = "<file_data>")]
 pub async fn handle_upload(
+    expire_time: String,
     mut file_data: Form<Upload<'_>>,
     db: &State<Arc<RwLock<Mochibase>>>,
     settings: &State<Settings>,
 ) -> Result<Json<ClientResponse>, std::io::Error> {
     let current = Utc::now();
 
     // Ensure the expiry time is valid, if not return an error
-    let expire_time = if let Ok(t) = parse_time_string(&file_data.expire_time) {
+    let expire_time = if let Ok(t) = parse_time_string(&expire_time) {
         if settings.duration.restrict_to_allowed && !settings.duration.allowed.contains(&t) {
             return Ok(Json(ClientResponse::failure("Duration not allowed")));
         }
@@ -176,29 +175,165 @@ pub async fn handle_upload(
         ..Default::default()
     }))
 }
+*/
 
+#[derive(Serialize, Default)]
 pub struct ChunkedResponse {
+    status: bool,
+    message: String,
+
     /// UUID used for associating the chunk with the final file
-    uuid: Uuid,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    uuid: Option<Uuid>,
 
     /// Valid max chunk size in bytes
-    chunk_size: u64,
-
-    /// The datetime at which the upload will be invalidated unless new
-    /// chunks have come in
-    timeout: DateTime<Utc>,
-
-    /// The datetime at which the upload will be invalidated even if new
-    /// chunks have come in
-    hard_timeout: DateTime<Utc>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    chunk_size: Option<ByteUnit>,
+}
+
+impl ChunkedResponse {
+    fn failure(message: &str) -> Self {
+        Self {
+            status: false,
+            message: message.to_string(),
+            ..Default::default()
+        }
+    }
 }
 
 /// Start a chunked upload. Response contains all the info you need to continue
 /// uploading chunks.
-#[get("/upload/chunked")]
-pub async fn chunked_start() -> Result<Json<ClientResponse>, std::io::Error> {
-    todo!()
+#[post("/upload/chunked", data = "<file_info>")]
+pub async fn chunked_upload_start(
+    db: &State<Arc<RwLock<Chunkbase>>>,
+    settings: &State<Settings>,
+    mut file_info: Json<ChunkedInfo>,
+) -> Result<Json<ChunkedResponse>, std::io::Error> {
+    let uuid = Uuid::new_v4();
+    file_info.path = settings
+        .temp_dir
+        .join(uuid.to_string());
+
+    // Perform some sanity checks
+    if file_info.size > settings.max_filesize {
+        return Ok(Json(ChunkedResponse::failure("File too large")));
+    }
+
+    if settings.duration.restrict_to_allowed && !settings.duration.allowed.contains(&file_info.expire_duration) {
+        return Ok(Json(ChunkedResponse::failure("Duration not allowed")));
+    }
+
+    if file_info.expire_duration > settings.duration.maximum {
+        return Ok(Json(ChunkedResponse::failure("Duration too large")));
+    }
+
+    db.write()
+        .unwrap()
+        .mut_chunks()
+        .insert(uuid, file_info.into_inner());
+
+    Ok(Json(ChunkedResponse {
+        status: true,
+        message: "".into(),
+        uuid: Some(uuid),
+        chunk_size: Some(100.megabytes()),
+    }))
+}
+
+#[post("/upload/chunked?<uuid>", data = "<data>")]
+pub async fn chunked_upload_continue(
+    chunk_db: &State<Arc<RwLock<Chunkbase>>>,
+    data: Data<'_>,
+    uuid: String,
+) -> Result<(), io::Error> {
+    let uuid = Uuid::parse_str(&uuid).map_err(|e| io::Error::other(e))?;
+    let data_stream = data.open(101.megabytes());
+
+    let chunked_info = match chunk_db.read().unwrap().chunks().get(&uuid) {
+        Some(s) => s.clone(),
+        None => return Err(io::Error::other("Invalid UUID")),
+    };
+
+    let mut file = if !chunked_info.path.try_exists().is_ok_and(|e| e) {
+        fs::File::create_new(&chunked_info.path).await?
+    } else {
+        fs::File::options()
+            .read(true)
+            .write(true)
+            .truncate(false)
+            .open(&chunked_info.path)
+            .await?
+    };
+
+    file.seek(io::SeekFrom::Start(chunked_info.offset)).await?;
+    data_stream.stream_to(&mut file).await?.written;
+    file.flush().await?;
+    let position = file.stream_position().await?;
+
+    if position > chunked_info.size {
+        chunk_db.write()
+            .unwrap()
+            .mut_chunks()
+            .remove(&uuid);
+        return Err(io::Error::other("File larger than expected"))
+    }
+
+    chunk_db.write()
+        .unwrap()
+        .mut_chunks()
+        .get_mut(&uuid)
+        .unwrap()
+        .offset = position;
+
+    Ok(())
+}
+
+/// Finalize a chunked upload
+#[post("/upload/chunked?<uuid>&finish")]
+pub async fn chunked_upload_finish(
+    main_db: &State<Arc<RwLock<Mochibase>>>,
+    chunk_db: &State<Arc<RwLock<Chunkbase>>>,
+    settings: &State<Settings>,
+    uuid: String,
+) -> Result<Json<MochiFile>, io::Error> {
+    let now = Utc::now();
+    let uuid = Uuid::parse_str(&uuid).map_err(|e| io::Error::other(e))?;
+    let chunked_info = match chunk_db.read().unwrap().chunks().get(&uuid) {
+        Some(s) => s.clone(),
+        None => return Err(io::Error::other("Invalid UUID")),
+    };
+
+    // Remove the finished chunk from the db
+    chunk_db.write()
+        .unwrap()
+        .mut_chunks()
+        .remove(&uuid)
+        .unwrap();
+
+    if !chunked_info.path.try_exists().is_ok_and(|e| e) {
+        return Err(io::Error::other("File does not exist"))
+    }
+
+    let hash = hash_file(&chunked_info.path).await?;
+    let mmid = Mmid::new_random();
+    let file_type = file_format::FileFormat::from_file(&chunked_info.path)?;
+
+    // If the hash does not exist in the database,
+    // move the file to the backend, else, delete it
+    if main_db.read().unwrap().get_hash(&hash).is_none() {
+        std::fs::rename(chunked_info.path, settings.file_dir.join(hash.to_string()))?;
+    } else {
+        std::fs::remove_file(chunked_info.path)?;
+    }
+
+    let constructed_file = MochiFile::new(
+        mmid.clone(),
+        chunked_info.name,
+        file_type.media_type().to_string(),
+        hash,
+        now,
+        now + chunked_info.expire_duration
+    );
+
+    main_db.write().unwrap().insert(&mmid, constructed_file.clone());
+
+    Ok(Json(constructed_file))
+}
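Taken together, the three handlers define a simple protocol: register the
upload, stream the body in pieces, then finalize. A client-side sketch of that
flow, assuming the reqwest (with its "json" feature) and serde_json crates;
the helper and its names are hypothetical, while the paths, query parameters,
and JSON fields come from the handlers above:

    // Hypothetical end-to-end client for the chunked upload flow.
    async fn upload_chunked(
        base: &str,
        name: &str,
        data: &[u8],
    ) -> Result<(), Box<dyn std::error::Error>> {
        let client = reqwest::Client::new();

        // Step 1: register the upload, receiving a UUID and the chunk size
        let start: serde_json::Value = client
            .post(format!("{base}/upload/chunked"))
            .json(&serde_json::json!({
                "name": name,
                "size": data.len() as u64,
                "expire_duration": 3600, // seconds; must be an allowed duration
            }))
            .send()
            .await?
            .json()
            .await?;
        let uuid = start["uuid"].as_str().ok_or("upload rejected")?.to_owned();
        // chunk_size serializes as a plain byte count
        let chunk_size = start["chunk_size"].as_u64().ok_or("no chunk size")? as usize;

        // Step 2: send the file in chunk_size pieces; the server appends
        // each piece at the offset it recorded for this UUID
        for chunk in data.chunks(chunk_size) {
            client
                .post(format!("{base}/upload/chunked?uuid={uuid}"))
                .body(chunk.to_vec())
                .send()
                .await?
                .error_for_status()?;
        }

        // Step 3: finalize; the server hashes the temp file, moves it into
        // the backend, and returns the new MochiFile record
        let record: serde_json::Value = client
            .post(format!("{base}/upload/chunked?uuid={uuid}&finish"))
            .send()
            .await?
            .json()
            .await?;
        println!("uploaded: {record}");
        Ok(())
    }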

File: src/main.rs

@@ -5,7 +5,7 @@ use std::{
 
 use chrono::TimeDelta;
 use confetti_box::{
-    database::{clean_loop, Mochibase},
+    database::{clean_loop, Chunkbase, Mochibase},
     endpoints, pages, resources,
     settings::Settings,
 };
@@ -39,6 +39,9 @@ async fn main() {
     let database = Arc::new(RwLock::new(
         Mochibase::open_or_new(&config.database_path).expect("Failed to open or create database"),
     ));
+    let chunkbase = Arc::new(RwLock::new(
+        Chunkbase::default(),
+    ));
     let local_db = database.clone();
 
     // Start monitoring thread, cleaning the database every 2 minutes
@@ -65,7 +68,9 @@ async fn main() {
         .mount(
             config.server.root_path.clone() + "/",
             routes![
-                confetti_box::handle_upload,
+                confetti_box::chunked_upload_start,
+                confetti_box::chunked_upload_continue,
+                confetti_box::chunked_upload_finish,
                 endpoints::server_info,
                 endpoints::file_info,
                 endpoints::lookup_mmid,
@@ -74,6 +79,7 @@ async fn main() {
             ],
         )
         .manage(database)
+        .manage(chunkbase)
         .manage(config)
         .configure(rocket_config)
         .launch()

File: src/utils.rs

@@ -1,7 +1,7 @@
 use blake3::Hash;
 use std::path::Path;
 
-/// Get the Blake3 hash of a file, without reading it all into memory, and also get the size
+/// Get the Blake3 hash of a file, without reading it all into memory
 pub async fn hash_file<P: AsRef<Path>>(input: &P) -> Result<Hash, std::io::Error> {
     let mut hasher = blake3::Hasher::new();
     hasher.update_mmap_rayon(input)?;
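For reference, hash_file memory-maps the file and hashes it on the rayon
thread pool (update_mmap_rayon), so even large finished uploads are hashed
without being read into memory. A usage sketch, with a hypothetical path:

    async fn example() -> Result<(), std::io::Error> {
        let hash = hash_file(&"/tmp/finished_upload").await?;
        println!("blake3: {hash}"); // blake3::Hash displays as lowercase hex
        Ok(())
    }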

File: web/request.js

@@ -19,6 +19,7 @@ async function formSubmit() {
 async function dragDropSubmit(evt) {
     const form = document.getElementById("uploadForm");
     const duration = form.elements.duration.value;
+    const maxSize = form.elements.fileUpload.dataset.maxFilesize;
 
     evt.preventDefault();
@@ -38,12 +39,13 @@ async function dragDropSubmit(evt) {
         });
     }
 
-    await sendFile(files, duration);
+    await sendFile(files, duration, maxSize);
 }
 
 async function pasteSubmit(evt) {
     const form = document.getElementById("uploadForm");
     const duration = form.elements.duration.value;
+    const maxSize = form.elements.fileUpload.dataset.maxFilesize;
     const files = [];
 
     const len = evt.clipboardData.files.length;
@@ -52,80 +54,46 @@ async function pasteSubmit(evt) {
         files.push(file);
     }
 
-    await sendFile(files, duration);
+    await sendFile(files, duration, maxSize);
 }
 
 async function sendFile(files, duration, maxSize) {
     for (const file of files) {
-        const [linkRow, progressBar, progressText] = addNewToList(file.name);
         if (file.size > maxSize) {
-            makeErrored(progressBar, progressText, linkRow, TOO_LARGE_TEXT);
            console.error("Provided file is too large", file.size, "bytes; max", maxSize, "bytes");
            continue;
        } else if (file.size == 0) {
-            makeErrored(progressBar, progressText, linkRow, ZERO_TEXT);
            console.error("Provided file has 0 bytes");
            continue;
        }
 
-        const request = new XMLHttpRequest();
-        request.open('POST', "./upload", true);
-
-        // Set up event listeners
-        request.upload.addEventListener('progress',
-            (p) => {uploadProgress(p, progressBar, progressText, linkRow);}, false);
-        request.addEventListener('load',
-            (c) => {uploadComplete(c, progressBar, progressText, linkRow);}, false);
-        request.addEventListener('error',
-            (e) => {networkErrorHandler(e, progressBar, progressText, linkRow);}, false);
-
-        linkRow.classList.add("upload_inprogress");
-
-        // Create and send FormData
+        let chunkedResponse;
         try {
-            const formData = new FormData();
-            formData.append("duration", duration);
-            formData.append("fileUpload", file);
-            request.send(formData);
-        } catch (e) {
-            makeErrored(progressBar, progressText, linkRow, ERROR_TEXT);
-            console.error("An error occured while uploading", e);
-        }
-    }
-}
-
-function makeErrored(progressBar, progressText, linkRow, errorMessage) {
-    progressText.textContent = errorMessage;
-    progressBar.style.display = "none";
-    linkRow.classList.add("upload_failed");
-}
-
-function makeFinished(progressBar, progressText, linkRow, response) {
-    progressText.textContent = "";
-    const link = progressText.appendChild(document.createElement("a"));
-    link.textContent = response.mmid;
-    link.href = "/f/" + response.mmid;
-    link.target = "_blank";
-
-    let button = linkRow.appendChild(document.createElement("button"));
-    button.textContent = "📝";
-    let buttonTimeout = null;
-    button.addEventListener('click', function(_e) {
-        const mmid = response.mmid;
-        if (buttonTimeout) {
-            clearTimeout(buttonTimeout);
-        }
-        navigator.clipboard.writeText(
-            window.location.protocol + "//" + window.location.host + "/f/" + mmid
-        );
-        button.textContent = "✅";
-        buttonTimeout = setTimeout(function() {
-            button.textContent = "📝";
-        }, 750);
-    });
-
-    progressBar.style.display = "none";
-    linkRow.classList.add("upload_done");
+            const response = await fetch("/upload/chunked", {
+                method: "POST",
+                body: JSON.stringify({
+                    "name": file.name,
+                    "size": file.size,
+                    "expire_duration": parseInt(duration),
+                }),
+            });
+            if (!response.ok) {
+                throw new Error(`Response status: ${response.status}`);
+            }
+            chunkedResponse = await response.json();
+        } catch (error) {
+            console.error(error);
+        }
+
+        // Upload the file in `chunk_size` chunks
+        for (let start = 0; start < file.size; start += chunkedResponse.chunk_size) {
+            const chunk = file.slice(start, start + chunkedResponse.chunk_size)
+            await fetch("/upload/chunked?uuid=" + chunkedResponse.uuid, { method: 'post', body: chunk }).then(res => res.text())
+        }
+
+        console.log(await fetch("/upload/chunked?uuid=" + chunkedResponse.uuid + "&finish", { method: 'post' }).then(res => res.json()))
     }
 }
 
 function networkErrorHandler(err, progressBar, progressText, linkRow) {
@@ -133,55 +101,6 @@ function networkErrorHandler(err, progressBar, progressText, linkRow) {
     console.error("A network error occured while uploading", err);
 }
 
-function uploadProgress(progress, progressBar, progressText, _linkRow) {
-    if (progress.lengthComputable) {
-        const progressPercent = Math.floor((progress.loaded / progress.total) * 100);
-        if (progressPercent == 100) {
-            progressBar.removeAttribute("value");
-            progressText.textContent = "⏳";
-        } else {
-            progressBar.value = progressPercent;
-            progressText.textContent = progressPercent + "%";
-        }
-    }
-}
-
-function uploadComplete(response, progressBar, progressText, linkRow) {
-    let target = response.target;
-
-    if (target.status === 200) {
-        const response = JSON.parse(target.responseText);
-
-        if (response.status) {
-            console.log("Successfully uploaded file", response);
-            makeFinished(progressBar, progressText, linkRow, response);
-        } else {
-            console.error("Error uploading", response);
-            makeErrored(progressBar, progressText, linkRow, response.response);
-        }
-    } else if (target.status === 413) {
-        makeErrored(progressBar, progressText, linkRow, TOO_LARGE_TEXT);
-    } else {
-        makeErrored(progressBar, progressText, linkRow, ERROR_TEXT);
-    }
-}
-
-function addNewToList(origFileName) {
-    const uploadedFilesDisplay = document.getElementById("uploadedFilesDisplay");
-    const linkRow = uploadedFilesDisplay.appendChild(document.createElement("div"));
-    const fileName = linkRow.appendChild(document.createElement("p"));
-    const progressBar = linkRow.appendChild(document.createElement("progress"));
-    const progressTxt = linkRow.appendChild(document.createElement("p"));
-
-    fileName.textContent = origFileName;
-    fileName.classList.add("file_name");
-    progressTxt.classList.add("status");
-    progressBar.max="100";
-    progressBar.value="0";
-
-    return [linkRow, progressBar, progressTxt];
-}
-
 async function initEverything() {
     const durationBox = document.getElementById("durationBox");
     const durationButtons = durationBox.getElementsByTagName("button");
@@ -190,7 +109,7 @@ async function initEverything() {
             if (this.classList.contains("selected")) {
                 return;
             }
-            document.getElementById("uploadForm").elements.duration.value = this.dataset.durationSeconds + "s";
+            document.getElementById("uploadForm").elements.duration.value = this.dataset.durationSeconds;
             let selected = this.parentNode.getElementsByClassName("selected");
             selected[0].classList.remove("selected");
             this.classList.add("selected");