Mirror of https://github.com/Dangoware/confetti-box.git (synced 2025-04-19 23:32:58 -05:00)
I think these methods are cursed

parent 4875445325
commit bf17b1bc04
2 changed files with 92 additions and 72 deletions
src/lib.rs (12 lines changed)
@@ -6,7 +6,7 @@ pub mod settings;
 pub mod strings;
 pub mod utils;
 
-use std::{io, sync::{Arc, RwLock}};
+use std::{io::{self, ErrorKind}, sync::{Arc, RwLock}};
 
 use crate::{
     pages::{footer, head},
@@ -203,7 +203,7 @@ impl ChunkedResponse {
 
 /// Start a chunked upload. Response contains all the info you need to continue
 /// uploading chunks.
-#[post("/upload/chunked", data = "<file_info>", rank = 1)]
+#[post("/upload/chunked", data = "<file_info>", rank = 2)]
 pub async fn chunked_upload_start(
     db: &State<Arc<RwLock<Chunkbase>>>,
     settings: &State<Settings>,
@@ -238,7 +238,7 @@ pub async fn chunked_upload_start(
     }))
 }
 
-#[post("/upload/chunked?<uuid>&<offset>", data = "<data>")]
+#[post("/upload/chunked?<uuid>&<offset>", data = "<data>", rank = 1)]
 pub async fn chunked_upload_continue(
     chunk_db: &State<Arc<RwLock<Chunkbase>>>,
     data: Data<'_>,
@@ -264,6 +264,10 @@ pub async fn chunked_upload_continue(
             .await?
     };
 
+    if offset > chunked_info.size {
+        return Err(io::Error::new(ErrorKind::InvalidInput, "The seek position is larger than the file size"))
+    }
+
     file.seek(io::SeekFrom::Start(offset)).await?;
     data_stream.stream_to(&mut file).await?.written;
     file.flush().await?;
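
The added guard rejects a chunk whose starting offset lies beyond the size declared when the upload began, before any bytes are written; without it, a client could seek past the declared end and grow the chunk file without bound. A minimal sketch of the same check in isolation (check_offset is a hypothetical name; in the real handler the io::Result propagates with `?`):

use std::io::{self, ErrorKind};

// Reject a seek position that lies beyond the declared upload size.
fn check_offset(offset: u64, size: u64) -> io::Result<()> {
    if offset > size {
        return Err(io::Error::new(
            ErrorKind::InvalidInput,
            "The seek position is larger than the file size",
        ));
    }
    Ok(())
}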
@@ -281,7 +285,7 @@ pub async fn chunked_upload_continue(
 }
 
 /// Finalize a chunked upload
-#[get("/upload/chunked?<uuid>&finish")]
+#[get("/upload/chunked?<uuid>&finish", rank = 3)]
 pub async fn chunked_upload_finish(
     main_db: &State<Arc<RwLock<Mochibase>>>,
     chunk_db: &State<Arc<RwLock<Chunkbase>>>,
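
All three /upload/chunked routes share one path and are distinguished only by their query and data guards, which is presumably why this commit gives each an explicit rank: Rocket tries colliding routes in ascending rank order, and a route whose guards fail to match falls through to the next. A minimal sketch of the mechanism with hypothetical handlers (not the repo's actual signatures):

#[macro_use] extern crate rocket;

// Tried first (rank = 1): matches only when both query guards are present and parse.
#[post("/upload/chunked?<uuid>&<offset>", rank = 1)]
fn continue_upload(uuid: &str, offset: u64) -> String {
    format!("continue {uuid} at offset {offset}")
}

// Fallback (rank = 2): a plain POST to the same path starts a new upload.
#[post("/upload/chunked", rank = 2)]
fn start_upload() -> &'static str {
    "start a new upload"
}

#[launch]
fn rocket() -> _ {
    rocket::build().mount("/", routes![continue_upload, start_upload])
}

Giving the continuation route the lower rank means a request carrying uuid and offset is routed there first instead of being swallowed by the bare /upload/chunked handler.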
web/request.js (152 lines changed)
@@ -10,7 +10,7 @@ async function formSubmit() {
     const duration = form.elements.duration.value;
     const maxSize = form.elements.fileUpload.dataset.maxFilesize;
 
-    await sendFile(files, duration, maxSize);
+    await sendFiles(files, duration, maxSize);
 
     // Reset the form file data since we've successfully submitted it
     form.elements.fileUpload.value = "";
@@ -39,7 +39,7 @@ async function dragDropSubmit(evt) {
         });
     }
 
-    await sendFile(files, duration, maxSize);
+    await sendFiles(files, duration, maxSize);
 }
 
 async function pasteSubmit(evt) {
@@ -54,74 +54,92 @@ async function pasteSubmit(evt) {
         files.push(file);
     }
 
-    await sendFile(files, duration, maxSize);
+    await sendFiles(files, duration, maxSize);
 }
 
-async function sendFile(files, duration, maxSize) {
+async function sendFiles(files, duration, maxSize) {
+    const uploadArray = [];
+    const concurrencyLimit = 10;
+
     for (const file of files) {
-        const [linkRow, progressBar, progressText] = await addNewToList(file.name);
-        if (file.size > maxSize) {
-            console.error("Provided file is too large", file.size, "bytes; max", maxSize, "bytes");
-            continue;
-        } else if (file.size == 0) {
-            console.error("Provided file has 0 bytes");
-            continue;
+        // Add each upload to the array
+        uploadArray.push(uploadFile(file, duration, maxSize));
+
+        // If the number of uploads reaches the concurrency limit, wait for them to finish
+        if (uploadArray.length >= concurrencyLimit) {
+            await Promise.allSettled(uploadArray);
+            uploadArray.length = 0; // Clear the array after each batch
         }
-
-        // Get preliminary upload information
-        let chunkedResponse;
-        try {
-            const response = await fetch("/upload/chunked", {
-                method: "POST",
-                body: JSON.stringify({
-                    "name": file.name,
-                    "size": file.size,
-                    "expire_duration": parseInt(duration),
-                }),
-            });
-            if (!response.ok) {
-                throw new Error(`Response status: ${response.status}`);
-            }
-            chunkedResponse = await response.json();
-        } catch (error) {
-            console.error(error);
-        }
-
-        // Upload the file in `chunk_size` chunks
-        let uploadArray = [];
-        const progressValues = [];
-        for (let start = 0; start < file.size; start += chunkedResponse.chunk_size) {
-            const chunk = file.slice(start, start + chunkedResponse.chunk_size)
-            const url = "/upload/chunked?uuid=" + chunkedResponse.uuid + "&offset=" + start;
-            const ID = progressValues.push(0);
-
-            let upload = new Promise(function (resolve, reject) {
-                let request = new XMLHttpRequest();
-                request.open("POST", url, true);
-                request.upload.addEventListener('progress',
-                    (p) => {uploadProgress(p, progressBar, progressText, progressValues, file.size, ID);}, true
-                );
-
-                request.onload = () => {
-                    if (this.status >= 200 && this.status < 300) {
-                        resolve(request.response);
-                    } else {
-                        reject({status: this.status, statusText: request.statusText});
-                    }
-                };
-                request.onerror = () => reject({status: this.status, statusText: request.statusText});
-                request.send(chunk);
-            });
-
-            uploadArray.push(upload);
-        }
-        console.log("Waiting for multiple uploads to complete");
-        console.log(await Promise.allSettled(uploadArray));
-
-        // Finish the request and update the progress box
-        const result = await fetch("/upload/chunked?uuid=" + chunkedResponse.uuid + "&finish");
-        uploadComplete(result, progressBar, progressText, linkRow);
     }
+
+    // Final batch to handle any remaining files
+    if (uploadArray.length > 0) {
+        await Promise.allSettled(uploadArray);
+    }
+}
+
+async function uploadFile(file, duration, maxSize) {
+    const [linkRow, progressBar, progressText] = await addNewToList(file.name);
+    if (file.size > maxSize) {
+        console.error("Provided file is too large", file.size, "bytes; max", maxSize, "bytes");
+        return;
+    } else if (file.size == 0) {
+        console.error("Provided file has 0 bytes");
+        return;
+    }
+
+    // Get preliminary upload information
+    let chunkedResponse;
+    try {
+        const response = await fetch("/upload/chunked", {
+            method: "POST",
+            body: JSON.stringify({
+                "name": file.name,
+                "size": file.size,
+                "expire_duration": parseInt(duration),
+            }),
+        });
+        if (!response.ok) {
+            throw new Error(`Response status: ${response.status}`);
+        }
+        chunkedResponse = await response.json();
+    } catch (error) {
+        console.error(error);
+    }
+
+    // Upload the file in `chunk_size` chunks
+    const chunkUploadArray = [];
+    const progressValues = [];
+    for (let start = 0; start < file.size; start += chunkedResponse.chunk_size) {
+        const chunk = file.slice(start, start + chunkedResponse.chunk_size)
+        const url = "/upload/chunked?uuid=" + chunkedResponse.uuid + "&offset=" + start;
+        const ID = progressValues.push(0);
+
+        let upload = new Promise(function (resolve, reject) {
+            let request = new XMLHttpRequest();
+            request.open("POST", url, true);
+            request.upload.addEventListener('progress',
+                (p) => {uploadProgress(p, progressBar, progressText, progressValues, file.size, ID);}, true
+            );
+
+            request.onload = () => {
+                if (this.status >= 200 && this.status < 300) {
+                    resolve(request.response);
+                } else {
+                    reject({status: this.status, statusText: request.statusText});
+                }
+            };
+            request.onerror = () => reject({status: this.status, statusText: request.statusText});
+            request.send(chunk);
+        });
+
+        chunkUploadArray.push(upload);
+    }
+    await Promise.allSettled(chunkUploadArray);
+
+    // Finish the request and update the progress box
+    const result = await fetch("/upload/chunked?uuid=" + chunkedResponse.uuid + "&finish");
+    uploadComplete(result, progressBar, progressText, linkRow);
 }
 
 async function addNewToList(origFileName) {
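
The per-file work moves into the new uploadFile(), while sendFiles() now fires up to ten uploads at once and drains each full batch with Promise.allSettled before queueing more. The same fill-a-batch-then-drain pattern sketched in Rust for comparison (upload_file and the futures crate's join_all stand in for uploadFile and Promise.allSettled; hypothetical names throughout):

use futures::future::join_all;

const CONCURRENCY_LIMIT: usize = 10;

// Hypothetical per-file upload standing in for uploadFile().
async fn upload_file(name: String) {
    println!("uploading {name}");
}

async fn upload_all(files: Vec<String>) {
    let mut batch = Vec::new();
    for file in files {
        // Queue the upload without awaiting it yet.
        batch.push(upload_file(file));
        // Once the batch is full, wait for every member to settle.
        if batch.len() >= CONCURRENCY_LIMIT {
            join_all(std::mem::take(&mut batch)).await;
        }
    }
    // Final batch to handle any remaining files.
    join_all(batch).await;
}

A fixed batch only refills after its slowest member finishes; a sliding window (futures' buffer_unordered, or a small semaphore on the JS side) would keep all ten slots busy. One pre-existing quirk also survives the move: onload and onerror are arrow functions, so `this` inside them is not the XMLHttpRequest (request.status appears to be what was meant), and Promise.allSettled quietly absorbs the resulting rejections.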
@@ -140,13 +158,11 @@ async function addNewToList(origFileName) {
     return [linkRow, progressBar, progressTxt];
 }
 
-const sumValues = obj => Object.values(obj).reduce((a, b) => a + b, 0);
-
 function uploadProgress(progress, progressBar, progressText, progressValues, fileSize, ID) {
     if (progress.lengthComputable) {
         progressValues[ID] = progress.loaded;
 
-        const progressPercent = Math.floor((sumValues(progressValues) / fileSize) * 100);
+        const progressPercent = Math.floor((progressValues.reduce((a, b) => a + b, 0) / fileSize) * 100);
        if (progressPercent == 100) {
             progressBar.removeAttribute("value");
             progressText.textContent = "⏳";
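
The progress math drops the object-keyed sumValues helper and sums the per-chunk byte counts straight off the array. The same aggregation sketched in Rust (hypothetical names; integer division floors, matching Math.floor):

// Sum per-chunk loaded bytes and convert to a whole-number percentage.
fn progress_percent(progress_values: &[u64], file_size: u64) -> u64 {
    let loaded: u64 = progress_values.iter().sum();
    loaded * 100 / file_size
}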