mirror of https://github.com/Dangoware/confetti-box.git
synced 2025-04-19 23:32:58 -05:00
I think these methods are cursed
This commit is contained in:
parent 4875445325
commit bf17b1bc04
2 changed files with 92 additions and 72 deletions
src/lib.rs (12 lines changed)
@@ -6,7 +6,7 @@ pub mod settings;
 pub mod strings;
 pub mod utils;
 
-use std::{io, sync::{Arc, RwLock}};
+use std::{io::{self, ErrorKind}, sync::{Arc, RwLock}};
 
 use crate::{
     pages::{footer, head},
@@ -203,7 +203,7 @@ impl ChunkedResponse {
 
 /// Start a chunked upload. Response contains all the info you need to continue
 /// uploading chunks.
-#[post("/upload/chunked", data = "<file_info>", rank = 1)]
+#[post("/upload/chunked", data = "<file_info>", rank = 2)]
 pub async fn chunked_upload_start(
     db: &State<Arc<RwLock<Chunkbase>>>,
     settings: &State<Settings>,
@@ -238,7 +238,7 @@ pub async fn chunked_upload_start(
     }))
 }
 
-#[post("/upload/chunked?<uuid>&<offset>", data = "<data>")]
+#[post("/upload/chunked?<uuid>&<offset>", data = "<data>", rank = 1)]
 pub async fn chunked_upload_continue(
     chunk_db: &State<Arc<RwLock<Chunkbase>>>,
     data: Data<'_>,
@@ -264,6 +264,10 @@ pub async fn chunked_upload_continue(
         .await?
     };
 
+    if offset > chunked_info.size {
+        return Err(io::Error::new(ErrorKind::InvalidInput, "The seek position is larger than the file size"))
+    }
+
     file.seek(io::SeekFrom::Start(offset)).await?;
     data_stream.stream_to(&mut file).await?.written;
     file.flush().await?;
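The new guard above rejects a chunk whose `offset` points past the declared upload size, returning an `ErrorKind::InvalidInput` error before any seek happens. One plausible reason for the check (the commit itself does not say) is that seeking beyond the end of a file and then writing silently grows the file, so an unchecked, client-supplied offset could inflate the temporary chunk file well past the declared size. The snippet below is a standalone, std-only sketch of that behavior; none of it is confetti-box code.

// Standalone illustration, not part of this commit: seeking past the end of a
// file and then writing extends the file, zero-filling the gap.
use std::fs::File;
use std::io::{Seek, SeekFrom, Write};

fn main() -> std::io::Result<()> {
    let mut file = File::create("scratch.bin")?;
    file.write_all(b"abc")?; // 3 bytes on disk

    // Seek far beyond the current end and write a few more bytes.
    file.seek(SeekFrom::Start(1024))?;
    file.write_all(b"xyz")?;

    // The file is now 1027 bytes long, most of it zeros.
    println!("len = {}", file.metadata()?.len());
    Ok(())
}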
@@ -281,7 +285,7 @@ pub async fn chunked_upload_continue(
 }
 
 /// Finalize a chunked upload
-#[get("/upload/chunked?<uuid>&finish")]
+#[get("/upload/chunked?<uuid>&finish", rank = 3)]
 pub async fn chunked_upload_finish(
     main_db: &State<Arc<RwLock<Mochibase>>>,
     chunk_db: &State<Arc<RwLock<Chunkbase>>>,
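All three `/upload/chunked` routes now carry explicit `rank` values: 1 for the continue handler, 2 for the start handler, and 3 for the finish handler. In Rocket, routes that could match the same request are tried in ascending rank order, and giving otherwise-colliding routes distinct ranks is what lets them coexist on one path. The sketch below illustrates only that mechanism; the handler bodies are placeholders, and the real handlers' `data` and state parameters are omitted.

// Minimal sketch of Rocket route ranking, not confetti-box's actual handlers:
// lower-ranked routes are attempted first, so the `uuid`/`offset` variant wins
// when those query parameters are present, and the plain route handles the rest.
#[macro_use]
extern crate rocket;

// rank = 1: tried first; matches POSTs that carry `uuid` and `offset`.
#[post("/upload/chunked?<uuid>&<offset>", rank = 1)]
fn continue_chunk(uuid: &str, offset: u64) -> String {
    format!("would write chunk for {uuid} at byte {offset}")
}

// rank = 2: fallback for POSTs to the same path without those parameters.
#[post("/upload/chunked", rank = 2)]
fn start_chunked() -> &'static str {
    "would start a new chunked upload"
}

// rank = 3: the GET used to finalize an upload.
#[get("/upload/chunked?<uuid>&finish", rank = 3)]
fn finish_chunked(uuid: &str) -> String {
    format!("would finalize {uuid}")
}

#[launch]
fn rocket() -> _ {
    rocket::build().mount("/", routes![continue_chunk, start_chunked, finish_chunked])
}

With this ordering, a POST that carries `uuid` and `offset` reaches the continue handler first, and a bare POST falls through to the start handler. The remaining hunks below are from the client-side upload script.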
@@ -10,7 +10,7 @@ async function formSubmit() {
     const duration = form.elements.duration.value;
     const maxSize = form.elements.fileUpload.dataset.maxFilesize;
 
-    await sendFile(files, duration, maxSize);
+    await sendFiles(files, duration, maxSize);
 
     // Reset the form file data since we've successfully submitted it
     form.elements.fileUpload.value = "";
@@ -39,7 +39,7 @@ async function dragDropSubmit(evt) {
         });
     }
 
-    await sendFile(files, duration, maxSize);
+    await sendFiles(files, duration, maxSize);
 }
 
 async function pasteSubmit(evt) {
@@ -54,18 +54,38 @@ async function pasteSubmit(evt) {
         files.push(file);
     }
 
-    await sendFile(files, duration, maxSize);
+    await sendFiles(files, duration, maxSize);
 }
 
-async function sendFile(files, duration, maxSize) {
+async function sendFiles(files, duration, maxSize) {
+    const uploadArray = [];
+    const concurrencyLimit = 10;
+
     for (const file of files) {
+        // Add each upload to the array
+        uploadArray.push(uploadFile(file, duration, maxSize));
+
+        // If the number of uploads reaches the concurrency limit, wait for them to finish
+        if (uploadArray.length >= concurrencyLimit) {
+            await Promise.allSettled(uploadArray);
+            uploadArray.length = 0; // Clear the array after each batch
+        }
+    }
+
+    // Final batch to handle any remaining files
+    if (uploadArray.length > 0) {
+        await Promise.allSettled(uploadArray);
+    }
+}
+
+async function uploadFile(file, duration, maxSize) {
     const [linkRow, progressBar, progressText] = await addNewToList(file.name);
     if (file.size > maxSize) {
         console.error("Provided file is too large", file.size, "bytes; max", maxSize, "bytes");
-        continue;
+        return;
     } else if (file.size == 0) {
        console.error("Provided file has 0 bytes");
-        continue;
+        return;
     }
 
     // Get preliminary upload information
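The new `sendFiles` limits concurrency by batching: it starts up to `concurrencyLimit` uploads, waits for the whole batch with `Promise.allSettled`, and only then queues more, so each batch lasts as long as its slowest file. Purely as an illustration of the same bounded-concurrency idea in the project's server language (this is not part of the commit, and `upload_one` is a hypothetical stand-in for uploading a single file), the futures crate's `buffer_unordered` keeps the pool topped up instead of draining it batch by batch:

// Illustration only: bounded concurrency over a list of uploads, keeping up to
// 10 tasks in flight at all times instead of waiting batch-by-batch.
use futures::stream::{self, StreamExt};

async fn upload_one(name: String) -> Result<(), String> {
    // Pretend to do the upload.
    println!("uploading {name}");
    Ok(())
}

#[tokio::main]
async fn main() {
    let files: Vec<String> = (0..25).map(|i| format!("file-{i}.bin")).collect();

    let results: Vec<Result<(), String>> = stream::iter(files)
        .map(upload_one)      // turn each file name into an upload future
        .buffer_unordered(10) // at most 10 uploads running concurrently
        .collect()
        .await;

    println!("finished {} uploads", results.len());
}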
@@ -88,7 +108,7 @@ async function sendFile(files, duration, maxSize) {
     }
 
     // Upload the file in `chunk_size` chunks
-    let uploadArray = [];
+    const chunkUploadArray = [];
     const progressValues = [];
     for (let start = 0; start < file.size; start += chunkedResponse.chunk_size) {
         const chunk = file.slice(start, start + chunkedResponse.chunk_size)
@@ -113,15 +133,13 @@ async function sendFile(files, duration, maxSize) {
             request.send(chunk);
         });
 
-        uploadArray.push(upload);
+        chunkUploadArray.push(upload);
     }
-    console.log("Waiting for multiple uploads to complete");
-    console.log(await Promise.allSettled(uploadArray));
+    await Promise.allSettled(chunkUploadArray);
 
     // Finish the request and update the progress box
     const result = await fetch("/upload/chunked?uuid=" + chunkedResponse.uuid + "&finish");
     uploadComplete(result, progressBar, progressText, linkRow);
-}
 }
 
 async function addNewToList(origFileName) {
@@ -140,13 +158,11 @@ async function addNewToList(origFileName) {
     return [linkRow, progressBar, progressTxt];
 }
 
-const sumValues = obj => Object.values(obj).reduce((a, b) => a + b, 0);
-
 function uploadProgress(progress, progressBar, progressText, progressValues, fileSize, ID) {
     if (progress.lengthComputable) {
         progressValues[ID] = progress.loaded;
 
-        const progressPercent = Math.floor((sumValues(progressValues) / fileSize) * 100);
+        const progressPercent = Math.floor((progressValues.reduce((a, b) => a + b, 0) / fileSize) * 100);
         if (progressPercent == 100) {
             progressBar.removeAttribute("value");
             progressText.textContent = "⏳";