Mirror of https://github.com/Dangoware/confetti-box.git, synced 2025-04-19 15:22:57 -05:00
Improved performance by properly limiting the number of uploads

parent bf17b1bc04
commit ca63739bfb

2 changed files with 33 additions and 25 deletions
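The gain comes from how in-flight uploads are limited. The old client code collected upload promises in an array and awaited the whole batch with Promise.allSettled once it reached the limit, so every slot sat idle until the slowest upload in the batch finished. The new code keeps in-flight uploads in a Set, removes each one as it settles, and uses Promise.race to wait only until a single slot frees up. A minimal standalone sketch of that pattern, separate from the repository's code (the runLimited helper and its arguments are illustrative):

// Sliding-window concurrency limiter (illustrative; not part of confetti-box).
async function runLimited(items, limit, startTask) {
    const inFlight = new Set();
    for (const item of items) {
        const task = startTask(item);
        inFlight.add(task);
        // Free the slot when the task settles; the trailing .catch keeps this
        // bookkeeping chain from being reported as an unhandled rejection.
        task.finally(() => inFlight.delete(task)).catch(() => {});
        if (inFlight.size >= limit) {
            // Block only until one in-flight task settles, then start the next.
            await Promise.race(inFlight).catch(() => {});
        }
    }
    // Drain whatever is still running; allSettled never rejects.
    await Promise.allSettled(inFlight);
}

// Example use, mirroring sendFiles below: at most 10 uploads at once.
// await runLimited(files, 10, (file) => uploadFile(file, duration, maxSize));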
src/lib.rs (10 changed lines)

@@ -203,7 +203,7 @@ impl ChunkedResponse {
 
 /// Start a chunked upload. Response contains all the info you need to continue
 /// uploading chunks.
-#[post("/upload/chunked", data = "<file_info>", rank = 2)]
+#[post("/upload/chunked", data = "<file_info>")]
 pub async fn chunked_upload_start(
     db: &State<Arc<RwLock<Chunkbase>>>,
     settings: &State<Settings>,
@@ -238,11 +238,11 @@ pub async fn chunked_upload_start(
     }))
 }
 
-#[post("/upload/chunked?<uuid>&<offset>", data = "<data>", rank = 1)]
+#[post("/upload/chunked/<uuid>?<offset>", data = "<data>")]
 pub async fn chunked_upload_continue(
     chunk_db: &State<Arc<RwLock<Chunkbase>>>,
     data: Data<'_>,
-    uuid: String,
+    uuid: &str,
     offset: u64,
 ) -> Result<(), io::Error> {
     let uuid = Uuid::parse_str(&uuid).map_err(|e| io::Error::other(e))?;
@@ -285,12 +285,12 @@ pub async fn chunked_upload_continue(
 }
 
 /// Finalize a chunked upload
-#[get("/upload/chunked?<uuid>&finish", rank = 3)]
+#[get("/upload/chunked/<uuid>?finish")]
 pub async fn chunked_upload_finish(
     main_db: &State<Arc<RwLock<Mochibase>>>,
     chunk_db: &State<Arc<RwLock<Chunkbase>>>,
     settings: &State<Settings>,
-    uuid: String,
+    uuid: &str,
 ) -> Result<Json<MochiFile>, io::Error> {
     let now = Utc::now();
     let uuid = Uuid::parse_str(&uuid).map_err(|e| io::Error::other(e))?;
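In src/lib.rs, the chunked-upload routes move the upload UUID from a query parameter into the path and accept it as &str instead of String; with each route now having a distinct path shape, the explicit rank attributes previously used to disambiguate them are no longer needed. A rough client-side sketch of the endpoints as they look after this change (helper names are illustrative, error handling is omitted, and the JSON body for the start call is an assumption):

// Start a chunked upload; the response carries `uuid` and `chunk_size`.
async function startChunkedUpload(fileInfo) {
    const resp = await fetch("/upload/chunked", {
        method: "POST",
        body: JSON.stringify(fileInfo),   // assumed encoding for <file_info>
    });
    return await resp.json();
}

// Send one chunk of the file at the given byte offset.
async function uploadChunk(uuid, offset, chunk) {
    await fetch("/upload/chunked/" + uuid + "?offset=" + offset, {
        method: "POST",
        body: chunk,
    });
}

// Finalize the upload and get back the stored file's metadata.
async function finishChunkedUpload(uuid) {
    const resp = await fetch("/upload/chunked/" + uuid + "?finish");
    return await resp.json();
}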
The remaining changes are in the client-side upload script:

@@ -58,24 +58,25 @@ async function pasteSubmit(evt) {
 }
 
 async function sendFiles(files, duration, maxSize) {
-    const uploadArray = [];
+    const inProgressUploads = new Set();
     const concurrencyLimit = 10;
 
     for (const file of files) {
-        // Add each upload to the array
-        uploadArray.push(uploadFile(file, duration, maxSize));
+        // Start the upload and add it to the set of in-progress uploads
+        const uploadPromise = uploadFile(file, duration, maxSize);
+        inProgressUploads.add(uploadPromise);
 
-        // If the number of uploads reaches the concurrency limit, wait for them to finish
-        if (uploadArray.length >= concurrencyLimit) {
-            await Promise.allSettled(uploadArray);
-            uploadArray.length = 0; // Clear the array after each batch
+        // Once an upload finishes, remove it from the set
+        uploadPromise.finally(() => inProgressUploads.delete(uploadPromise));
+
+        // If we reached the concurrency limit, wait for one of the uploads to complete
+        if (inProgressUploads.size >= concurrencyLimit) {
+            await Promise.race(inProgressUploads);
         }
     }
 
-    // Final batch to handle any remaining files
-    if (uploadArray.length > 0) {
-        await Promise.allSettled(uploadArray);
-    }
+    // Wait for any remaining uploads to complete
+    await Promise.allSettled(inProgressUploads);
 }
 
 async function uploadFile(file, duration, maxSize) {
@@ -108,11 +109,12 @@ async function uploadFile(file, duration, maxSize) {
     }
 
     // Upload the file in `chunk_size` chunks
-    const chunkUploadArray = [];
+    const chunkUploads = new Set();
     const progressValues = [];
+    const concurrencyLimit = 4;
     for (let start = 0; start < file.size; start += chunkedResponse.chunk_size) {
         const chunk = file.slice(start, start + chunkedResponse.chunk_size)
-        const url = "/upload/chunked?uuid=" + chunkedResponse.uuid + "&offset=" + start;
+        const url = "/upload/chunked/" + chunkedResponse.uuid + "?offset=" + start;
         const ID = progressValues.push(0);
 
         let upload = new Promise(function (resolve, reject) {
@@ -122,23 +124,29 @@ async function uploadFile(file, duration, maxSize) {
                 (p) => {uploadProgress(p, progressBar, progressText, progressValues, file.size, ID);}, true
             );
 
-            request.onload = () => {
-                if (this.status >= 200 && this.status < 300) {
+            request.onload = (e) => {
+                if (e.target.status >= 200 && e.target.status < 300) {
                     resolve(request.response);
                 } else {
-                    reject({status: this.status, statusText: request.statusText});
+                    reject({status: e.target.status, statusText: request.statusText});
                 }
             };
-            request.onerror = () => reject({status: this.status, statusText: request.statusText});
+            request.onerror = (e) => {
+                reject({status: e.target.status, statusText: request.statusText})
+            };
             request.send(chunk);
         });
 
-        chunkUploadArray.push(upload);
+        chunkUploads.add(upload);
+        upload.finally(() => chunkUploads.delete(upload));
+        if (chunkUploads.size >= concurrencyLimit) {
+            await Promise.race(chunkUploads);
+        }
     }
-    await Promise.allSettled(chunkUploadArray);
+    await Promise.allSettled(chunkUploads);
 
     // Finish the request and update the progress box
-    const result = await fetch("/upload/chunked?uuid=" + chunkedResponse.uuid + "&finish");
+    const result = await fetch("/upload/chunked/" + chunkedResponse.uuid + "?finish");
     uploadComplete(result, progressBar, progressText, linkRow);
 }
 
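Separate from the concurrency changes, the XHR handlers switch from this.status to e.target.status. Arrow functions do not get their own this, so inside the old handlers this referred to the enclosing scope rather than the XMLHttpRequest; reading the status off the event target always refers to the request that fired the event. A small illustration of the difference (the URL and log messages are placeholders):

const request = new XMLHttpRequest();
request.open("POST", "/upload/chunked/some-uuid?offset=0");  // placeholder URL

// Broken: an arrow function does not bind `this` to the XHR, so `this.status`
// reads from the surrounding scope (e.g. `window`), not from the request.
// request.onload = () => { console.log(this.status); };

// Fixed: the event target is the XMLHttpRequest that fired the event.
request.onload = (e) => {
    if (e.target.status >= 200 && e.target.status < 300) {
        console.log("chunk accepted");
    } else {
        console.log("chunk rejected:", e.target.status, request.statusText);
    }
};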