Added poorly implemented progress for chunked uploads

G2-Games 2024-10-31 03:19:28 -05:00
parent 9b80670961
commit 4875445325
2 changed files with 112 additions and 16 deletions
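The resulting client flow is a three-step protocol: start an upload to obtain a UUID and chunk size, POST each chunk with an explicit byte offset, then finish with a GET. A minimal sketch of that flow, assuming a JSON file_info body with name and size fields (the diff below does not show the exact fields):

// Minimal sketch of the upload protocol after this commit. The start
// call's body fields are an assumption; the diff only shows that the
// response carries `uuid` and `chunk_size`.
async function uploadChunked(file) {
    const info = await fetch("/upload/chunked", {
        method: "POST",
        body: JSON.stringify({ name: file.name, size: file.size }), // hypothetical fields
    }).then(res => res.json());

    // Each chunk carries its own byte offset, so chunks can be sent in parallel.
    const uploads = [];
    for (let start = 0; start < file.size; start += info.chunk_size) {
        const chunk = file.slice(start, start + info.chunk_size);
        uploads.push(fetch("/upload/chunked?uuid=" + info.uuid + "&offset=" + start, {
            method: "POST",
            body: chunk,
        }));
    }
    await Promise.allSettled(uploads);

    // Finalize; this commit changes the finish route from POST to GET.
    return fetch("/upload/chunked?uuid=" + info.uuid + "&finish").then(res => res.json());
}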

View file

@@ -203,7 +203,7 @@ impl ChunkedResponse {

 /// Start a chunked upload. Response contains all the info you need to continue
 /// uploading chunks.
-#[post("/upload/chunked", data = "<file_info>")]
+#[post("/upload/chunked", data = "<file_info>", rank = 1)]
 pub async fn chunked_upload_start(
     db: &State<Arc<RwLock<Chunkbase>>>,
     settings: &State<Settings>,
@@ -238,11 +238,12 @@ pub async fn chunked_upload_start(
     }))
 }

-#[post("/upload/chunked?<uuid>", data = "<data>")]
+#[post("/upload/chunked?<uuid>&<offset>", data = "<data>")]
 pub async fn chunked_upload_continue(
     chunk_db: &State<Arc<RwLock<Chunkbase>>>,
     data: Data<'_>,
     uuid: String,
+    offset: u64,
 ) -> Result<(), io::Error> {
     let uuid = Uuid::parse_str(&uuid).map_err(|e| io::Error::other(e))?;
     let data_stream = data.open(101.megabytes());
@@ -263,7 +264,7 @@ pub async fn chunked_upload_continue(
             .await?
     };

-    file.seek(io::SeekFrom::Start(chunked_info.offset)).await?;
+    file.seek(io::SeekFrom::Start(offset)).await?;
     data_stream.stream_to(&mut file).await?.written;
     file.flush().await?;
     let position = file.stream_position().await?;
@@ -276,18 +277,11 @@ pub async fn chunked_upload_continue(
         return Err(io::Error::other("File larger than expected"))
     }

-    chunk_db.write()
-        .unwrap()
-        .mut_chunks()
-        .get_mut(&uuid)
-        .unwrap()
-        .offset = position;
-
     Ok(())
 }

 /// Finalize a chunked upload
-#[post("/upload/chunked?<uuid>&finish")]
+#[get("/upload/chunked?<uuid>&finish")]
 pub async fn chunked_upload_finish(
     main_db: &State<Arc<RwLock<Mochibase>>>,
     chunk_db: &State<Arc<RwLock<Chunkbase>>>,
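Note the design shift above: the server no longer advances a stored per-upload offset after each write; it seeks to whatever offset the client supplies, so chunks may arrive out of order or concurrently. A hedged sketch of a call sequence the endpoint now accepts (uuid and chunkSize assumed to come from the start response):

// Sketch: out-of-order delivery is valid because the seek target comes
// from the query string, not from server-side bookkeeping.
const second = file.slice(chunkSize, 2 * chunkSize);
await fetch("/upload/chunked?uuid=" + uuid + "&offset=" + chunkSize, { method: "POST", body: second });

const first = file.slice(0, chunkSize);
await fetch("/upload/chunked?uuid=" + uuid + "&offset=0", { method: "POST", body: first });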

View file

@@ -59,6 +59,7 @@ async function pasteSubmit(evt) {

 async function sendFile(files, duration, maxSize) {
     for (const file of files) {
+        const [linkRow, progressBar, progressText] = await addNewToList(file.name);
         if (file.size > maxSize) {
             console.error("Provided file is too large", file.size, "bytes; max", maxSize, "bytes");
             continue;
@@ -67,6 +68,7 @@ async function sendFile(files, duration, maxSize) {
             continue;
         }

+        // Get preliminary upload information
         let chunkedResponse;
         try {
             const response = await fetch("/upload/chunked", {
@@ -86,19 +88,119 @@ async function sendFile(files, duration, maxSize) {
         }

         // Upload the file in `chunk_size` chunks
+        let uploadArray = [];
+        const progressValues = [];
         for (let start = 0; start < file.size; start += chunkedResponse.chunk_size) {
             const chunk = file.slice(start, start + chunkedResponse.chunk_size)
+            const url = "/upload/chunked?uuid=" + chunkedResponse.uuid + "&offset=" + start;
+            const ID = progressValues.push(0);

-            await fetch("/upload/chunked?uuid=" + chunkedResponse.uuid, { method: 'post', body: chunk }).then(res => res.text())
+            let upload = new Promise(function (resolve, reject) {
+                let request = new XMLHttpRequest();
+                request.open("POST", url, true);
+                request.upload.addEventListener('progress',
+                    (p) => { uploadProgress(p, progressBar, progressText, progressValues, file.size, ID); }, true
+                );
+
+                request.onload = () => {
+                    if (request.status >= 200 && request.status < 300) {
+                        resolve(request.response);
+                    } else {
+                        reject({ status: request.status, statusText: request.statusText });
+                    }
+                };
+                request.onerror = () => reject({ status: request.status, statusText: request.statusText });
+
+                request.send(chunk);
+            });
+
+            uploadArray.push(upload);
         }

-        console.log(await fetch("/upload/chunked?uuid=" + chunkedResponse.uuid + "&finish", { method: 'post' }).then(res => res.json()))
+        console.log("Waiting for multiple uploads to complete");
+        console.log(await Promise.allSettled(uploadArray));
+
+        // Finish the request and update the progress box
+        const result = await fetch("/upload/chunked?uuid=" + chunkedResponse.uuid + "&finish");
+        uploadComplete(result, progressBar, progressText, linkRow);
     }
 }

-function networkErrorHandler(err, progressBar, progressText, linkRow) {
-    makeErrored(progressBar, progressText, linkRow, "A network error occured");
-    console.error("A network error occured while uploading", err);
+async function addNewToList(origFileName) {
+    const uploadedFilesDisplay = document.getElementById("uploadedFilesDisplay");
+    const linkRow = uploadedFilesDisplay.appendChild(document.createElement("div"));
+    const fileName = linkRow.appendChild(document.createElement("p"));
+    const progressBar = linkRow.appendChild(document.createElement("progress"));
+    const progressTxt = linkRow.appendChild(document.createElement("p"));
+
+    fileName.textContent = origFileName;
+    fileName.classList.add("file_name");
+    progressTxt.classList.add("status");
+    progressBar.max = "100";
+    progressBar.value = "0";
+
+    return [linkRow, progressBar, progressTxt];
+}
+
+const sumValues = obj => Object.values(obj).reduce((a, b) => a + b, 0);
+
+function uploadProgress(progress, progressBar, progressText, progressValues, fileSize, ID) {
+    if (progress.lengthComputable) {
+        progressValues[ID] = progress.loaded;
+        const progressPercent = Math.floor((sumValues(progressValues) / fileSize) * 100);
+        if (progressPercent == 100) {
+            progressBar.removeAttribute("value");
+            progressText.textContent = "⏳";
+        } else {
+            progressBar.value = progressPercent;
+            progressText.textContent = progressPercent + "%";
+        }
+    }
+}
+
+async function uploadComplete(response, progressBar, progressText, linkRow) {
+    if (response.status === 200) {
+        const responseJson = await response.json();
+        console.log("Successfully uploaded file", responseJson);
+        makeFinished(progressBar, progressText, linkRow, responseJson);
+    } else if (response.status === 413) {
+        makeErrored(progressBar, progressText, linkRow, TOO_LARGE_TEXT);
+    } else {
+        makeErrored(progressBar, progressText, linkRow, ERROR_TEXT);
+    }
+}
+
+function makeErrored(progressBar, progressText, linkRow, errorMessage) {
+    progressText.textContent = errorMessage;
+    progressBar.style.display = "none";
+    linkRow.classList.add("upload_failed");
+}
+
+function makeFinished(progressBar, progressText, linkRow, response) {
+    progressText.textContent = "";
+    const link = progressText.appendChild(document.createElement("a"));
+    link.textContent = response.mmid;
+    link.href = "/f/" + response.mmid;
+    link.target = "_blank";
+
+    let button = linkRow.appendChild(document.createElement("button"));
+    button.textContent = "📝";
+    let buttonTimeout = null;
+    button.addEventListener('click', function (_e) {
+        const mmid = response.mmid;
+        if (buttonTimeout) {
+            clearTimeout(buttonTimeout);
+        }
+        navigator.clipboard.writeText(
+            window.location.protocol + "//" + window.location.host + "/f/" + mmid
+        );
+        button.textContent = "✅";
+        buttonTimeout = setTimeout(function () {
+            button.textContent = "📝";
+        }, 750);
+    });
+
+    progressBar.style.display = "none";
+    linkRow.classList.add("upload_done");
 }

 async function initEverything() {
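One caveat on the loop above: Promise.allSettled never rejects, and its result is only logged before the finish request is sent. A caller that wants to verify every chunk landed first could inspect the settled results; a minimal sketch (helper name hypothetical):

// Hypothetical helper: surface rejected chunk uploads before finishing.
function assertAllFulfilled(results) {
    const failed = results.filter(r => r.status === "rejected");
    if (failed.length > 0) {
        throw new Error(failed.length + " chunk upload(s) failed");
    }
}

// Usage, replacing the bare console.log above:
//   const results = await Promise.allSettled(uploadArray);
//   assertAllFulfilled(results);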