From 60dd2ea4219541cfe54672eba3e9c2289135d577 Mon Sep 17 00:00:00 2001 From: G2-Games Date: Mon, 4 Nov 2024 05:05:58 -0600 Subject: [PATCH] Changed chunked upload URL to take chunk number instead of offset --- src/database.rs | 4 ++++ src/lib.rs | 27 ++++++++++++++++++++++----- src/main.rs | 1 - src/settings.rs | 11 ++--------- web/request.js | 9 +++++---- 5 files changed, 33 insertions(+), 19 deletions(-) diff --git a/src/database.rs b/src/database.rs index 34f846f..51445ae 100644 --- a/src/database.rs +++ b/src/database.rs @@ -384,6 +384,10 @@ pub struct ChunkedInfo { #[serde_as(as = "serde_with::DurationSeconds")] pub expire_duration: TimeDelta, + /// Tracks which chunks have already been received, so you can't overwrite + /// some wrong part of a file + #[serde(skip)] + pub recieved_chunks: HashSet, #[serde(skip)] pub path: PathBuf, #[serde(skip)] diff --git a/src/lib.rs b/src/lib.rs index 36cea50..192aa7c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -141,13 +141,13 @@ pub async fn chunked_upload_start( })) } -#[post("/upload/chunked/?", data = "")] +#[post("/upload/chunked/?", data = "")] pub async fn chunked_upload_continue( chunk_db: &State>>, settings: &State, data: Data<'_>, uuid: &str, - offset: u64, + chunk: u64, ) -> Result<(), io::Error> { let uuid = Uuid::parse_str(uuid).map_err(io::Error::other)?; let data_stream = data.open((settings.chunk_size + 100).bytes()); @@ -156,6 +156,9 @@ pub async fn chunked_upload_continue( Some(s) => s.clone(), None => return Err(io::Error::other("Invalid UUID")), }; + if chunked_info.1.recieved_chunks.contains(&chunk) { + return Err(io::Error::new(ErrorKind::Other, "Chunk already uploaded")); + } let mut file = fs::File::options() .read(true) @@ -164,22 +167,36 @@ pub async fn chunked_upload_continue( .open(&chunked_info.1.path) .await?; - if offset > chunked_info.1.size { + let offset = chunk * settings.chunk_size; + if (offset > chunked_info.1.size) | (offset > settings.max_filesize) { return Err(io::Error::new( 
ErrorKind::InvalidInput, - "The seek position is larger than the file size", + "Invalid chunk number for file", )); } file.seek(io::SeekFrom::Start(offset)).await?; - data_stream.stream_to(&mut file).await?; + let written = data_stream.stream_to(&mut file).await?.written; file.flush().await?; let position = file.stream_position().await?; + if written > settings.chunk_size { + chunk_db.write().unwrap().mut_chunks().remove(&uuid); + return Err(io::Error::other("Wrote more than one chunk")); + } if position > chunked_info.1.size { chunk_db.write().unwrap().mut_chunks().remove(&uuid); return Err(io::Error::other("File larger than expected")); } + chunk_db + .write() + .unwrap() + .mut_chunks() + .get_mut(&uuid) + .unwrap() + .1 + .recieved_chunks + .insert(chunk); Ok(()) } diff --git a/src/main.rs b/src/main.rs index f99bf5d..608ded3 100644 --- a/src/main.rs +++ b/src/main.rs @@ -136,7 +136,6 @@ pub async fn clean_loop( pub async fn clean_chunks(chunk_db: Arc>, mut shutdown_signal: Receiver<()>) { let mut interval = time::interval(TimeDelta::seconds(30).to_std().unwrap()); - loop { select! { _ = interval.tick() => {let _ = chunk_db.write().unwrap().delete_timed_out();}, diff --git a/src/settings.rs b/src/settings.rs index 2f3592a..641ed9e 100644 --- a/src/settings.rs +++ b/src/settings.rs @@ -11,38 +11,31 @@ use serde_with::serde_as; /// A response to the client from the server #[derive(Deserialize, Serialize, Debug)] -#[serde(crate = "rocket::serde")] +#[serde(default)] pub struct Settings { /// Maximum filesize in bytes - #[serde(default)] pub max_filesize: u64, /// Maximum filesize in bytes - #[serde(default)] pub chunk_size: u64, /// Is overwiting already uploaded files with the same hash allowed, or is /// this a no-op? 
- #[serde(default)] pub overwrite: bool, /// Settings pertaining to duration information pub duration: DurationSettings, /// The path to the database file - #[serde(default)] pub database_path: PathBuf, /// Temporary directory for stuff - #[serde(default)] pub temp_dir: PathBuf, /// Directory in which to store hosted files - #[serde(default)] pub file_dir: PathBuf, /// Settings pertaining to the server configuration - #[serde(default)] pub server: ServerSettings, #[serde(skip)] @@ -53,7 +46,7 @@ impl Default for Settings { fn default() -> Self { Self { max_filesize: 25.megabytes().into(), // 1 MB - chunk_size: 1.megabytes().into(), + chunk_size: 10.megabytes().into(), overwrite: true, duration: DurationSettings::default(), server: ServerSettings::default(), diff --git a/web/request.js b/web/request.js index bb66aaa..0d32057 100644 --- a/web/request.js +++ b/web/request.js @@ -114,10 +114,11 @@ async function uploadFile(file, duration, maxSize) { // Upload the file in `chunk_size` chunks const chunkUploads = new Set(); const progressValues = []; - const concurrencyLimit = 4; - for (let start = 0; start < file.size; start += chunkedResponse.chunk_size) { - const chunk = file.slice(start, start + chunkedResponse.chunk_size) - const url = "/upload/chunked/" + chunkedResponse.uuid + "?offset=" + start; + const concurrencyLimit = 5; + for (let chunk_num = 0; chunk_num < Math.floor(file.size / chunkedResponse.chunk_size) + 1; chunk_num ++) { + const offset = Math.floor(chunk_num * chunkedResponse.chunk_size); + const chunk = file.slice(offset, offset + chunkedResponse.chunk_size); + const url = "/upload/chunked/" + chunkedResponse.uuid + "?chunk=" + chunk_num; const ID = progressValues.push(0); let upload = new Promise(function (resolve, reject) {