Changed chunked upload URL to take chunk number instead of offset

This commit is contained in:
G2-Games 2024-11-04 05:05:58 -06:00
parent e78c61b410
commit 60dd2ea421
5 changed files with 33 additions and 19 deletions

View file

@ -384,6 +384,10 @@ pub struct ChunkedInfo {
#[serde_as(as = "serde_with::DurationSeconds<i64>")]
pub expire_duration: TimeDelta,
/// Tracks which chunks have already been received, so you can't overwrite
/// some wrong part of a file
#[serde(skip)]
pub recieved_chunks: HashSet<u64>,
#[serde(skip)]
pub path: PathBuf,
#[serde(skip)]

View file

@ -141,13 +141,13 @@ pub async fn chunked_upload_start(
}))
}
#[post("/upload/chunked/<uuid>?<offset>", data = "<data>")]
#[post("/upload/chunked/<uuid>?<chunk>", data = "<data>")]
pub async fn chunked_upload_continue(
chunk_db: &State<Arc<RwLock<Chunkbase>>>,
settings: &State<Settings>,
data: Data<'_>,
uuid: &str,
offset: u64,
chunk: u64,
) -> Result<(), io::Error> {
let uuid = Uuid::parse_str(uuid).map_err(io::Error::other)?;
let data_stream = data.open((settings.chunk_size + 100).bytes());
@ -156,6 +156,9 @@ pub async fn chunked_upload_continue(
Some(s) => s.clone(),
None => return Err(io::Error::other("Invalid UUID")),
};
if chunked_info.1.recieved_chunks.contains(&chunk) {
return Err(io::Error::new(ErrorKind::Other, "Chunk already uploaded"));
}
let mut file = fs::File::options()
.read(true)
@ -164,22 +167,36 @@ pub async fn chunked_upload_continue(
.open(&chunked_info.1.path)
.await?;
if offset > chunked_info.1.size {
let offset = chunk * settings.chunk_size;
if (offset > chunked_info.1.size) | (offset > settings.max_filesize) {
return Err(io::Error::new(
ErrorKind::InvalidInput,
"The seek position is larger than the file size",
"Invalid chunk number for file",
));
}
file.seek(io::SeekFrom::Start(offset)).await?;
data_stream.stream_to(&mut file).await?;
let written = data_stream.stream_to(&mut file).await?.written;
file.flush().await?;
let position = file.stream_position().await?;
if written > settings.chunk_size {
chunk_db.write().unwrap().mut_chunks().remove(&uuid);
return Err(io::Error::other("Wrote more than one chunk"));
}
if position > chunked_info.1.size {
chunk_db.write().unwrap().mut_chunks().remove(&uuid);
return Err(io::Error::other("File larger than expected"));
}
chunk_db
.write()
.unwrap()
.mut_chunks()
.get_mut(&uuid)
.unwrap()
.1
.recieved_chunks
.insert(chunk);
Ok(())
}

View file

@ -136,7 +136,6 @@ pub async fn clean_loop(
pub async fn clean_chunks(chunk_db: Arc<RwLock<Chunkbase>>, mut shutdown_signal: Receiver<()>) {
let mut interval = time::interval(TimeDelta::seconds(30).to_std().unwrap());
loop {
select! {
_ = interval.tick() => {let _ = chunk_db.write().unwrap().delete_timed_out();},

View file

@ -11,38 +11,31 @@ use serde_with::serde_as;
/// A response to the client from the server
#[derive(Deserialize, Serialize, Debug)]
#[serde(crate = "rocket::serde")]
#[serde(default)]
pub struct Settings {
/// Maximum filesize in bytes
#[serde(default)]
pub max_filesize: u64,
/// Size of each upload chunk in bytes
#[serde(default)]
pub chunk_size: u64,
/// Is overwriting already uploaded files with the same hash allowed, or is
/// this a no-op?
#[serde(default)]
pub overwrite: bool,
/// Settings pertaining to duration information
pub duration: DurationSettings,
/// The path to the database file
#[serde(default)]
pub database_path: PathBuf,
/// Temporary directory for stuff
#[serde(default)]
pub temp_dir: PathBuf,
/// Directory in which to store hosted files
#[serde(default)]
pub file_dir: PathBuf,
/// Settings pertaining to the server configuration
#[serde(default)]
pub server: ServerSettings,
#[serde(skip)]
@ -53,7 +46,7 @@ impl Default for Settings {
fn default() -> Self {
Self {
max_filesize: 25.megabytes().into(), // 25 MB
chunk_size: 1.megabytes().into(),
chunk_size: 10.megabytes().into(),
overwrite: true,
duration: DurationSettings::default(),
server: ServerSettings::default(),

View file

@ -114,10 +114,11 @@ async function uploadFile(file, duration, maxSize) {
// Upload the file in `chunk_size` chunks
const chunkUploads = new Set();
const progressValues = [];
const concurrencyLimit = 4;
for (let start = 0; start < file.size; start += chunkedResponse.chunk_size) {
const chunk = file.slice(start, start + chunkedResponse.chunk_size)
const url = "/upload/chunked/" + chunkedResponse.uuid + "?offset=" + start;
const concurrencyLimit = 5;
for (let chunk_num = 0; chunk_num < Math.floor(file.size / chunkedResponse.chunk_size) + 1; chunk_num ++) {
const offset = Math.floor(chunk_num * chunkedResponse.chunk_size);
const chunk = file.slice(offset, offset + chunkedResponse.chunk_size);
const url = "/upload/chunked/" + chunkedResponse.uuid + "?chunk=" + chunk_num;
const ID = progressValues.push(0);
let upload = new Promise(function (resolve, reject) {