Mirror of https://github.com/Dangoware/confetti-box.git, synced 2025-04-19 15:22:57 -05:00
Switch database to begin using CBOR, and begin work on chunked uploads

parent 3892975fc2
commit 2b8d7255b8

4 changed files with 82 additions and 51 deletions
Cargo.toml

@@ -4,9 +4,9 @@ version = "0.1.2"
 edition = "2021"
 
 [dependencies]
-bincode = { version = "2.0.0-rc.3", features = ["serde"] }
 blake3 = { version = "1.5.4", features = ["mmap", "rayon", "serde"] }
 chrono = { version = "0.4.38", features = ["serde"] }
+ciborium = "0.2.2"
 file-format = { version = "0.25.0", features = ["reader"] }
 log = "0.4"
 lz4_flex = "0.11.3"

@@ -16,7 +16,7 @@ rocket = { version = "0.5", features = ["json"] }
 serde = { version = "1.0.213", features = ["derive"] }
 serde_with = { version = "3.11.0", features = ["chrono_0_4"] }
 toml = "0.8.19"
-uuid = { version = "1.11.0", features = ["v4"] }
+uuid = { version = "1.11.0", features = ["serde", "v4"] }
 
 [profile.production]
 inherits = "release"
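
For context on the dependency swap above: ciborium exposes a reader/writer pair, from_reader and into_writer, which takes over the role of bincode's decode/encode calls in the code below. A minimal round-trip sketch (standalone example, not code from this commit):

use ciborium::{from_reader, into_writer};
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct Entry {
    name: String,
    size: u64,
}

fn main() {
    let entry = Entry { name: "file.txt".into(), size: 42 };

    // Serialize any impl Serialize into any impl io::Write
    let mut buf = Vec::new();
    into_writer(&entry, &mut buf).unwrap();

    // Deserialize back from any impl io::Read
    let decoded: Entry = from_reader(buf.as_slice()).unwrap();
    assert_eq!(entry, decoded);
}

Unlike bincode 2's Decode/Encode derives, ciborium goes entirely through serde, which is why the #[bincode(with_serde)] attributes and the Decode/Encode derives disappear throughout this diff.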

src/database.rs

@@ -7,30 +7,29 @@ use std::{
     sync::{Arc, RwLock},
 };
 
-use bincode::{config::Configuration, decode_from_std_read, encode_into_std_write, Decode, Encode};
 use blake3::Hash;
 use chrono::{DateTime, TimeDelta, Utc};
+use ciborium::{from_reader, into_writer};
 use log::{error, info, warn};
 use rand::distributions::{Alphanumeric, DistString};
 use rocket::{
-    serde::{Deserialize, Serialize},
-    tokio::{select, sync::mpsc::Receiver, time},
+    form::{self, FromFormField, ValueField}, serde::{Deserialize, Serialize}, tokio::{select, sync::mpsc::Receiver, time}
 };
 use serde_with::{serde_as, DisplayFromStr};
 use uuid::Uuid;
 
-const BINCODE_CFG: Configuration = bincode::config::standard();
-
-#[derive(Debug, Clone, Decode, Encode)]
+#[derive(Debug, Clone)]
+#[derive(Deserialize, Serialize)]
 pub struct Mochibase {
     path: PathBuf,
 
     /// Every hash in the database along with the [`Mmid`]s associated with them
-    #[bincode(with_serde)]
     hashes: HashMap<Hash, HashSet<Mmid>>,
 
     /// All entries in the database
-    #[bincode(with_serde)]
     entries: HashMap<Mmid, MochiFile>,
+
+    chunks: HashMap<Uuid, DateTime<Utc>>,
 }
 
 impl Mochibase {
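
The new chunks field maps an in-progress chunked upload's Uuid to its expiry time. This commit only adds the storage; a hypothetical sketch of the pruning step that would eventually use it (helper name assumed, not from this commit):

use std::collections::HashMap;

use chrono::{DateTime, Utc};
use uuid::Uuid;

// Hypothetical helper: drop chunked-upload sessions whose expiry has passed.
// The map mirrors the new `chunks` field on Mochibase.
fn prune_expired_chunks(chunks: &mut HashMap<Uuid, DateTime<Utc>>) {
    let now = Utc::now();
    chunks.retain(|_uuid, expiry| *expiry > now);
}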

@@ -39,6 +38,7 @@ impl Mochibase {
             path: path.as_ref().to_path_buf(),
             entries: HashMap::new(),
             hashes: HashMap::new(),
+            chunks: HashMap::new(),
         };
 
         // Save the database initially after creating it

@@ -52,7 +52,7 @@ impl Mochibase {
         let file = File::open(path)?;
         let mut lz4_file = lz4_flex::frame::FrameDecoder::new(file);
 
-        decode_from_std_read(&mut lz4_file, BINCODE_CFG)
+        from_reader(&mut lz4_file)
             .map_err(|e| io::Error::other(format!("failed to open database: {e}")))
     }

@@ -70,7 +70,7 @@ impl Mochibase {
         // Create a file and write the LZ4 compressed stream into it
         let file = File::create(self.path.with_extension("bkp"))?;
         let mut lz4_file = lz4_flex::frame::FrameEncoder::new(file);
-        encode_into_std_write(self, &mut lz4_file, BINCODE_CFG)
+        into_writer(self, &mut lz4_file)
             .map_err(|e| io::Error::other(format!("failed to save database: {e}")))?;
         lz4_file.try_finish()?;
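
With both hunks applied, open and save stream CBOR through an LZ4 frame codec rather than bincode. A self-contained sketch of the same pattern, assuming a placeholder Db struct:

use std::{fs::File, io};

use ciborium::{from_reader, into_writer};
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Db {
    version: u32,
}

fn save(db: &Db, path: &str) -> Result<(), io::Error> {
    let file = File::create(path)?;
    // Write CBOR through an LZ4 frame encoder wrapping the file
    let mut lz4_file = lz4_flex::frame::FrameEncoder::new(file);
    into_writer(db, &mut lz4_file)
        .map_err(|e| io::Error::other(format!("failed to save database: {e}")))?;
    // Finish the LZ4 frame; dropping the encoder early can truncate the stream
    lz4_file.try_finish()?;
    Ok(())
}

fn open(path: &str) -> Result<Db, io::Error> {
    let file = File::open(path)?;
    // Decompress the LZ4 frame on the fly while ciborium reads CBOR from it
    let mut lz4_file = lz4_flex::frame::FrameDecoder::new(file);
    from_reader(&mut lz4_file)
        .map_err(|e| io::Error::other(format!("failed to open database: {e}")))
}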

@@ -154,7 +154,8 @@ impl Mochibase {
 
 /// An entry in the database storing metadata about a file
 #[serde_as]
-#[derive(Debug, Clone, Decode, Encode, Deserialize, Serialize)]
+#[derive(Debug, Clone)]
+#[derive(Deserialize, Serialize)]
 pub struct MochiFile {
     /// A unique identifier describing this file
     mmid: Mmid,

@@ -166,16 +167,13 @@ pub struct MochiFile {
     mime_type: String,
 
     /// The Blake3 hash of the file
-    #[bincode(with_serde)]
     #[serde_as(as = "DisplayFromStr")]
     hash: Hash,
 
     /// The datetime when the file was uploaded
-    #[bincode(with_serde)]
     upload_datetime: DateTime<Utc>,
 
     /// The datetime when the file is set to expire
-    #[bincode(with_serde)]
     expiry_datetime: DateTime<Utc>,
 }

@@ -285,7 +283,8 @@ pub async fn clean_loop(
 
 /// A unique identifier for an entry in the database, 8 characters long,
 /// consists of ASCII alphanumeric characters (`a-z`, `A-Z`, and `0-9`).
-#[derive(Debug, PartialEq, Eq, Clone, Hash, Decode, Encode, Deserialize, Serialize)]
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+#[derive(Deserialize, Serialize)]
 pub struct Mmid(String);
 
 impl Mmid {

@@ -347,3 +346,12 @@ impl std::fmt::Display for Mmid {
         write!(f, "{}", self.0)
     }
 }
+
+#[rocket::async_trait]
+impl<'r> FromFormField<'r> for Mmid {
+    fn from_value(field: ValueField<'r>) -> form::Result<'r, Self> {
+        Ok(
+            Self::try_from(field.value).map_err(|_| form::Error::validation("Invalid MMID"))?
+        )
+    }
+}
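
With FromFormField in place, Rocket can parse an Mmid straight out of a query or form value, rejecting anything its TryFrom impl refuses. A hypothetical route using it (handler is illustrative, not from this commit):

use rocket::get;

// Hypothetical handler: Rocket validates the mmid query parameter via the
// FromFormField impl above before this body ever runs.
#[get("/file-info?<mmid>")]
fn file_info(mmid: Mmid) -> String {
    format!("requested entry {mmid}")
}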

src/lib.rs

@@ -8,20 +8,17 @@ pub mod utils;
 
 use std::sync::{Arc, RwLock};
 
-use crate::database::{Mmid, MochiFile, Mochibase};
-use crate::pages::{footer, head};
-use crate::settings::Settings;
-use crate::strings::{parse_time_string, to_pretty_time};
-use crate::utils::hash_file;
+use crate::{
+    database::{Mmid, MochiFile, Mochibase},
+    pages::{footer, head},
+    settings::Settings,
+    strings::{parse_time_string, to_pretty_time},
+    utils::hash_file,
+};
 use chrono::{DateTime, Utc};
 use maud::{html, Markup, PreEscaped};
 use rocket::{
-    data::ToByteUnit,
-    form::Form,
-    fs::TempFile,
-    get, post,
-    serde::{json::Json, Serialize},
-    FromForm, State,
+    data::ToByteUnit, form::Form, fs::TempFile, get, post, serde::{json::Json, Serialize}, FromForm, State
 };
+use uuid::Uuid;

@@ -78,6 +75,34 @@ pub struct Upload<'r> {
     file: TempFile<'r>,
 }
 
+/// A response to the client from the server
+#[derive(Serialize, Default, Debug)]
+pub struct ClientResponse {
+    /// Success or failure
+    pub status: bool,
+
+    pub response: &'static str,
+
+    #[serde(skip_serializing_if = "str::is_empty")]
+    pub name: String,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub mmid: Option<Mmid>,
+    #[serde(skip_serializing_if = "str::is_empty")]
+    pub hash: String,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub expires: Option<DateTime<Utc>>,
+}
+
+impl ClientResponse {
+    fn failure(response: &'static str) -> Self {
+        Self {
+            status: false,
+            response,
+            ..Default::default()
+        }
+    }
+}
+
 /// Handle a file upload and store it
 #[post("/upload", data = "<file_data>")]
 pub async fn handle_upload(
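
failure plus the Default derive keeps error returns to one line, with the empty fields skipped during serialization. A sketch of the calling pattern (hypothetical helper, illustrative limit check):

use rocket::serde::json::Json;

// Hypothetical guard for an upload handler: on failure, name/hash stay empty
// and mmid/expires stay None, so the skip_serializing_if attributes omit them.
fn reject_oversized(size: u64, max: u64) -> Option<Json<ClientResponse>> {
    (size > max).then(|| Json(ClientResponse::failure("File too large")))
}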

@@ -152,30 +177,28 @@ pub async fn handle_upload(
     }))
 }
 
-/// A response to the client from the server
 #[derive(Serialize, Default, Debug)]
-pub struct ClientResponse {
-    /// Success or failure
-    pub status: bool,
+pub struct ChunkedResponse {
+    /// UUID used for associating the chunk with the final file
+    uuid: Uuid,
 
-    pub response: &'static str,
+    /// Valid max chunk size in bytes
+    chunk_size: u64,
 
-    #[serde(skip_serializing_if = "str::is_empty")]
-    pub name: String,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub mmid: Option<Mmid>,
-    #[serde(skip_serializing_if = "str::is_empty")]
-    pub hash: String,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub expires: Option<DateTime<Utc>>,
+    /// The datetime at which the upload will be invalidated unless new
+    /// chunks have come in
+    timeout: DateTime<Utc>,
+
+    /// The datetime at which the upload will be invalidated even if new
+    /// chunks have come in
+    hard_timeout: DateTime<Utc>,
 }
 
-impl ClientResponse {
-    fn failure(response: &'static str) -> Self {
-        Self {
-            status: false,
-            response,
-            ..Default::default()
-        }
-    }
-}
+/// Start a chunked upload. Response contains all the info you need to continue
+/// uploading chunks.
+#[get("/upload/chunked")]
+pub async fn chunked_start() -> Result<Json<ClientResponse>, std::io::Error> {
+
+
+    todo!()
+}
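
chunked_start is still a todo!(), but the ChunkedResponse fields outline the protocol: the client requests a session, receives a uuid and a chunk_size cap, and must keep chunks arriving before timeout (presumably refreshed per chunk) and finish before hard_timeout. A speculative sketch of the response construction, with both windows assumed rather than taken from this commit:

use chrono::{TimeDelta, Utc};
use uuid::Uuid;

// Speculative constructor for ChunkedResponse; the 30-second rolling window
// and 24-hour hard cap are assumptions, not values from this commit.
fn new_chunked_session(chunk_size: u64) -> ChunkedResponse {
    let now = Utc::now();
    ChunkedResponse {
        uuid: Uuid::new_v4(),
        chunk_size,
        timeout: now + TimeDelta::seconds(30),
        hard_timeout: now + TimeDelta::hours(24),
    }
}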

src/settings.rs

@@ -48,7 +48,7 @@ pub struct Settings {
 impl Default for Settings {
     fn default() -> Self {
         Self {
-            max_filesize: 1.megabytes().into(), // 128 MB
+            max_filesize: 1.megabytes().into(), // 1 MB
             overwrite: true,
             duration: DurationSettings::default(),
             server: ServerSettings::default(),