Significantly improved speed of decompression and parallelized it

G2-Games 2024-07-25 04:32:12 -05:00
parent 85cc2dea22
commit 2c3c1d942b
4 changed files with 32 additions and 13 deletions
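In outline, decompression now reads every compressed chunk into memory first and then runs the per-chunk LZW pass on rayon's thread pool, concatenating the results in order afterwards. A minimal sketch of that pattern; decompress_chunk is a hypothetical stand-in for the real decompress_lzw shown in the diff below:

use rayon::prelude::*;

// Hypothetical stand-in for the real per-chunk LZW routine (decompress_lzw below).
fn decompress_chunk(compressed: &[u8], raw_size: usize) -> Vec<u8> {
    let _ = compressed;
    vec![0u8; raw_size]
}

// Chunks are independent, so they can be decompressed in parallel.
// rayon's collect() preserves order, so concatenating afterwards gives
// the same bytes as the old sequential loop.
fn decompress_all(chunks: &[(Vec<u8>, usize)]) -> Vec<u8> {
    chunks
        .par_iter()
        .map(|(compressed, raw_size)| decompress_chunk(compressed, *raw_size))
        .collect::<Vec<Vec<u8>>>()
        .concat()
}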

.gitignore

@@ -7,3 +7,4 @@ Cargo.lock
 *.dpf
 *.raw
 *.rgba
+*.jpg

Cargo.toml

@@ -6,4 +6,5 @@ edition = "2021"
 [dependencies]
 byteorder = "1.5.0"
 image = "0.25.2"
+rayon = "1.10.0"
 thiserror = "1.0.63"


@@ -4,6 +4,8 @@ use std::{
 };
 
 use byteorder::{ReadBytesExt, WriteBytesExt, LE};
+use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
+use thiserror::Error;
 
 use crate::binio::{BitReader, BitWriter};
 
@@ -43,6 +45,12 @@ impl CompressionInfo {
     }
 }
 
+#[derive(Debug, Error)]
+enum CompressionError {
+    #[error("bad compressed element \"{0}\" at position {1}")]
+    BadElement(u8, usize),
+}
+
 pub fn compress(data: &[u8]) -> (Vec<u8>, CompressionInfo) {
     let mut part_data;
 
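The new error type gets its Display and Error impls from the thiserror derive. A small hypothetical usage (same module, since the enum is private), showing the message produced by the #[error(...)] attribute:

let err = CompressionError::BadElement(0x1f, 42);
assert_eq!(err.to_string(), "bad compressed element \"31\" at position 42");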
@@ -151,25 +159,31 @@ fn compress_lzw(data: &[u8], last: Vec<u8>) -> (usize, Vec<u8>, Vec<u8>) {
 
 pub fn decompress<T: ReadBytesExt + Read>(input: &mut T, chunk_info: &CompressionInfo) -> Vec<u8> {
     let mut output_buf: Vec<u8> = vec![];
 
-    for block in &chunk_info.chunks {
-        let mut buffer = vec![0u8; block.size_compressed];
+    let mut compressed_chunks = Vec::new();
+    for chunk_info in &chunk_info.chunks {
+        let mut buffer = vec![0u8; chunk_info.size_compressed];
         input.read_exact(&mut buffer).unwrap();
 
-        let raw_buf = decompress_lzw(&buffer, block.size_raw);
-
-        output_buf.write_all(&raw_buf).unwrap();
+        compressed_chunks.push((buffer, chunk_info.size_raw));
     }
 
+    let decompressed_chunks: Vec<Vec<u8>> = compressed_chunks
+        .par_iter()
+        .map(|chunk| decompress_lzw(&chunk.0, chunk.1).unwrap())
+        .collect();
+
+    decompressed_chunks.iter().for_each(|c| output_buf.write_all(&c).unwrap());
+
     output_buf
 }
 
-fn decompress_lzw(input_data: &[u8], size: usize) -> Vec<u8> {
+fn decompress_lzw(input_data: &[u8], size: usize) -> Result<Vec<u8>, CompressionError> {
    let mut data = Cursor::new(input_data);
 
     // Build the initial dictionary of 256 values
-    let mut dictionary = HashMap::new();
+    let mut dictionary = Vec::new();
     for i in 0..256 {
-        dictionary.insert(i as u64, vec![i as u8]);
+        dictionary.push(vec![i as u8]);
     }
     let mut dictionary_count = dictionary.len() as u64;
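A side note on the .unwrap() inside the parallel map: since decompress_lzw now returns a Result, rayon can also collect an iterator of Results into a single Result and surface the first CompressionError instead of panicking. A possible variant, not what this commit does, and it assumes decompress itself is changed to return a Result:

    let decompressed_chunks: Vec<Vec<u8>> = compressed_chunks
        .par_iter()
        .map(|chunk| decompress_lzw(&chunk.0, chunk.1))
        .collect::<Result<Vec<Vec<u8>>, CompressionError>>()?;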
@@ -177,7 +191,7 @@ fn decompress_lzw(input_data: &[u8], size: usize) -> Vec<u8> {
     let data_size = input_data.len();
 
     let mut bit_io = BitReader::new(&mut data);
-    let mut w = dictionary.get(&0).unwrap().clone();
+    let mut w = dictionary.get(0).unwrap().clone();
 
     let mut element;
     loop {
@@ -193,7 +207,7 @@ fn decompress_lzw(input_data: &[u8], size: usize) -> Vec<u8> {
         }
 
         let mut entry;
-        if let Some(x) = dictionary.get(&element) {
+        if let Some(x) = dictionary.get(element as usize) {
             // If the element was already in the dict, get it
             entry = x.clone()
         } else if element == dictionary_count {
@@ -205,9 +219,10 @@ fn decompress_lzw(input_data: &[u8], size: usize) -> Vec<u8> {
         result.write_all(&entry).unwrap();
         w.push(entry[0]);
 
-        dictionary.insert(dictionary_count, w.clone());
+        dictionary.push(w.clone());
         dictionary_count += 1;
 
         w.clone_from(&entry);
     }
 
-    result
+    Ok(result)
 }
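Besides the parallelism, the other speed change is the dictionary above: LZW assigns codes densely starting at 0, so a code can index a Vec directly and every lookup avoids hashing. A small sketch of that assumption, simplified from the diff:

// The 256 single-byte entries occupy indices 0..=255...
let mut dictionary: Vec<Vec<u8>> = (0u16..256).map(|i| vec![i as u8]).collect();
// ...and each new entry is pushed at the next free index, so an LZW code
// is exactly the entry's position in the Vec.
dictionary.push(vec![b'a', b'b']); // this entry's code is 256
assert_eq!(dictionary[256], [b'a', b'b']);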


@@ -17,13 +17,15 @@ use std::{
 use image::RgbaImage;
 
 fn main() {
-    let image_data = image::open("littlespace.png").unwrap().to_rgba8();
+    /*
+    let image_data = image::open("kirara_motorbike.jpg").unwrap().to_rgba8();
     let encoded_dpf = DangoPicture::from_raw(image_data.width(), image_data.height(), &image_data);
 
     let timer = Instant::now();
     let mut outfile = BufWriter::new(File::create("test.dpf").unwrap());
     encoded_dpf.encode(&mut outfile);
     println!("Encoding took {}ms", timer.elapsed().as_millis());
+    */
 
     let timer = Instant::now();
     let mut infile = BufReader::new(File::open("test.dpf").unwrap());