Mirror of https://github.com/Dangoware/sqp.git (synced 2025-04-19 07:12:55 -05:00)

Commit a78181ee55 (parent f1013cf165)
Added decoding as well as moved some things around

6 changed files with 1218 additions and 34 deletions
Cargo.lock (generated)
File diff suppressed because it is too large.
Cargo.toml
@@ -5,3 +5,4 @@ edition = "2021"
 
 [dependencies]
 byteorder = "1.5.0"
+image = "0.25.2"
src/compression.rs
@@ -1,6 +1,6 @@
-use std::{collections::HashMap, io::Write};
+use std::{collections::HashMap, io::{Read, Write}};
 
-use byteorder::{WriteBytesExt, LE};
+use byteorder::{ReadBytesExt, WriteBytesExt, LE};
 
 use crate::binio::BitIo;
 
@@ -20,14 +20,8 @@ pub struct CompressionInfo {
     /// Number of compression chunks
     pub chunk_count: usize,
 
-    /// Total size of the data when compressed
-    pub total_size_compressed: usize,
-
     /// The compression chunk information
     pub chunks: Vec<ChunkInfo>,
-
-    /// Length of the compression chunk info
-    pub length: usize,
 }
 
 impl CompressionInfo {
@@ -46,7 +40,7 @@ impl CompressionInfo {
     }
 }
 
-pub fn compress2(data: &[u8]) -> (Vec<u8>, CompressionInfo) {
+pub fn compress(data: &[u8]) -> (Vec<u8>, CompressionInfo) {
     let mut part_data;
 
     let mut offset = 0;
@@ -59,7 +53,7 @@ pub fn compress2(data: &[u8]) -> (Vec<u8>, CompressionInfo) {
     };
 
     loop {
-        (count, part_data, last) = compress_lzw2(&data[offset..], last);
+        (count, part_data, last) = compress_lzw(&data[offset..], last);
         if count == 0 {
             break;
         }
@@ -79,11 +73,10 @@
        panic!("No chunks compressed!")
    }
 
-    output_info.total_size_compressed = output_buf.len();
    (output_buf, output_info)
 }
 
-fn compress_lzw2(data: &[u8], last: Vec<u8>) -> (usize, Vec<u8>, Vec<u8>) {
+fn compress_lzw(data: &[u8], last: Vec<u8>) -> (usize, Vec<u8>, Vec<u8>) {
     let mut count = 0;
     let mut dictionary = HashMap::new();
     for i in 0..=255 {
@@ -136,7 +129,7 @@ fn compress_lzw2(data: &[u8], last: Vec<u8>) -> (usize, Vec<u8>, Vec<u8>) {
            }
        }
        return (count, bit_io.bytes(), Vec::new());
-    } else if bit_io.byte_size() < 0x87BDF {
+    } else if dictionary_count < 0x3FFFE {
        if !last_element.is_empty() {
            write_bit(&mut bit_io, *dictionary.get(&last_element).unwrap());
        }
@@ -145,3 +138,68 @@
 
     (count, bit_io.bytes(), last_element)
 }
+
+pub fn decompress<T: ReadBytesExt + Read>(
+    input: &mut T,
+    chunk_info: &CompressionInfo,
+) -> Vec<u8> {
+    let mut output_buf: Vec<u8> = vec![];
+
+    for block in &chunk_info.chunks {
+        let mut buffer = vec![0u8; block.size_compressed];
+        input.read_exact(&mut buffer).unwrap();
+
+        let raw_buf = decompress_lzw(&buffer, block.size_raw);
+
+        output_buf.write_all(&raw_buf).unwrap();
+    }
+
+    output_buf
+}
+
+fn decompress_lzw(input_data: &[u8], size: usize) -> Vec<u8> {
+    let mut data = input_data.to_vec();
+    let mut dictionary = HashMap::new();
+    for i in 0..256 {
+        dictionary.insert(i as u64, vec![i as u8]);
+    }
+    let mut dictionary_count = dictionary.len() as u64;
+    let mut result = Vec::with_capacity(size);
+
+    let data_size = input_data.len();
+    data.extend_from_slice(&[0, 0]);
+    let mut bit_io = BitIo::new(data);
+    let mut w = dictionary.get(&0).unwrap().clone();
+
+    let mut element;
+    loop {
+        let flag = bit_io.read_bit(1);
+        if flag == 0 {
+            element = bit_io.read_bit(15);
+        } else {
+            element = bit_io.read_bit(18);
+        }
+
+        if bit_io.byte_offset() > data_size {
+            break;
+        }
+
+        let mut entry;
+        if let Some(x) = dictionary.get(&element) {
+            // If the element was already in the dict, get it
+            entry = x.clone()
+        } else if element == dictionary_count {
+            entry = w.clone();
+            entry.push(w[0])
+        } else {
+            panic!("Bad compressed element: {}", element)
+        }
+
+        result.write_all(&entry).unwrap();
+        w.push(entry[0]);
+        dictionary.insert(dictionary_count, w.clone());
+        dictionary_count += 1;
+        w.clone_from(&entry);
+    }
+    result
+}
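A note on the code format used by compress_lzw and decompress_lzw above: each dictionary reference is written as a 1-bit flag followed by either a 15-bit or an 18-bit code. This is also why the changed guard in compress_lzw now bounds the per-chunk dictionary at 0x3FFFE entries instead of bounding the compressed byte size, keeping codes just under the 2^18 = 0x40000 values an 18-bit code can address. Below is a minimal, self-contained sketch of that flag-then-code read; it assumes an LSB-first bit order, which may not match the crate's actual BitIo implementation.

    // Hypothetical stand-in for BitIo, for illustration only.
    struct BitReader<'a> {
        data: &'a [u8],
        bit_pos: usize,
    }

    impl<'a> BitReader<'a> {
        fn new(data: &'a [u8]) -> Self {
            Self { data, bit_pos: 0 }
        }

        /// Read `count` bits as an integer, least significant bit first (assumed order).
        fn read_bits(&mut self, count: usize) -> u64 {
            let mut value = 0u64;
            for i in 0..count {
                let byte = self.data[self.bit_pos / 8];
                let bit = (byte >> (self.bit_pos % 8)) & 1;
                value |= (bit as u64) << i;
                self.bit_pos += 1;
            }
            value
        }
    }

    fn main() {
        let data = [0b0000_0110u8, 0, 0];
        let mut reader = BitReader::new(&data);

        // A 0 flag selects a short (15-bit) code, a 1 flag a long (18-bit) code.
        let flag = reader.read_bits(1);
        let code = if flag == 0 {
            reader.read_bits(15)
        } else {
            reader.read_bits(18)
        };
        println!("flag = {flag}, code = {code}");
    }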
src/header.rs
@@ -8,6 +8,16 @@ pub struct Header {
     pub height: u32,
 }
 
+impl Default for Header {
+    fn default() -> Self {
+        Self {
+            magic: *b"dangoimg",
+            width: 0,
+            height: 0,
+        }
+    }
+}
+
 impl Header {
     pub fn to_bytes(&self) -> [u8; 16] {
         let mut buf = Cursor::new(Vec::new());
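Header::to_bytes (unchanged here) returns a fixed [u8; 16], and the new decoder in src/main.rs below reads the same fields back as 8 magic bytes followed by width and height as little-endian u32s. The following is a sketch of that implied 16-byte layout, assembled with plain std calls rather than the crate's actual Cursor-based implementation.

    fn header_bytes(magic: [u8; 8], width: u32, height: u32) -> [u8; 16] {
        let mut buf = [0u8; 16];
        buf[..8].copy_from_slice(&magic);
        buf[8..12].copy_from_slice(&width.to_le_bytes());
        buf[12..16].copy_from_slice(&height.to_le_bytes());
        buf
    }

    fn main() {
        let bytes = header_bytes(*b"dangoimg", 640, 480);
        assert_eq!(&bytes[..8], b"dangoimg");
        assert_eq!(u32::from_le_bytes(bytes[8..12].try_into().unwrap()), 640);
        assert_eq!(u32::from_le_bytes(bytes[12..16].try_into().unwrap()), 480);
    }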
src/main.rs
@@ -1,35 +1,99 @@
-mod binio;
 mod compression;
 mod header;
 mod operations;
+mod binio;
 
-use std::{fs::{read, File}, io::Write};
+use std::{fs::File, io::{Read, Write}};
 
-use compression::compress2;
+use byteorder::{ReadBytesExt, WriteBytesExt, LE};
+use compression::{compress, decompress, ChunkInfo, CompressionInfo};
 use header::Header;
-use operations::diff_line;
+use image::RgbaImage;
+use operations::{diff_line, line_diff};
 
 fn main() {
-    let image_data = read("littlespace.rgba").unwrap();
-    let mut file = File::create("test.dpf").unwrap();
+    let image_data = image::open("dripping.png").unwrap().to_rgba8();
+    let encoded_dpf = DangoPicture {
+        header: Header {
+            width: image_data.width(),
+            height: image_data.height(),
+
+            ..Default::default()
+        },
+        bitmap: image_data.into_vec(),
+    };
+
+    let mut outfile = File::create("test.dpf").unwrap();
+    encoded_dpf.encode(&mut outfile);
+
+    let mut infile = File::open("test.dpf").unwrap();
+    let decoded_dpf = DangoPicture::decode(&mut infile);
+    let out_image = RgbaImage::from_raw(
+        decoded_dpf.header.width,
+        decoded_dpf.header.height,
+        decoded_dpf.bitmap
+    ).unwrap();
+    out_image.save("test2.png").unwrap();
+}
+
+struct DangoPicture {
+    header: Header,
+    bitmap: Vec<u8>,
+}
+
+impl DangoPicture {
+    fn encode<O: Write + WriteBytesExt>(&self, mut output: O) {
         let header = Header {
-        magic: *b"dangoimg",
-        width: 64,
-        height: 64,
+            width: self.header.width,
+            height: self.header.height,
+
+            ..Default::default()
         };
 
         // Write out the header
-    file.write_all(&header.to_bytes()).unwrap();
+        output.write_all(&header.to_bytes()).unwrap();
 
-    let modified_data = diff_line(header.width, header.height, &image_data);
+        let modified_data = diff_line(header.width, header.height, &self.bitmap);
 
         // Compress the image data
-    let (compressed_data, compression_info) = compress2(&modified_data);
+        let (compressed_data, compression_info) = compress(&modified_data);
 
         // Write out compression info
-    compression_info.write_into(&mut file).unwrap();
+        compression_info.write_into(&mut output).unwrap();
 
         // Write out compressed data
-    file.write_all(&compressed_data).unwrap();
+        output.write_all(&compressed_data).unwrap();
+    }
+
+    fn decode<I: Read + ReadBytesExt>(mut input: I) -> DangoPicture {
+        let mut magic = [0u8; 8];
+        input.read_exact(&mut magic).unwrap();
+
+        let header = Header {
+            magic,
+            width: input.read_u32::<LE>().unwrap(),
+            height: input.read_u32::<LE>().unwrap(),
+        };
+
+        let mut compression_info = CompressionInfo {
+            chunk_count: input.read_u32::<LE>().unwrap() as usize,
+            chunks: Vec::new(),
+        };
+
+        for _ in 0..compression_info.chunk_count {
+            compression_info.chunks.push(ChunkInfo {
+                size_compressed: input.read_u32::<LE>().unwrap() as usize,
+                size_raw: input.read_u32::<LE>().unwrap() as usize,
+            });
+        }
+
+        let preprocessed_bitmap = decompress(&mut input, &compression_info);
+        let bitmap = line_diff(header.width, header.height, &preprocessed_bitmap);
+
+        DangoPicture {
+            header,
+            bitmap
+        }
+    }
 }
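Taken together, encode and decode imply a simple container layout for a .dpf file: the 16-byte header, a little-endian u32 chunk count, one pair of little-endian u32s (size_compressed, size_raw) per chunk, and then the concatenated compressed chunks. This assumes CompressionInfo::write_into, which is not shown in this diff, emits its fields in the same order decode reads them. A small std-only sketch that dumps the chunk table of such a file:

    use std::fs::File;
    use std::io::Read;

    fn read_u32_le<R: Read>(r: &mut R) -> std::io::Result<u32> {
        let mut buf = [0u8; 4];
        r.read_exact(&mut buf)?;
        Ok(u32::from_le_bytes(buf))
    }

    fn main() -> std::io::Result<()> {
        // "test.dpf" is the file written by main() above.
        let mut file = File::open("test.dpf")?;

        // Skip the 16-byte header (magic, width, height).
        let mut header = [0u8; 16];
        file.read_exact(&mut header)?;

        let chunk_count = read_u32_le(&mut file)?;
        for i in 0..chunk_count {
            let size_compressed = read_u32_le(&mut file)?;
            let size_raw = read_u32_le(&mut file)?;
            println!("chunk {i}: {size_compressed} bytes compressed, {size_raw} bytes raw");
        }
        Ok(())
    }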
src/operations.rs
@@ -1,3 +1,54 @@
+pub fn line_diff(width: u32, height: u32, data: &[u8]) -> Vec<u8> {
+    let mut output_buf = Vec::with_capacity((width * height * 4) as usize);
+
+    let block_height = (f32::ceil(height as f32 / 3.0) as u16) as u32;
+
+    let mut curr_line;
+    let mut prev_line = Vec::with_capacity(width as usize * 3);
+
+    let mut curr_alpha;
+    let mut prev_alpha = Vec::with_capacity(width as usize);
+
+    let mut rgb_index = 0;
+    let mut alpha_index = (width * height * 3) as usize;
+    for y in 0..height {
+        curr_line = data[rgb_index..rgb_index + width as usize * 3].to_vec();
+        curr_alpha = data[alpha_index..alpha_index + width as usize].to_vec();
+
+        if y % block_height != 0 {
+            curr_line
+                .iter_mut()
+                .zip(&prev_line)
+                .for_each(|(curr_p, prev_p)| {
+                    *curr_p = curr_p.wrapping_add(*prev_p);
+                });
+            curr_alpha
+                .iter_mut()
+                .zip(&prev_alpha)
+                .for_each(|(curr_a, prev_a)| {
+                    *curr_a = curr_a.wrapping_add(*prev_a);
+                });
+        }
+
+        // Write the decoded RGBA data to the final buffer
+        curr_line
+            .windows(3)
+            .step_by(3)
+            .zip(&curr_alpha)
+            .for_each(|(curr_p, alpha_p)| {
+                output_buf.extend_from_slice(&[curr_p[0], curr_p[1], curr_p[2], *alpha_p]);
+            });
+
+        prev_line.clone_from(&curr_line);
+        prev_alpha.clone_from(&curr_alpha);
+
+        rgb_index += width as usize * 3;
+        alpha_index += width as usize;
+    }
+
+    output_buf
+}
+
 pub fn diff_line(width: u32, height: u32, input: &[u8]) -> Vec<u8> {
     let mut data = Vec::with_capacity(width as usize * 3);
     let mut alpha_data = Vec::with_capacity(width as usize);
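The new line_diff undoes the per-row filtering that diff_line applies before compression: except at block boundaries, each byte is reconstructed by wrapping-adding the byte from the row above it, with the RGB and alpha planes handled separately. A toy round trip of that idea on a single row is shown below; the wrapping_sub encoding side is an assumption, since diff_line is only partially visible in this hunk.

    fn main() {
        let prev: [u8; 4] = [10, 200, 30, 255];
        let curr: [u8; 4] = [12, 190, 30, 0];

        // Encode: store the wrapping difference from the previous row
        // (assumed to be what diff_line does).
        let encoded: Vec<u8> = curr
            .iter()
            .zip(&prev)
            .map(|(c, p)| c.wrapping_sub(*p))
            .collect();

        // Decode: add the previous row back, as line_diff does.
        let decoded: Vec<u8> = encoded
            .iter()
            .zip(&prev)
            .map(|(e, p)| e.wrapping_add(*p))
            .collect();

        assert_eq!(decoded, curr.to_vec());
        println!("encoded = {encoded:?}, decoded = {decoded:?}");
    }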