G2-Games 2024-07-02 12:04:04 -05:00
commit 1b5ebd91a7
6 changed files with 142 additions and 113 deletions

View file

@ -6,8 +6,11 @@ description="""
An encoder/decoder for CZ# image files used in the LUCA System Engine.
"""
[features]
png = ["dep:image"]
[dependencies]
byteorder = "1.5.0"
thiserror = "1.0.59"
image = { version = "0.25.1", default-features = false, features = ["png"] }
imagequant = "4.3.1"
image = { version = "0.25", default-features = false, features = ["png"], optional = true }
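With image now an optional dependency, PNG support becomes opt-in: a downstream crate only pulls in image when it asks for the png feature. A minimal consumer entry might look like the following sketch (the crate name and version are placeholders, not taken from this commit):

[dependencies]
# Hypothetical consumer Cargo.toml entry; substitute the real crate name and version.
luca_cz = { version = "*", features = ["png"] }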

View file

@ -1,4 +1,4 @@
pub struct BitIO {
pub struct BitIo {
data: Vec<u8>,
byte_offset: usize,
bit_offset: usize,
@ -6,7 +6,8 @@ pub struct BitIO {
byte_size: usize,
}
impl BitIO {
impl BitIo {
/// Create a new BitIO reader and writer over some data
pub fn new(data: Vec<u8>) -> Self {
Self {
data,
@ -16,22 +17,25 @@ impl BitIO {
}
}
/// Get the byte offset of the reader
pub fn byte_offset(&self) -> usize {
self.byte_offset
}
/// Get the byte size of the reader
pub fn byte_size(&self) -> usize {
self.byte_size
}
/// Get the current bytes up to `byte_size` in the reader
pub fn bytes(&self) -> Vec<u8> {
self.data[..self.byte_size].to_vec()
}
/// Read some bits from the buffer
pub fn read_bit(&mut self, bit_len: usize) -> u64 {
//print!("{}: ", bit_len);
if bit_len > 8 * 8 {
panic!()
panic!("Cannot read more than 64 bits")
}
if bit_len % 8 == 0 && self.bit_offset == 0 {
@ -54,9 +58,10 @@ impl BitIO {
result
}
/// Read some bytes from the buffer
pub fn read(&mut self, byte_len: usize) -> u64 {
if byte_len > 8 {
panic!()
panic!("Cannot read more than 8 bytes")
}
let mut padded_slice = [0u8; 8];
@ -66,9 +71,10 @@ impl BitIO {
u64::from_le_bytes(padded_slice)
}
/// Write some bits to the buffer
pub fn write_bit(&mut self, data: u64, bit_len: usize) {
if bit_len > 8 * 8 {
panic!();
panic!("Cannot write more than 64 bits");
}
if bit_len % 8 == 0 && self.bit_offset == 0 {
@ -95,7 +101,7 @@ impl BitIO {
pub fn write(&mut self, data: u64, byte_len: usize) {
if byte_len > 8 {
panic!()
panic!("Cannot write more than 8 bytes")
}
let mut padded_slice = [0u8; 8];
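A minimal sketch of how the BitIo reader/writer above might be used, built only from the methods shown in this file; the bit-packing order and the write/read round-trip behavior are assumed here, not verified:

// Illustrative only: pack a flag bit and an 18-bit code, then read them back.
// Assumes BitIo is in scope (same module) and that read_bit mirrors write_bit.
fn bitio_round_trip_sketch() -> (u64, u64) {
    let mut writer = BitIo::new(vec![0u8; 8]);
    writer.write_bit(1, 1);        // a 1-bit flag, like compress_lzw2 uses elsewhere in this commit
    writer.write_bit(0x2ABCD, 18); // an 18-bit code
    writer.write_bit(0, 5);        // pad to a whole byte (1 + 18 + 5 = 24 bits)

    // Read back with the same lengths, in the same order
    let mut reader = BitIo::new(writer.bytes());
    (reader.read_bit(1), reader.read_bit(18))
}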

View file

@ -4,7 +4,7 @@ use std::{
io::{Read, Seek, Write},
};
use crate::binio::BitIO;
use crate::binio::BitIo;
use crate::common::CzError;
/// The size of compressed data in each chunk
@ -27,7 +27,7 @@ pub struct CompressionInfo {
pub total_size_compressed: usize,
/// Total size of the original uncompressed data
pub total_size_raw: usize,
pub _total_size_raw: usize,
/// The compression chunk information
pub chunks: Vec<ChunkInfo>,
@ -82,7 +82,7 @@ pub fn get_chunk_info<T: Seek + ReadBytesExt + Read>(
Ok(CompressionInfo {
chunk_count: parts_count as usize,
total_size_compressed: total_size as usize,
total_size_raw: total_size_raw as usize,
_total_size_raw: total_size_raw as usize,
chunks: part_sizes,
length: bytes.stream_position()? as usize,
})
@ -93,70 +93,59 @@ pub fn decompress<T: Seek + ReadBytesExt + Read>(
input: &mut T,
chunk_info: &CompressionInfo,
) -> Result<Vec<u8>, CzError> {
let mut m_dst = 0;
let mut bitmap = vec![0; chunk_info.total_size_raw];
for chunk in &chunk_info.chunks {
let mut part = vec![0u8; chunk.size_compressed * 2];
input.read_exact(&mut part)?;
for j in (0..part.len()).step_by(2) {
let ctl = part[j + 1];
if ctl == 0 {
bitmap[m_dst] = part[j];
m_dst += 1;
} else {
m_dst += copy_range(&mut bitmap, &part, get_offset(&part, j), m_dst);
}
}
}
bitmap.truncate(chunk_info.total_size_raw);
Ok(bitmap)
let mut output_buf: Vec<u8> = vec![];
for block in &chunk_info.chunks {
let mut buffer = vec![0u16; block.size_compressed];
for word in buffer.iter_mut() {
*word = input.read_u16::<LittleEndian>().unwrap();
}
let raw_buf = decompress_lzw(&buffer, block.size_raw);
output_buf.write_all(&raw_buf)?;
}
Ok(output_buf)
}
fn get_offset(input: &[u8], src: usize) -> usize {
(((input[src] as usize) | (input[src + 1] as usize) << 8) - 0x101) * 2
}
fn copy_range(bitmap: &mut Vec<u8>, input: &[u8], src: usize, dst: usize) -> usize {
let mut dst = dst;
let start_pos = dst;
if input[src + 1] == 0 {
bitmap[dst] = input[src];
dst += 1;
} else if get_offset(input, src) == src {
bitmap[dst] = 0;
dst += 1;
} else {
dst += copy_range(bitmap, input, get_offset(input, src), dst);
}
if input[src + 3] == 0 {
bitmap[dst] = input[src + 2];
dst += 1;
} else if get_offset(input, src + 2) == src {
bitmap[dst] = bitmap[start_pos];
dst += 1;
} else {
bitmap[dst] = copy_one(input, get_offset(input, src + 2));
dst += 1;
}
dst - start_pos
}
fn decompress_lzw(
input_data: &[u16],
size: usize
) -> Vec<u8> {
let mut dictionary: HashMap<u16, Vec<u8>> = HashMap::new();
for i in 0..256 {
dictionary.insert(i as u16, vec![i as u8]);
}
let mut dictionary_count = dictionary.len() as u16;
let mut w = vec![0];
let mut result = Vec::with_capacity(size);
input_data.iter().for_each(|element| {
let mut entry;
if let Some(x) = dictionary.get(element) {
entry = x.clone();
} else if *element == dictionary_count {
entry = w.clone();
entry.push(w[0]);
} else {
panic!("Bad compressed element: {}", element)
}
result.write_all(&entry).unwrap();
w.push(entry[0]);
dictionary.insert(dictionary_count, w.clone());
dictionary_count += 1;
w = entry;
});
result
}
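A hand-traced sanity check for the decoder above (the codes are chosen by hand for illustration, not taken from a real CZ file): codes 0 through 255 decode to literal bytes, and code 257 is the "ab" entry the dictionary learns while handling the first two codes, so the sequence expands to "abab".

// Illustrative test sketch; assumes it lives in the same module as decompress_lzw.
// The second argument is only a capacity hint.
#[test]
fn decompress_lzw_small_input() {
    assert_eq!(decompress_lzw(&[97, 98, 257], 4), b"abab".to_vec());
}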
fn copy_one(input: &[u8], src: usize) -> u8 {
if input[src + 1] == 0 {
input[src]
} else if get_offset(input, src) == src {
0
} else {
copy_one(input, get_offset(input, src))
}
}
/// Decompress an LZW compressed stream like CZ2
pub fn decompress2<T: Seek + ReadBytesExt + Read>(
@ -177,7 +166,10 @@ pub fn decompress2<T: Seek + ReadBytesExt + Read>(
Ok(output_buf)
}
fn decompress_lzw2(input_data: &[u8], size: usize) -> Vec<u8> {
fn decompress_lzw2(
input_data: &[u8],
size: usize
) -> Vec<u8> {
let mut data = input_data.to_vec();
data[0] = 0;
let mut dictionary = HashMap::new();
@ -189,7 +181,7 @@ fn decompress_lzw2(input_data: &[u8], size: usize) -> Vec<u8> {
let data_size = input_data.len();
data.extend_from_slice(&[0, 0]);
let mut bit_io = BitIO::new(data);
let mut bit_io = BitIo::new(data);
let mut w = dictionary.get(&0).unwrap().clone();
let mut element;
@ -244,7 +236,7 @@ pub fn compress(
let mut output_buf: Vec<u8> = vec![];
let mut output_info = CompressionInfo {
total_size_raw: data.len(),
_total_size_raw: data.len(),
..Default::default()
};
@ -279,7 +271,11 @@ pub fn compress(
(output_buf, output_info)
}
fn compress_lzw(data: &[u8], size: usize, last: Vec<u8>) -> (usize, Vec<u16>, Vec<u8>) {
fn compress_lzw(
data: &[u8],
size: usize,
last: Vec<u8>
) -> (usize, Vec<u16>, Vec<u8>) {
let mut count = 0;
let mut dictionary = HashMap::new();
for i in 0..=255 {
@ -331,7 +327,10 @@ fn compress_lzw(data: &[u8], size: usize, last: Vec<u8>) -> (usize, Vec<u16>, Ve
(count, compressed, last_element)
}
pub fn compress2(data: &[u8], size: usize) -> (Vec<u8>, CompressionInfo) {
pub fn compress2(
data: &[u8],
size: usize
) -> (Vec<u8>, CompressionInfo) {
let size = if size == 0 { 0x87BDF } else { size };
let mut part_data;
@ -342,7 +341,7 @@ pub fn compress2(data: &[u8], size: usize) -> (Vec<u8>, CompressionInfo) {
let mut output_buf: Vec<u8> = Vec::new();
let mut output_info = CompressionInfo {
total_size_raw: data.len(),
_total_size_raw: data.len(),
..Default::default()
};
@ -374,7 +373,11 @@ pub fn compress2(data: &[u8], size: usize) -> (Vec<u8>, CompressionInfo) {
(output_buf, output_info)
}
fn compress_lzw2(data: &[u8], size: usize, last: Vec<u8>) -> (usize, Vec<u8>, Vec<u8>) {
fn compress_lzw2(
data: &[u8],
size: usize,
last: Vec<u8>
) -> (usize, Vec<u8>, Vec<u8>) {
let mut data = data.to_vec();
if !data.is_empty() {
data[0] = 0;
@ -391,8 +394,8 @@ fn compress_lzw2(data: &[u8], size: usize, last: Vec<u8>) -> (usize, Vec<u8>, Ve
element = last
}
let mut bit_io = BitIO::new(vec![0u8; size + 2]);
let write_bit = |bit_io: &mut BitIO, code: u64| {
let mut bit_io = BitIo::new(vec![0u8; size + 2]);
let write_bit = |bit_io: &mut BitIo, code: u64| {
if code > 0x7FFF {
bit_io.write_bit(1, 1);
bit_io.write_bit(code, 18);
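Taken together, decoding a compressed CZ body means reading the chunk table with get_chunk_info, seeking past it, and handing the same reader to decompress. A sketch of that flow, mirroring the decode functions later in this commit (the input slice and error handling are simplified, and the items are assumed to be reachable from the caller's scope):

use std::io::{Cursor, Seek, SeekFrom};

fn read_compressed_body(raw: &[u8]) -> Result<Vec<u8>, CzError> {
    let mut bytes = Cursor::new(raw);
    let block_info = get_chunk_info(&mut bytes)?;            // chunk table first
    bytes.seek(SeekFrom::Start(block_info.length as u64))?;  // skip to the chunk data
    decompress(&mut bytes, &block_info)                      // LZW codes -> raw bytes
}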

View file

@ -113,7 +113,7 @@ impl DynamicCz {
/// to change the CZ# version.
pub fn save_as_cz<T: Into<std::path::PathBuf>>(&self, path: T) -> Result<(), CzError> {
let mut out_file = BufWriter::new(File::create(path.into())?);
let mut header = self.header().clone();
let mut header = *self.header();
if header.version() == CzVersion::CZ2 {
header.set_length(0x12)
@ -195,6 +195,7 @@ impl DynamicCz {
/// Internally, the [`DynamicCz`] struct operates on 32-bit RGBA values,
/// which is the highest encountered in CZ# files, therefore saving them
/// as a PNG of the same or better quality is lossless.
#[cfg(feature = "png")]
pub fn save_as_png<P: ?Sized + AsRef<Path>>(
&self,
path: &P,
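Because save_as_png now only exists when the png feature is enabled, callers have to guard their use of it the same way. A hypothetical sketch (the helper function is illustrative, and it assumes the calling crate exposes a matching png feature of its own):

fn maybe_export(cz: &DynamicCz) {
    // Compiled only when the png feature (and therefore the image dependency) is enabled.
    #[cfg(feature = "png")]
    let _ = cz.save_as_png("decoded.png");

    #[cfg(not(feature = "png"))]
    eprintln!("built without png support; skipping PNG export");
}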

View file

@ -1,5 +1,6 @@
use byteorder::{ReadBytesExt, WriteBytesExt};
use std::io::{Read, Seek, SeekFrom, Write};
use std::time::Instant;
use crate::common::{CommonHeader, CzError};
use crate::compression::{compress, decompress, get_chunk_info};
@ -11,8 +12,13 @@ pub fn decode<T: Seek + ReadBytesExt + Read>(
let block_info = get_chunk_info(bytes)?;
bytes.seek(SeekFrom::Start(block_info.length as u64))?;
let timer = Instant::now();
let bitmap = decompress(bytes, &block_info)?;
dbg!(timer.elapsed());
let timer = Instant::now();
let bitmap = line_diff(header, &bitmap);
dbg!(timer.elapsed());
Ok(bitmap)
}
@ -46,22 +52,22 @@ fn line_diff(header: &CommonHeader, data: &[u8]) -> Vec<u8> {
let pixel_byte_count = header.depth() >> 3;
let line_byte_count = (width * pixel_byte_count as u32) as usize;
let mut curr_line: Vec<u8>;
let mut prev_line: Vec<u8> = Vec::with_capacity(line_byte_count);
let mut curr_line;
let mut prev_line = Vec::with_capacity(line_byte_count);
let mut i = 0;
let mut index = 0;
for y in 0..height {
curr_line = data[i..i + line_byte_count].to_vec();
curr_line = data[index..index + line_byte_count].to_vec();
if y % block_height as u32 != 0 {
for x in 0..line_byte_count {
curr_line[x] = u8::wrapping_add(curr_line[x], prev_line[x])
}
curr_line.iter_mut().zip(&prev_line).for_each(|(curr_p, prev_p)| {
*curr_p = curr_p.wrapping_add(*prev_p)
});
}
prev_line.clone_from(&curr_line);
if pixel_byte_count == 4 {
output_buf[i..i + line_byte_count].copy_from_slice(&curr_line);
output_buf[index..index + line_byte_count].copy_from_slice(&curr_line);
} else if pixel_byte_count == 3 {
for x in (0..line_byte_count).step_by(3) {
let loc = (y * 3 * width) as usize + x;
@ -80,7 +86,7 @@ fn line_diff(header: &CommonHeader, data: &[u8]) -> Vec<u8> {
}
}
i += line_byte_count;
index += line_byte_count;
}
output_buf
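The reworked loop keeps the same row-delta scheme: inside each block, every row after the first stores byte-wise differences from the row above, which decoding undoes with wrapping adds. A stripped-down sketch of just that idea, ignoring the block reset and the 3- versus 4-byte-per-pixel handling above:

// Illustrative only: each row after the first holds wrapping deltas from the previous row.
fn undo_row_deltas(rows: &mut [Vec<u8>]) {
    for y in 1..rows.len() {
        let (prev, curr) = rows.split_at_mut(y);
        for (c, p) in curr[0].iter_mut().zip(&prev[y - 1]) {
            *c = c.wrapping_add(*p);
        }
    }
}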

View file

@ -1,6 +1,6 @@
use byteorder::{ReadBytesExt, WriteBytesExt};
use image::RgbaImage;
use std::io::{Read, Seek, SeekFrom, Write};
use std::time::Instant;
use crate::common::{CommonHeader, CzError};
use crate::compression::{compress, decompress, get_chunk_info};
@ -12,14 +12,15 @@ pub fn decode<T: Seek + ReadBytesExt + Read>(
let block_info = get_chunk_info(bytes)?;
bytes.seek(SeekFrom::Start(block_info.length as u64))?;
let timer = Instant::now();
let data = decompress(bytes, &block_info)?;
dbg!(timer.elapsed());
let timer = Instant::now();
let output = line_diff(header, &data);
dbg!(timer.elapsed());
let mut picture = image::RgbaImage::new(header.width() as u32, header.height() as u32);
line_diff(&mut picture, &data);
Ok(picture.into_raw())
Ok(output)
}
pub fn encode<T: WriteBytesExt + Write>(
@ -38,9 +39,12 @@ pub fn encode<T: WriteBytesExt + Write>(
Ok(())
}
fn line_diff(picture: &mut RgbaImage, data: &[u8]) {
let width = picture.width();
let height = picture.height();
fn line_diff(header: &CommonHeader, data: &[u8]) -> Vec<u8> {
let width = header.width() as u32;
let height = header.height() as u32;
let mut output_buf = Vec::with_capacity((width * height * 4) as usize);
let block_height = (f32::ceil(height as f32 / 3.0) as u16) as u32;
let mut curr_line;
@ -49,37 +53,43 @@ fn line_diff(picture: &mut RgbaImage, data: &[u8]) {
let mut curr_alpha;
let mut prev_alpha = Vec::with_capacity(width as usize);
let pcount = (width * height * 3) as usize;
let mut i = 0;
let mut z = 0;
let mut rgb_index = 0;
let mut alpha_index = (width * height * 3) as usize;
for y in 0..height {
curr_line = data[i..i + width as usize * 3].to_vec();
curr_alpha = data[pcount + z..pcount + z + width as usize].to_vec();
curr_line = data[rgb_index..rgb_index + width as usize * 3].to_vec();
curr_alpha = data[alpha_index..alpha_index + width as usize].to_vec();
if y % block_height != 0 {
for x in 0..(width as usize * 3) {
curr_line[x] = curr_line[x].wrapping_add(prev_line[x])
}
for x in 0..width as usize {
curr_alpha[x] = curr_alpha[x].wrapping_add(prev_alpha[x])
}
curr_line.iter_mut().zip(&prev_line).for_each(|(curr_p, prev_p)| {
*curr_p = curr_p.wrapping_add(*prev_p);
});
curr_alpha.iter_mut().zip(&prev_alpha).for_each(|(curr_a, prev_a)| {
*curr_a = curr_a.wrapping_add(*prev_a);
});
}
for x in 0..width as usize {
picture.get_pixel_mut(x as u32, y).0 = [
curr_line[x * 3],
curr_line[x * 3 + 1],
curr_line[x * 3 + 2],
curr_alpha[x],
];
}
// Write the decoded RGBA data to the final buffer
curr_line
.windows(3)
.step_by(3)
.zip(&curr_alpha)
.for_each(|(curr_p, alpha_p)| {
output_buf.extend_from_slice(&[
curr_p[0],
curr_p[1],
curr_p[2],
*alpha_p,
]);
});
prev_line.clone_from(&curr_line);
prev_alpha.clone_from(&curr_alpha);
i += width as usize * 3;
z += width as usize;
rgb_index += width as usize * 3;
alpha_index += width as usize;
}
output_buf
}
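The rewritten inner loop assembles RGBA output directly from the packed RGB plane plus the trailing alpha plane; windows(3).step_by(3) visits each non-overlapping 3-byte pixel. The same interleave can be written with chunks_exact, shown here as an illustrative stand-alone helper rather than the crate's own code:

// Interleave a packed RGB plane with a separate alpha plane into RGBA.
fn interleave_rgba(rgb: &[u8], alpha: &[u8]) -> Vec<u8> {
    let mut out = Vec::with_capacity(alpha.len() * 4);
    for (px, a) in rgb.chunks_exact(3).zip(alpha) {
        out.extend_from_slice(&[px[0], px[1], px[2], *a]);
    }
    out
}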
fn diff_line(header: &CommonHeader, input: &[u8]) -> Vec<u8> {