Improved binio.rs, updated LZW decompressor

This commit is contained in:
G2-Games 2024-07-02 10:06:25 -05:00
parent 3911c73761
commit 580af113eb
6 changed files with 91 additions and 88 deletions

View file

@ -1,8 +1,3 @@
/// Errors that can occur during bit- or byte-level I/O on a `BitIo` buffer.
pub enum BitError {
    /// The requested read/write length exceeded the maximum the operation supports.
    InputLength
}
pub struct BitIo {
data: Vec<u8>,
byte_offset: usize,
@ -40,7 +35,7 @@ impl BitIo {
/// Read some bits from the buffer
pub fn read_bit(&mut self, bit_len: usize) -> u64 {
if bit_len > 8 * 8 {
panic!()
panic!("Cannot read more than 64 bits")
}
if bit_len % 8 == 0 && self.bit_offset == 0 {
@ -66,7 +61,7 @@ impl BitIo {
/// Read some bytes from the buffer
pub fn read(&mut self, byte_len: usize) -> u64 {
if byte_len > 8 {
panic!()
panic!("Cannot read more than 8 bytes")
}
let mut padded_slice = [0u8; 8];
@ -79,7 +74,7 @@ impl BitIo {
/// Write some bits to the buffer
pub fn write_bit(&mut self, data: u64, bit_len: usize) {
if bit_len > 8 * 8 {
panic!();
panic!("Cannot write more than 64 bits");
}
if bit_len % 8 == 0 && self.bit_offset == 0 {
@ -104,9 +99,9 @@ impl BitIo {
self.byte_size = self.byte_offset + (self.bit_offset + 7) / 8;
}
pub fn write(&mut self, data: u64, byte_len: usize) -> Result<(), BitError> {
pub fn write(&mut self, data: u64, byte_len: usize) {
if byte_len > 8 {
return Err(BitError::InputLength);
panic!("Cannot write more than 8 bytes")
}
let mut padded_slice = [0u8; 8];
@ -117,7 +112,5 @@ impl BitIo {
self.byte_offset += byte_len;
self.byte_size = self.byte_offset + (self.bit_offset + 7) / 8;
Ok(())
}
}

View file

@ -156,10 +156,12 @@ impl CommonHeader {
self.version
}
pub fn set_version<I: TryInto<CzVersion>>(&mut self, version: I) -> Result<(), ()> {
pub fn set_version<I: TryInto<CzVersion>>(
&mut self, version: I
) -> Result<(), Box<dyn std::error::Error>> {
self.version = match version.try_into() {
Ok(val) => val,
Err(_) => return Err(()),
Err(_) => return Err("Invalid CZ value".into()),
};
Ok(())

View file

@ -27,7 +27,7 @@ pub struct CompressionInfo {
pub total_size_compressed: usize,
/// Total size of the original uncompressed data
pub total_size_raw: usize,
pub _total_size_raw: usize,
/// The compression chunk information
pub chunks: Vec<ChunkInfo>,
@ -82,7 +82,7 @@ pub fn get_chunk_info<T: Seek + ReadBytesExt + Read>(
Ok(CompressionInfo {
chunk_count: parts_count as usize,
total_size_compressed: total_size as usize,
total_size_raw: total_size_raw as usize,
_total_size_raw: total_size_raw as usize,
chunks: part_sizes,
length: bytes.stream_position()? as usize,
})
@ -93,70 +93,59 @@ pub fn decompress<T: Seek + ReadBytesExt + Read>(
input: &mut T,
chunk_info: &CompressionInfo,
) -> Result<Vec<u8>, CzError> {
let mut m_dst = 0;
let mut bitmap = vec![0; chunk_info.total_size_raw];
for chunk in &chunk_info.chunks {
let mut part = vec![0u8; chunk.size_compressed * 2];
input.read_exact(&mut part)?;
let mut output_buf: Vec<u8> = vec![];
for j in (0..part.len()).step_by(2) {
let ctl = part[j + 1];
for block in &chunk_info.chunks {
let mut buffer = vec![0u16; block.size_compressed];
if ctl == 0 {
bitmap[m_dst] = part[j];
m_dst += 1;
for word in buffer.iter_mut() {
*word = input.read_u16::<LittleEndian>().unwrap();
}
let raw_buf = decompress_lzw(&buffer, block.size_raw);
output_buf.write_all(&raw_buf)?;
}
Ok(output_buf)
}
fn decompress_lzw(
input_data: &[u16],
size: usize
) -> Vec<u8> {
let mut dictionary: HashMap<u16, Vec<u8>> = HashMap::new();
for i in 0..256 {
dictionary.insert(i as u16, vec![i as u8]);
}
let mut dictionary_count = dictionary.len() as u16;
let mut w = vec![0];
let mut result = Vec::with_capacity(size);
input_data.iter().for_each(|element| {
let mut entry;
if let Some(x) = dictionary.get(element) {
entry = x.clone();
} else if *element == dictionary_count {
entry = w.clone();
entry.push(w[0]);
} else {
m_dst += copy_range(&mut bitmap, &part, get_offset(&part, j), m_dst);
}
}
panic!("Bad compressed element: {}", element)
}
bitmap.truncate(chunk_info.total_size_raw);
result.write_all(&entry).unwrap();
w.push(entry[0]);
Ok(bitmap)
dictionary.insert(dictionary_count, w.clone());
dictionary_count += 1;
w = entry;
});
result
}
/// Decode the little-endian 16-bit code stored at `src` and map it to a byte
/// offset into the compressed stream: the stored value is biased by 0x101 and
/// each encoded pair occupies two bytes.
fn get_offset(input: &[u8], src: usize) -> usize {
    let code = u16::from_le_bytes([input[src], input[src + 1]]) as usize;
    (code - 0x101) * 2
}
/// Expand one compressed 4-byte group (two `(value, control)` pairs) starting
/// at `src` into `bitmap` at `dst`, returning the number of bytes written.
///
/// For each pair:
/// * `control == 0` — `value` is emitted as a literal byte;
/// * the pair's offset points back at itself — a fixed byte is emitted
///   (0 for the first pair, the group's first output byte for the second);
/// * otherwise — the pair is a back-reference resolved through `get_offset`
///   (recursively for the first pair, via `copy_one` for the second).
///
/// NOTE(review): the meaning of the self-reference cases is inferred from the
/// code shape; confirm against the CZ compression format documentation.
///
/// Takes `&mut [u8]` rather than `&mut Vec<u8>` (clippy `ptr_arg`): no Vec
/// capability is used and `&mut Vec<u8>` arguments coerce at call sites.
fn copy_range(bitmap: &mut [u8], input: &[u8], src: usize, dst: usize) -> usize {
    let mut dst = dst;
    let start_pos = dst;

    // First pair: literal, self-reference, or recursive back-reference.
    if input[src + 1] == 0 {
        bitmap[dst] = input[src];
        dst += 1;
    } else if get_offset(input, src) == src {
        bitmap[dst] = 0;
        dst += 1;
    } else {
        dst += copy_range(bitmap, input, get_offset(input, src), dst);
    }

    // Second pair: literal, self-reference (repeats the group's first output
    // byte), or a single byte resolved through the reference chain.
    if input[src + 3] == 0 {
        bitmap[dst] = input[src + 2];
        dst += 1;
    } else if get_offset(input, src + 2) == src {
        bitmap[dst] = bitmap[start_pos];
        dst += 1;
    } else {
        bitmap[dst] = copy_one(input, get_offset(input, src + 2));
        dst += 1;
    }

    dst - start_pos
}
fn copy_one(input: &[u8], src: usize) -> u8 {
if input[src + 1] == 0 {
input[src]
} else if get_offset(input, src) == src {
0
} else {
copy_one(input, get_offset(input, src))
}
}
/// Decompress an LZW compressed stream like CZ2
pub fn decompress2<T: Seek + ReadBytesExt + Read>(
@ -177,7 +166,10 @@ pub fn decompress2<T: Seek + ReadBytesExt + Read>(
Ok(output_buf)
}
fn decompress_lzw2(input_data: &[u8], size: usize) -> Vec<u8> {
fn decompress_lzw2(
input_data: &[u8],
size: usize
) -> Vec<u8> {
let mut data = input_data.to_vec();
data[0] = 0;
let mut dictionary = HashMap::new();
@ -244,7 +236,7 @@ pub fn compress(
let mut output_buf: Vec<u8> = vec![];
let mut output_info = CompressionInfo {
total_size_raw: data.len(),
_total_size_raw: data.len(),
..Default::default()
};
@ -279,7 +271,11 @@ pub fn compress(
(output_buf, output_info)
}
fn compress_lzw(data: &[u8], size: usize, last: Vec<u8>) -> (usize, Vec<u16>, Vec<u8>) {
fn compress_lzw(
data: &[u8],
size: usize,
last: Vec<u8>
) -> (usize, Vec<u16>, Vec<u8>) {
let mut count = 0;
let mut dictionary = HashMap::new();
for i in 0..=255 {
@ -331,7 +327,10 @@ fn compress_lzw(data: &[u8], size: usize, last: Vec<u8>) -> (usize, Vec<u16>, Ve
(count, compressed, last_element)
}
pub fn compress2(data: &[u8], size: usize) -> (Vec<u8>, CompressionInfo) {
pub fn compress2(
data: &[u8],
size: usize
) -> (Vec<u8>, CompressionInfo) {
let size = if size == 0 { 0x87BDF } else { size };
let mut part_data;
@ -342,7 +341,7 @@ pub fn compress2(data: &[u8], size: usize) -> (Vec<u8>, CompressionInfo) {
let mut output_buf: Vec<u8> = Vec::new();
let mut output_info = CompressionInfo {
total_size_raw: data.len(),
_total_size_raw: data.len(),
..Default::default()
};
@ -374,7 +373,11 @@ pub fn compress2(data: &[u8], size: usize) -> (Vec<u8>, CompressionInfo) {
(output_buf, output_info)
}
fn compress_lzw2(data: &[u8], size: usize, last: Vec<u8>) -> (usize, Vec<u8>, Vec<u8>) {
fn compress_lzw2(
data: &[u8],
size: usize,
last: Vec<u8>
) -> (usize, Vec<u8>, Vec<u8>) {
let mut data = data.to_vec();
if !data.is_empty() {
data[0] = 0;

View file

@ -113,7 +113,7 @@ impl DynamicCz {
/// to change the CZ# version.
pub fn save_as_cz<T: Into<std::path::PathBuf>>(&self, path: T) -> Result<(), CzError> {
let mut out_file = BufWriter::new(File::create(path.into())?);
let mut header = self.header().clone();
let mut header = *self.header();
if header.version() == CzVersion::CZ2 {
header.set_length(0x12)

View file

@ -55,9 +55,9 @@ fn line_diff(header: &CommonHeader, data: &[u8]) -> Vec<u8> {
let mut curr_line;
let mut prev_line = Vec::with_capacity(line_byte_count);
let mut i = 0;
let mut index = 0;
for y in 0..height {
curr_line = data[i..i + line_byte_count].to_vec();
curr_line = data[index..index + line_byte_count].to_vec();
if y % block_height as u32 != 0 {
curr_line.iter_mut().zip(&prev_line).for_each(|(curr_p, prev_p)| {
@ -67,7 +67,7 @@ fn line_diff(header: &CommonHeader, data: &[u8]) -> Vec<u8> {
prev_line.clone_from(&curr_line);
if pixel_byte_count == 4 {
output_buf[i..i + line_byte_count].copy_from_slice(&curr_line);
output_buf[index..index + line_byte_count].copy_from_slice(&curr_line);
} else if pixel_byte_count == 3 {
for x in (0..line_byte_count).step_by(3) {
let loc = (y * 3 * width) as usize + x;
@ -86,7 +86,7 @@ fn line_diff(header: &CommonHeader, data: &[u8]) -> Vec<u8> {
}
}
i += line_byte_count;
index += line_byte_count;
}
output_buf

View file

@ -68,18 +68,23 @@ fn line_diff(header: &CommonHeader, data: &[u8]) -> Vec<u8> {
});
}
for x in 0..width as usize {
let pos = x * 3;
// Write the decoded RGBA data to the final buffer
curr_line
.windows(3)
.step_by(3)
.zip(&curr_alpha)
.for_each(|(curr_p, alpha_p)| {
output_buf.extend_from_slice(&[
curr_line[pos],
curr_line[pos + 1],
curr_line[pos + 2],
curr_alpha[x],
curr_p[0],
curr_p[1],
curr_p[2],
*alpha_p,
]);
}
});
prev_line.clone_from(&curr_line);
prev_alpha.clone_from(&curr_alpha);
rgb_index += width as usize * 3;
alpha_index += width as usize;
}