diff --git a/cz/src/compression.rs b/cz/src/compression.rs
index f121278..4ab9d31 100644
--- a/cz/src/compression.rs
+++ b/cz/src/compression.rs
@@ -1,4 +1,4 @@
-use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
+use byteorder::{ReadBytesExt, WriteBytesExt, LE};
 use std::{
     collections::HashMap,
     io::{Read, Seek, Write},
@@ -41,11 +41,11 @@ impl CompressionInfo {
         &self,
         output: &mut T,
     ) -> Result<(), std::io::Error> {
-        output.write_u32::<LittleEndian>(self.chunk_count as u32)?;
+        output.write_u32::<LE>(self.chunk_count as u32)?;
 
         for chunk in &self.chunks {
-            output.write_u32::<LittleEndian>(chunk.size_compressed as u32)?;
-            output.write_u32::<LittleEndian>(chunk.size_raw as u32)?;
+            output.write_u32::<LE>(chunk.size_compressed as u32)?;
+            output.write_u32::<LE>(chunk.size_raw as u32)?;
         }
 
         Ok(())
@@ -59,7 +59,7 @@ impl CompressionInfo {
 pub fn get_chunk_info<T: Seek + ReadBytesExt + Read>(
     bytes: &mut T,
 ) -> Result<CompressionInfo, CzError> {
-    let parts_count = bytes.read_u32::<LittleEndian>()?;
+    let parts_count = bytes.read_u32::<LE>()?;
 
     let mut part_sizes = vec![];
     let mut total_size = 0;
@@ -67,10 +67,10 @@ pub fn get_chunk_info<T: Seek + ReadBytesExt + Read>(
 
     // Loop over the compressed bytes
     for _ in 0..parts_count {
-        let compressed_size = bytes.read_u32::<LittleEndian>()?;
+        let compressed_size = bytes.read_u32::<LE>()?;
         total_size = i32::wrapping_add(total_size, compressed_size as i32);
 
-        let raw_size = bytes.read_u32::<LittleEndian>()?;
+        let raw_size = bytes.read_u32::<LE>()?;
         total_size_raw = u32::wrapping_add(total_size_raw, raw_size);
 
         part_sizes.push(ChunkInfo {
@@ -99,7 +99,7 @@ pub fn decompress<T: Seek + ReadBytesExt + Read>(
     let mut buffer = vec![0u16; block.size_compressed];
 
     for word in buffer.iter_mut() {
-        *word = input.read_u16::<LittleEndian>().unwrap();
+        *word = input.read_u16::<LE>().unwrap();
     }
 
     let raw_buf = decompress_lzw(&buffer, block.size_raw);
diff --git a/documents/cz_format_spec.odt b/documents/cz_format_spec.odt
new file mode 100644
index 0000000..18bb182
Binary files /dev/null and b/documents/cz_format_spec.odt differ