Ran cargo fmt

G2-Games 2024-07-05 00:11:09 -05:00
parent a1b4b04208
commit 721e61f98c
13 changed files with 173 additions and 145 deletions

View file

@@ -1,17 +1,17 @@
use byteorder::ReadBytesExt;
use imagequant::Attributes;
use rgb::{ComponentSlice, RGBA8};
use std::{
collections::HashMap,
io::{Read, Seek},
};
use byteorder::ReadBytesExt;
use imagequant::Attributes;
use rgb::{ComponentSlice, RGBA8};
use crate::common::{CommonHeader, CzError};
/// A palette of RGBA values for indexed color
#[derive(Debug, Clone)]
pub struct Palette {
colors: Vec<RGBA8>
colors: Vec<RGBA8>,
}
impl Palette {
@@ -76,7 +76,8 @@ pub fn rgba_to_indexed(input: &[u8], palette: &Palette) -> Result<Vec<u8>, CzErr
let value = match cache.get(rgba) {
Some(val) => *val,
None => {
let value = palette.colors()
let value = palette
.colors()
.iter()
.position(|e| e.as_slice() == rgba)
.unwrap_or_default() as u8;
@@ -110,12 +111,9 @@ pub fn indexed_gen_palette(
let mut quant = Attributes::new();
quant.set_speed(1).unwrap();
let mut image = quant.new_image(
buf,
header.width() as usize,
header.height() as usize,
0.0
).unwrap();
let mut image = quant
.new_image(buf, header.width() as usize, header.height() as usize, 0.0)
.unwrap();
let mut quant_result = quant.quantize(&mut image).unwrap();

View file

@@ -159,7 +159,10 @@ impl CommonHeader {
self.version
}
pub fn set_version<I: TryInto<CzVersion> + Into<u32> + Clone>(&mut self, version: I) -> Result<(), CzError> {
pub fn set_version<I: TryInto<CzVersion> + Into<u32> + Clone>(
&mut self,
version: I,
) -> Result<(), CzError> {
self.version = match version.clone().try_into() {
Ok(val) => val,
Err(_) => return Err(CzError::InvalidVersion(version.into())),
@@ -200,10 +203,7 @@ impl CommonHeader {
self.unknown
}
pub fn write_into<T: WriteBytesExt + Write>(
&self,
output: &mut T,
) -> Result<(), io::Error> {
pub fn write_into<T: WriteBytesExt + Write>(&self, output: &mut T) -> Result<(), io::Error> {
let magic_bytes = [b'C', b'Z', b'0' + self.version as u8, b'\0'];
output.write_all(&magic_bytes)?;
@@ -324,10 +324,7 @@ impl ExtendedHeader {
})
}
pub fn write_into<T: WriteBytesExt + Write>(
&self,
output: &mut T,
) -> Result<(), io::Error> {
pub fn write_into<T: WriteBytesExt + Write>(&self, output: &mut T) -> Result<(), io::Error> {
output.write_all(&self.unknown_1)?;
output.write_u16::<LittleEndian>(self.crop_width)?;
output.write_u16::<LittleEndian>(self.crop_height)?;

View file

@@ -110,10 +110,7 @@ pub fn decompress<T: Seek + ReadBytesExt + Read>(
Ok(output_buf)
}
fn decompress_lzw(
input_data: &[u16],
size: usize
) -> Vec<u8> {
fn decompress_lzw(input_data: &[u16], size: usize) -> Vec<u8> {
let mut dictionary: HashMap<u16, Vec<u8>> = HashMap::new();
for i in 0..256 {
dictionary.insert(i as u16, vec![i as u8]);
@@ -146,7 +143,6 @@ fn decompress_lzw(
result
}
/// Decompress an LZW compressed stream like CZ2
pub fn decompress2<T: Seek + ReadBytesExt + Read>(
input: &mut T,
@@ -166,10 +162,7 @@ pub fn decompress2<T: Seek + ReadBytesExt + Read>(
Ok(output_buf)
}
fn decompress_lzw2(
input_data: &[u8],
size: usize
) -> Vec<u8> {
fn decompress_lzw2(input_data: &[u8], size: usize) -> Vec<u8> {
let mut data = input_data.to_vec();
data[0] = 0;
let mut dictionary = HashMap::new();
@@ -219,10 +212,7 @@ fn decompress_lzw2(
result
}
pub fn compress(
data: &[u8],
size: usize,
) -> (Vec<u8>, CompressionInfo) {
pub fn compress(data: &[u8], size: usize) -> (Vec<u8>, CompressionInfo) {
let mut size = size;
if size == 0 {
size = 0xFEFD
@@ -243,7 +233,7 @@ pub fn compress(
loop {
(count, part_data, last) = compress_lzw(&data[offset..], size, last);
if count == 0 {
break
break;
}
offset += count;
@@ -253,7 +243,7 @@ pub fn compress(
output_info.chunks.push(ChunkInfo {
size_compressed: part_data.len(),
size_raw: count
size_raw: count,
});
output_info.chunk_count += 1;
@@ -271,11 +261,7 @@ pub fn compress(
(output_buf, output_info)
}
fn compress_lzw(
data: &[u8],
size: usize,
last: Vec<u8>
) -> (usize, Vec<u16>, Vec<u8>) {
fn compress_lzw(data: &[u8], size: usize, last: Vec<u8>) -> (usize, Vec<u16>, Vec<u8>) {
let mut count = 0;
let mut dictionary = HashMap::new();
for i in 0..=255 {
@@ -305,7 +291,7 @@ fn compress_lzw(
count += 1;
if size > 0 && compressed.len() == size {
break
break;
}
}
@@ -316,21 +302,18 @@ fn compress_lzw(
compressed.push(*dictionary.get(&vec![c]).unwrap());
}
}
return (count, compressed, Vec::new())
return (count, compressed, Vec::new());
} else if compressed.len() < size {
if !last_element.is_empty() {
compressed.push(*dictionary.get(&last_element).unwrap());
}
return (count, compressed, Vec::new())
return (count, compressed, Vec::new());
}
(count, compressed, last_element)
}
pub fn compress2(
data: &[u8],
size: usize
) -> (Vec<u8>, CompressionInfo) {
pub fn compress2(data: &[u8], size: usize) -> (Vec<u8>, CompressionInfo) {
let size = if size == 0 { 0x87BDF } else { size };
let mut part_data;
@@ -373,11 +356,7 @@ pub fn compress2(
(output_buf, output_info)
}
fn compress_lzw2(
data: &[u8],
size: usize,
last: Vec<u8>
) -> (usize, Vec<u8>, Vec<u8>) {
fn compress_lzw2(data: &[u8], size: usize, last: Vec<u8>) -> (usize, Vec<u8>, Vec<u8>) {
let mut data = data.to_vec();
if !data.is_empty() {
data[0] = 0;

View file

@@ -106,10 +106,7 @@ impl DynamicCz {
/// Save the `DynamicCz` as a CZ# file. The format saved in is determined
/// from the format in the header. Check [`CommonHeader::set_version()`]
/// to change the CZ# version.
pub fn save_as_cz<P: ?Sized + AsRef<std::path::Path>>(
&self,
path: &P,
) -> Result<(), CzError> {
pub fn save_as_cz<P: ?Sized + AsRef<std::path::Path>>(&self, path: &P) -> Result<(), CzError> {
let mut out_file = BufWriter::new(File::create(path.as_ref())?);
self.encode(&mut out_file)?;
@@ -122,10 +119,7 @@ impl DynamicCz {
/// This encodes everything based on options the header which have been
/// set by the user. For example, to change the version of file to be
/// saved, use [`CommonHeader::set_version()`]
pub fn encode<T: Write>(
&self,
mut output: &mut T
) -> Result<(), CzError> {
pub fn encode<T: Write>(&self, mut output: &mut T) -> Result<(), CzError> {
let mut header = *self.header();
if header.version() == CzVersion::CZ2 {

View file

@@ -3,9 +3,7 @@ use std::io::{Read, Seek, Write};
use crate::common::CzError;
pub fn decode<T: Seek + ReadBytesExt + Read>(
input: &mut T
) -> Result<Vec<u8>, CzError> {
pub fn decode<T: Seek + ReadBytesExt + Read>(input: &mut T) -> Result<Vec<u8>, CzError> {
// Get the rest of the file, which is the bitmap
let mut bitmap = vec![];
input.read_to_end(&mut bitmap)?;
@@ -13,10 +11,7 @@ pub fn decode<T: Seek + ReadBytesExt + Read>(
Ok(bitmap)
}
pub fn encode<T: WriteBytesExt + Write>(
output: &mut T,
bitmap: &[u8]
) -> Result<(), CzError> {
pub fn encode<T: WriteBytesExt + Write>(output: &mut T, bitmap: &[u8]) -> Result<(), CzError> {
output.write_all(bitmap)?;
Ok(())

View file

@@ -4,9 +4,7 @@ use std::io::{Read, Seek, SeekFrom, Write};
use crate::common::CzError;
use crate::compression::{compress, decompress, get_chunk_info};
pub fn decode<T: Seek + ReadBytesExt + Read>(
bytes: &mut T
) -> Result<Vec<u8>, CzError> {
pub fn decode<T: Seek + ReadBytesExt + Read>(bytes: &mut T) -> Result<Vec<u8>, CzError> {
// Get information about the compressed chunks
let block_info = get_chunk_info(bytes)?;
bytes.seek(SeekFrom::Start(block_info.length as u64))?;
@@ -17,10 +15,7 @@ pub fn decode<T: Seek + ReadBytesExt + Read>(
Ok(bitmap)
}
pub fn encode<T: WriteBytesExt + Write>(
output: &mut T,
bitmap: &[u8]
) -> Result<(), CzError> {
pub fn encode<T: WriteBytesExt + Write>(output: &mut T, bitmap: &[u8]) -> Result<(), CzError> {
let (compressed_data, compressed_info) = compress(bitmap, 0xFEFD);
compressed_info.write_into(output)?;

View file

@@ -4,9 +4,7 @@ use std::io::{Read, Seek, SeekFrom, Write};
use crate::common::CzError;
use crate::compression::{compress2, decompress2, get_chunk_info};
pub fn decode<T: Seek + ReadBytesExt + Read>(
bytes: &mut T
) -> Result<Vec<u8>, CzError> {
pub fn decode<T: Seek + ReadBytesExt + Read>(bytes: &mut T) -> Result<Vec<u8>, CzError> {
// Get information about the compressed chunks
let block_info = get_chunk_info(bytes).unwrap();
bytes.seek(SeekFrom::Start(block_info.length as u64))?;
@@ -17,10 +15,7 @@ pub fn decode<T: Seek + ReadBytesExt + Read>(
Ok(bitmap)
}
pub fn encode<T: WriteBytesExt + Write>(
output: &mut T,
bitmap: &[u8]
) -> Result<(), CzError> {
pub fn encode<T: WriteBytesExt + Write>(output: &mut T, bitmap: &[u8]) -> Result<(), CzError> {
let (compressed_data, compressed_info) = compress2(bitmap, 0x87BDF);
compressed_info.write_into(output)?;

View file

@@ -1,7 +1,7 @@
use byteorder::{ReadBytesExt, WriteBytesExt};
use std::io::{Read, Seek, SeekFrom, Write};
use crate::common::{CzError, CommonHeader};
use crate::common::{CommonHeader, CzError};
use crate::compression::{compress, decompress, get_chunk_info};
pub fn decode<T: Seek + ReadBytesExt + Read>(
@@ -55,9 +55,10 @@ fn line_diff(header: &CommonHeader, data: &[u8]) -> Vec<u8> {
curr_line = data[index..index + line_byte_count].to_vec();
if y % block_height as u32 != 0 {
curr_line.iter_mut().zip(&prev_line).for_each(|(curr_p, prev_p)| {
*curr_p = curr_p.wrapping_add(*prev_p)
});
curr_line
.iter_mut()
.zip(&prev_line)
.for_each(|(curr_p, prev_p)| *curr_p = curr_p.wrapping_add(*prev_p));
}
prev_line.clone_from(&curr_line);

View file

@@ -1,7 +1,7 @@
use byteorder::{ReadBytesExt, WriteBytesExt};
use std::io::{Read, Seek, SeekFrom, Write};
use crate::common::{CzError, CommonHeader};
use crate::common::{CommonHeader, CzError};
use crate::compression::{compress, decompress, get_chunk_info};
pub fn decode<T: Seek + ReadBytesExt + Read>(
@@ -55,12 +55,18 @@ fn line_diff(header: &CommonHeader, data: &[u8]) -> Vec<u8> {
curr_alpha = data[alpha_index..alpha_index + width as usize].to_vec();
if y % block_height != 0 {
curr_line.iter_mut().zip(&prev_line).for_each(|(curr_p, prev_p)| {
*curr_p = curr_p.wrapping_add(*prev_p);
});
curr_alpha.iter_mut().zip(&prev_alpha).for_each(|(curr_a, prev_a)| {
*curr_a = curr_a.wrapping_add(*prev_a);
});
curr_line
.iter_mut()
.zip(&prev_line)
.for_each(|(curr_p, prev_p)| {
*curr_p = curr_p.wrapping_add(*prev_p);
});
curr_alpha
.iter_mut()
.zip(&prev_alpha)
.for_each(|(curr_a, prev_a)| {
*curr_a = curr_a.wrapping_add(*prev_a);
});
}
// Write the decoded RGBA data to the final buffer
@@ -69,12 +75,7 @@ fn line_diff(header: &CommonHeader, data: &[u8]) -> Vec<u8> {
.step_by(3)
.zip(&curr_alpha)
.for_each(|(curr_p, alpha_p)| {
output_buf.extend_from_slice(&[
curr_p[0],
curr_p[1],
curr_p[2],
*alpha_p,
]);
output_buf.extend_from_slice(&[curr_p[0], curr_p[1], curr_p[2], *alpha_p]);
});
prev_line.clone_from(&curr_line);
@@ -106,8 +107,18 @@ fn diff_line(header: &CommonHeader, input: &[u8]) -> Vec<u8> {
let mut i = 0;
for y in 0..height {
curr_line = input[i..i + line_byte_count].windows(4).step_by(4).flat_map(|r| &r[0..3]).copied().collect();
curr_alpha = input[i..i + line_byte_count].iter().skip(3).step_by(4).copied().collect();
curr_line = input[i..i + line_byte_count]
.windows(4)
.step_by(4)
.flat_map(|r| &r[0..3])
.copied()
.collect();
curr_alpha = input[i..i + line_byte_count]
.iter()
.skip(3)
.step_by(4)
.copied()
.collect();
if y % block_height as u32 != 0 {
for x in 0..width as usize * 3 {

View file

@@ -13,8 +13,8 @@ mod formats {
pub(crate) mod cz4;
}
use std::{io::BufReader, path::Path};
use common::CzError;
use std::{io::BufReader, path::Path};
/// Open a CZ# file from a path
pub fn open<P: ?Sized + AsRef<Path>>(path: &P) -> Result<DynamicCz, CzError> {

View file

@@ -1,4 +1,9 @@
use std::{error::Error, fs::File, io::{BufWriter, Write}, path::Path};
use std::{
error::Error,
fs::File,
io::{BufWriter, Write},
path::Path,
};
/// A single file entry in a PAK file
#[derive(Debug, Clone)]

View file

@@ -1,9 +1,13 @@
mod entry;
mod header;
use std::{fs::File, io::{self, BufRead, BufReader, Read, Seek, SeekFrom}, path::{Path, PathBuf}};
use byteorder::{LittleEndian, ReadBytesExt};
use header::Header;
use std::{
fs::File,
io::{self, BufRead, BufReader, Read, Seek, SeekFrom},
path::{Path, PathBuf},
};
use thiserror::Error;
use crate::entry::Entry;
@@ -61,7 +65,10 @@ impl Pak {
}
/// Decode a PAK file from a byte stream
pub fn decode<T: Seek + ReadBytesExt + Read>(input: &mut T, path: PathBuf) -> Result<Self, PakError> {
pub fn decode<T: Seek + ReadBytesExt + Read>(
input: &mut T,
path: PathBuf,
) -> Result<Self, PakError> {
let mut input = BufReader::new(input);
// Read in all the header bytes
@@ -93,7 +100,7 @@ impl Pak {
dbg!(unknown_pre_data.len());
if input.stream_position()? == header.data_offset() as u64 {
return Err(PakError::HeaderError)
return Err(PakError::HeaderError);
}
// Read all the offsets and lengths
@@ -124,7 +131,11 @@ impl Pak {
let mut entries: Vec<Entry> = Vec::new();
for i in 0..header.entry_count() as usize {
// Seek to and read the entry data
input.seek(SeekFrom::Start(offsets[i].0 as u64 * header.block_size() as u64)).unwrap();
input
.seek(SeekFrom::Start(
offsets[i].0 as u64 * header.block_size() as u64,
))
.unwrap();
let mut data = vec![0u8; offsets[i].1 as usize];
input.read_exact(&mut data).unwrap();
@@ -174,8 +185,7 @@ impl Pak {
pub fn contains_name(&self, name: String) -> bool {
self.entries
.iter()
.find(|e|
e.name.as_ref().is_some_and(|n| n == &name)
).is_some()
.find(|e| e.name.as_ref().is_some_and(|n| n == &name))
.is_some()
}
}

View file

@@ -1,5 +1,5 @@
use std::path::{Path, PathBuf};
use clap::{error::ErrorKind, Error, Parser, Subcommand};
use std::path::{Path, PathBuf};
#[derive(Parser)]
#[command(name = "CZ Utils")]
@@ -52,7 +52,7 @@ enum Commands {
/// Output CZ file bit depth
#[arg(short, long, value_name = "BIT DEPTH")]
depth: Option<u16>,
}
},
}
fn main() {
@@ -60,18 +60,34 @@ fn main() {
// Check what subcommand was run
match &cli.command {
Commands::Decode { input, output, batch } => {
Commands::Decode {
input,
output,
batch,
} => {
if !input.exists() {
Error::raw(ErrorKind::ValueValidation, "The input file/folder provided does not exist\n").exit()
Error::raw(
ErrorKind::ValueValidation,
"The input file/folder provided does not exist\n",
)
.exit()
}
if *batch {
if input.is_file() {
Error::raw(ErrorKind::ValueValidation, "Batch input must be a directory\n").exit()
Error::raw(
ErrorKind::ValueValidation,
"Batch input must be a directory\n",
)
.exit()
}
if output.is_none() || output.as_ref().unwrap().is_file() {
Error::raw(ErrorKind::ValueValidation, "Batch output must be a directory\n").exit()
Error::raw(
ErrorKind::ValueValidation,
"Batch output must be a directory\n",
)
.exit()
}
for entry in walkdir::WalkDir::new(input).max_depth(1) {
@@ -91,10 +107,15 @@ fn main() {
Err(_) => {
Error::raw(
ErrorKind::ValueValidation,
format!("Could not open input as a CZ file: {}\n", path.into_os_string().to_str().unwrap())
).print().unwrap();
format!(
"Could not open input as a CZ file: {}\n",
path.into_os_string().to_str().unwrap()
),
)
.print()
.unwrap();
continue;
},
}
};
cz.save_as_png(&final_path).unwrap();
@@ -110,27 +131,54 @@ fn main() {
}
}
}
Commands::Replace { batch, input, replacement, output, version, depth } => {
Commands::Replace {
batch,
input,
replacement,
output,
version,
depth,
} => {
if !input.exists() {
Error::raw(ErrorKind::ValueValidation, "The original file provided does not exist\n").exit()
Error::raw(
ErrorKind::ValueValidation,
"The original file provided does not exist\n",
)
.exit()
}
if !replacement.exists() {
Error::raw(ErrorKind::ValueValidation, "The replacement file provided does not exist\n").exit()
Error::raw(
ErrorKind::ValueValidation,
"The replacement file provided does not exist\n",
)
.exit()
}
// If it's a batch replacement, we want directories to search
if *batch {
if !input.is_dir() {
Error::raw(ErrorKind::ValueValidation, "Batch input location must be a directory\n").exit()
Error::raw(
ErrorKind::ValueValidation,
"Batch input location must be a directory\n",
)
.exit()
}
if !replacement.is_dir() {
Error::raw(ErrorKind::ValueValidation, "Batch replacement location must be a directory\n").exit()
Error::raw(
ErrorKind::ValueValidation,
"Batch replacement location must be a directory\n",
)
.exit()
}
if !output.is_dir() {
Error::raw(ErrorKind::ValueValidation, "Batch output location must be a directory\n").exit()
Error::raw(
ErrorKind::ValueValidation,
"Batch output location must be a directory\n",
)
.exit()
}
// Replace all the files within the directory and print errors for them
@@ -140,25 +188,27 @@ fn main() {
{
let path = entry.unwrap().into_path();
if !path.is_file() {
continue
continue;
}
// Set the replacement image to the same name as the original file
let mut final_replacement = replacement.to_path_buf();
final_replacement.push(PathBuf::from(path.file_name().unwrap()).with_extension("png"));
final_replacement
.push(PathBuf::from(path.file_name().unwrap()).with_extension("png"));
// Set the replacement image to the same name as the original file
let mut final_output = output.to_path_buf();
final_output.push(path.file_name().unwrap());
if let Err(error) = replace_cz(
&path,
&final_output,
&final_replacement,
version,
depth
) {
Error::raw(ErrorKind::ValueValidation, format!("{:?} - {}\n", path, error)).print().unwrap();
if let Err(error) =
replace_cz(&path, &final_output, &final_replacement, version, depth)
{
Error::raw(
ErrorKind::ValueValidation,
format!("{:?} - {}\n", path, error),
)
.print()
.unwrap();
}
}
} else {
@@ -171,19 +221,17 @@ fn main() {
}
if !output.is_file() {
Error::raw(ErrorKind::ValueValidation, "Replacement output must be a file\n").exit()
Error::raw(
ErrorKind::ValueValidation,
"Replacement output must be a file\n",
)
.exit()
}
// Replace the input file with the new image
replace_cz(
&input,
&output,
&replacement,
version,
depth
).unwrap();
replace_cz(&input, &output, &replacement, version, depth).unwrap();
}
},
}
}
}
@@ -197,11 +245,11 @@ fn replace_cz<P: ?Sized + AsRef<Path>>(
) -> Result<(), Box<dyn std::error::Error>> {
let path = input_path.as_ref();
if !path.is_file() {
return Err("Input path is not a file".into())
return Err("Input path is not a file".into());
}
if !replacement_path.as_ref().exists() || !replacement_path.as_ref().is_file() {
return Err("Replacement path does not exist or is not a file".into())
return Err("Replacement path does not exist or is not a file".into());
}
// Open the replacement image and convert it to RGBA8