More PAK work, fixed various CZ issues

G2-Games 2024-06-26 20:38:26 -05:00
parent 6b8bfa6405
commit 1e8683b25b
9 changed files with 190 additions and 119 deletions


@@ -68,10 +68,10 @@ pub fn get_chunk_info<T: Seek + ReadBytesExt + Read>(
// Loop over the compressed bytes
for _ in 0..parts_count {
let compressed_size = bytes.read_u32::<LittleEndian>()?;
total_size += compressed_size;
total_size = i32::wrapping_add(total_size, compressed_size as i32);
let raw_size = bytes.read_u32::<LittleEndian>()?;
total_size_raw += raw_size;
total_size_raw = u32::wrapping_add(total_size_raw, raw_size);
part_sizes.push(ChunkInfo {
size_compressed: compressed_size as usize,
@@ -159,7 +159,7 @@ fn copy_one(input: &[u8], src: usize) -> u8 {
}
/// Decompress an LZW compressed stream like CZ2
pub fn decompress_2<T: Seek + ReadBytesExt + Read>(
pub fn decompress2<T: Seek + ReadBytesExt + Read>(
input: &mut T,
chunk_info: &CompressionInfo,
) -> Result<Vec<u8>, CzError> {
@@ -348,11 +348,9 @@ pub fn compress2(data: &[u8], size: usize) -> (Vec<u8>, CompressionInfo) {
loop {
(count, part_data, last) = compress_lzw2(&data[offset..], size, last);
if count == 0 {
break;
}
offset += count;
output_buf.write_all(&part_data).unwrap();
@@ -378,7 +376,7 @@ fn compress_lzw2(data: &[u8], size: usize, last: Vec<u8>) -> (usize, Vec<u8>, Vec<u8>) {
fn compress_lzw2(data: &[u8], size: usize, last: Vec<u8>) -> (usize, Vec<u8>, Vec<u8>) {
let mut data = data.to_vec();
if data.is_empty() {
if !data.is_empty() {
data[0] = 0;
}
let mut count = 0;
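A note on the arithmetic change above: switching the running totals from `+=` to `wrapping_add` avoids the overflow panic a debug build raises when a malformed chunk table pushes the sum past the integer's range; the total simply wraps instead. A minimal sketch of the difference, with made-up values:

    fn main() {
        let total: u32 = u32::MAX - 5;
        let chunk: u32 = 10;

        // `total + chunk` would panic in a debug build ("attempt to add with overflow").
        // `wrapping_add` wraps around modulo 2^32 instead.
        let wrapped = total.wrapping_add(chunk);
        assert_eq!(wrapped, 4);
    }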


@@ -113,10 +113,14 @@ impl DynamicCz {
/// to change the CZ# version.
pub fn save_as_cz<T: Into<std::path::PathBuf>>(&self, path: T) -> Result<(), CzError> {
let mut out_file = BufWriter::new(File::create(path.into())?);
let mut header = self.header().clone();
self.header_common.write_into(&mut out_file)?;
if header.version() == CzVersion::CZ2 {
header.set_length(0x12)
}
header.write_into(&mut out_file)?;
if self.header().version() == CzVersion::CZ2 {
if header.version() == CzVersion::CZ2 {
// CZ2 files have this odd section instead of an extended header...?
out_file.write_all(&[0, 0, 0])?;
} else if let Some(ext) = self.header_extended {
@@ -124,7 +128,7 @@ impl DynamicCz {
}
let output_bitmap;
match self.header_common.depth() {
match header.depth() {
4 => {
eprintln!("Files with a bit depth of 4 are not yet supported");
todo!()
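A note on the header handling above: `save_as_cz` now clones the header so the CZ2-specific length tweak only touches the copy being written out, not the image's own header. A small, hypothetical sketch of that clone-then-mutate pattern (this `Header` is made up, not the crate's real type):

    #[derive(Clone)]
    struct Header {
        length: u16,
    }

    impl Header {
        fn set_length(&mut self, length: u16) {
            self.length = length;
        }
    }

    fn main() {
        let stored = Header { length: 0x0F };

        // Clone before tweaking, so the in-memory header keeps its original
        // length while the serialized copy gets the CZ2-specific value.
        let mut on_disk = stored.clone();
        on_disk.set_length(0x12);

        assert_eq!(stored.length, 0x0F);
        assert_eq!(on_disk.length, 0x12);
    }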


@@ -5,7 +5,7 @@ use crate::common::CzError;
use crate::compression::{compress, decompress, get_chunk_info};
pub fn decode<T: Seek + ReadBytesExt + Read>(bytes: &mut T) -> Result<Vec<u8>, CzError> {
// Get the information about the compressed chunks
// Get information about the compressed chunks
let block_info = get_chunk_info(bytes)?;
bytes.seek(SeekFrom::Start(block_info.length as u64))?;


@@ -2,13 +2,15 @@ use byteorder::{ReadBytesExt, WriteBytesExt};
use std::io::{Read, Seek, SeekFrom, Write};
use crate::common::CzError;
use crate::compression::{compress2, decompress_2, get_chunk_info};
use crate::compression::{compress2, decompress2, get_chunk_info};
pub fn decode<T: Seek + ReadBytesExt + Read>(bytes: &mut T) -> Result<Vec<u8>, CzError> {
let block_info = get_chunk_info(bytes)?;
// Get information about the compressed chunks
let block_info = get_chunk_info(bytes).unwrap();
bytes.seek(SeekFrom::Start(block_info.length as u64))?;
let bitmap = decompress_2(bytes, &block_info).unwrap();
// Get the bitmap
let bitmap = decompress2(bytes, &block_info).unwrap();
Ok(bitmap)
}
@@ -16,8 +18,6 @@ pub fn decode<T: Seek + ReadBytesExt + Read>(bytes: &mut T) -> Result<Vec<u8>, C
pub fn encode<T: WriteBytesExt + Write>(output: &mut T, bitmap: &[u8]) -> Result<(), CzError> {
let (compressed_data, compressed_info) = compress2(bitmap, 0x87BDF);
dbg!(&compressed_info);
compressed_info.write_into(output)?;
output.write_all(&compressed_data)?;


@@ -3,17 +3,29 @@ use std::{error::Error, fs::File, io::{BufWriter, Write}, path::Path};
/// A single file entry in a PAK file
#[derive(Debug, Clone)]
pub struct Entry {
/// The location within the PAK file, this number is multiplied by the
/// block size
pub(super) offset: u32,
/// The size of the entry in bytes
pub(super) length: u32,
/// The actual data which make up the entry
pub(super) data: Vec<u8>,
pub(super) name: String,
pub(super) id: u8,
/// The name of the entry as stored in the PAK
pub(super) name: Option<String>,
pub(super) unknown1: Option<u32>,
/// The ID of the entry, effectively an index
pub(super) id: u32,
pub(super) replace: bool, // TODO: Look into a better way to indicate this
}
impl Entry {
/// Get the name of the [`Entry`]
pub fn name(&self) -> &String {
pub fn name(&self) -> &Option<String> {
&self.name
}
@@ -25,7 +37,11 @@ impl Entry {
}
// Save the file to <folder> + <file name>
path.push(&self.name);
if let Some(name) = &self.name {
path.push(&name);
} else {
path.push(&self.id.to_string())
}
let mut out_file = BufWriter::new(File::create(path)?);

luca_pak/src/header.rs (new file, 40 lines added)

@@ -0,0 +1,40 @@
/// The header of a PAK file
#[derive(Debug, Clone)]
pub struct Header {
/// The starting position of the data within the PAK file
pub(super) data_offset: u32,
/// The number of entries within the PAK
pub(super) entry_count: u32,
pub(super) id_start: u32,
pub(super) block_size: u32,
pub(super) unknown1: u32,
pub(super) unknown2: u32,
pub(super) unknown3: u32,
pub(super) unknown4: u32,
pub(super) flags: u32,
}
impl Header {
pub fn block_size(&self) -> u32 {
self.block_size
}
pub fn id_start(&self) -> u32 {
self.id_start
}
pub fn entry_count(&self) -> u32 {
self.entry_count
}
pub fn data_offset(&self) -> u32 {
self.data_offset
}
pub fn flags(&self) -> u32 {
self.flags
}
}
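The `block_size` getter matters because entry offsets in a PAK are stored in blocks rather than bytes, as the doc comment on `Entry::offset` above and the seek in `Pak::decode` below both indicate. A small sketch of the conversion, with made-up numbers:

    // An entry's byte position is its stored block offset times the header's block
    // size, matching the `offsets[i].0 * block_size` seek in `Pak::decode`.
    fn entry_byte_position(block_offset: u32, block_size: u32) -> u64 {
        block_offset as u64 * block_size as u64
    }

    fn main() {
        // e.g. an entry starting at block 12 in a PAK with 2048-byte blocks
        assert_eq!(entry_byte_position(12, 2048), 24_576);
    }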


@@ -1,51 +1,14 @@
mod entry;
mod header;
use std::{fs::File, io::{self, BufRead, BufReader, Read, Seek, SeekFrom}, path::Path};
use std::{fs::File, io::{self, BufRead, BufReader, Read, Seek, SeekFrom}, path::{Path, PathBuf}};
use byteorder::{LittleEndian, ReadBytesExt};
use header::Header;
use thiserror::Error;
use crate::entry::Entry;
/// A full PAK file with a header and its contents
#[derive(Debug, Clone)]
pub struct Pak {
header: Header,
files: Vec<Entry>,
file_name: String,
rebuild: bool, // TODO: Look into a better way to indicate this
}
/// The header of a PAK file
#[derive(Debug, Clone)]
struct Header {
data_offset: u32,
file_count: u32,
id_start: u32,
block_size: u32,
unknown1: u32,
unknown2: u32,
unknown3: u32,
unknown4: u32,
flags: u32,
}
impl Header {
pub fn block_size(&self) -> u32 {
self.block_size
}
pub fn file_count(&self) -> u32 {
self.file_count
}
pub fn data_offset(&self) -> u32 {
self.data_offset
}
}
/// An error associated with a PAK file
#[derive(Error, Debug)]
pub enum PakError {
#[error("Could not read/write file")]
@@ -58,44 +21,76 @@ pub enum PakError {
HeaderError,
}
/// A full PAK file with a header and its contents
#[derive(Debug, Clone)]
pub struct Pak {
header: Header,
unknown_pre_data: Vec<u32>,
entries: Vec<Entry>,
unknown_flag_data: Vec<u8>,
path: PathBuf,
rebuild: bool, // TODO: Look into a better way to indicate this, or if it's needed at all
}
pub struct PakFlags(u32);
impl PakFlags {
pub fn has_names(&self) -> bool {
// 0b01000000000
self.0 & 0x200 != 0
}
pub fn has_offsets(&self) -> bool {
// 0b10000000000
self.0 & 0x400 != 0
}
}
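The new `PakFlags` helpers just test single bits of the header's flags word, which records whether a PAK carries names and per-entry offsets. A quick, self-contained usage sketch; the flag value itself is made up:

    struct PakFlags(u32);

    impl PakFlags {
        fn has_names(&self) -> bool {
            self.0 & 0x200 != 0
        }
        fn has_offsets(&self) -> bool {
            self.0 & 0x400 != 0
        }
    }

    fn main() {
        // Hypothetical flags word with both the name (0x200) and offset (0x400) bits set.
        let flags = PakFlags(0x600);
        assert!(flags.has_names());
        assert!(flags.has_offsets());
        println!("{:#013b}", flags.0); // 0b11000000000
    }

The binary print in the example program further down (`{:#032b}`) is the same idea, just padded wide enough to inspect every bit of a real file's flags.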
type LE = LittleEndian;
impl Pak {
/// Convenience method to open a PAK file from a path and decode it
pub fn open<P: ?Sized + AsRef<Path>>(path: &P) -> Result<Self, PakError> {
let mut file = File::open(path)?;
let filename = path.as_ref().file_name().unwrap().to_string_lossy().to_string();
Pak::decode(&mut file, filename)
Pak::decode(&mut file, path.as_ref().to_path_buf())
}
pub fn decode<T: Seek + ReadBytesExt + Read>(input: &mut T, file_name: String) -> Result<Self, PakError> {
/// Decode a PAK file from a byte stream
pub fn decode<T: Seek + ReadBytesExt + Read>(input: &mut T, path: PathBuf) -> Result<Self, PakError> {
let mut input = BufReader::new(input);
// Read in all the header bytes
let header = Header {
data_offset: input.read_u32::<LE>().unwrap(),
file_count: input.read_u32::<LE>().unwrap(),
id_start: input.read_u32::<LE>().unwrap(),
block_size: input.read_u32::<LE>().unwrap(),
unknown1: input.read_u32::<LE>().unwrap(),
unknown2: input.read_u32::<LE>().unwrap(),
unknown3: input.read_u32::<LE>().unwrap(),
unknown4: input.read_u32::<LE>().unwrap(),
flags: input.read_u32::<LE>().unwrap(),
data_offset: input.read_u32::<LE>()?,
entry_count: input.read_u32::<LE>()?,
id_start: input.read_u32::<LE>()?,
block_size: input.read_u32::<LE>()?,
unknown1: input.read_u32::<LE>()?,
unknown2: input.read_u32::<LE>()?,
unknown3: input.read_u32::<LE>()?,
unknown4: input.read_u32::<LE>()?,
flags: input.read_u32::<LE>()?,
};
dbg!(&header);
let first_offset = header.data_offset() / header.block_size();
// Seek to the end of the header
input.seek(io::SeekFrom::Start(0x24))?;
// Read some unknown data before the data we want
let mut unknown_pre_data = Vec::new();
while input.stream_position()? < header.data_offset() as u64 {
if input.read_u32::<LE>().unwrap() == first_offset {
let unknown = input.read_u32::<LE>()?;
if unknown == first_offset {
input.seek_relative(-4)?;
break;
}
unknown_pre_data.push(unknown);
}
dbg!(unknown_pre_data.len());
if input.stream_position()? == header.data_offset() as u64 {
return Err(PakError::HeaderError)
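The unknown pre-data loop above uses a peek-and-rewind pattern: read a u32, and if it turns out to be the first entry offset (`data_offset / block_size`), step back four bytes so the offset table can be read from its start. A self-contained sketch of the same idea using a `Cursor` and made-up values; the real loop does this with `BufReader::seek_relative`:

    use std::io::{Cursor, Seek, SeekFrom};
    use byteorder::{LittleEndian, ReadBytesExt};

    fn main() -> std::io::Result<()> {
        // Two little-endian u32s: an unknown value (1) followed by the sentinel (42).
        let mut input = Cursor::new(vec![1, 0, 0, 0, 42, 0, 0, 0]);
        let sentinel = 42;

        let mut unknown_pre_data = Vec::new();
        loop {
            let value = input.read_u32::<LittleEndian>()?;
            if value == sentinel {
                // Step back over the four bytes just consumed so the next
                // reader starts exactly at the sentinel.
                input.seek(SeekFrom::Current(-4))?;
                break;
            }
            unknown_pre_data.push(value);
        }

        assert_eq!(unknown_pre_data, vec![1]);
        assert_eq!(input.position(), 4);
        Ok(())
    }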
@@ -103,65 +98,83 @@ impl Pak {
// Read all the offsets and lengths
let mut offsets = Vec::new();
for _ in 0..header.file_count() {
for _ in 0..header.entry_count() {
let offset = input.read_u32::<LE>().unwrap();
let length = input.read_u32::<LE>().unwrap();
dbg!(offset);
dbg!(length);
offsets.push((offset, length));
}
// Read all the file names
let mut file_names = Vec::new();
let mut buf = Vec::new();
for _ in 0..header.file_count() {
buf.clear();
input.read_until(0x00, &mut buf)?;
buf.pop();
let mut string_buf = Vec::new();
for _ in 0..header.entry_count() {
string_buf.clear();
input.read_until(0x00, &mut string_buf)?;
string_buf.pop();
let strbuf = String::from_utf8(buf.clone()).unwrap();
let strbuf = String::from_utf8_lossy(&string_buf).to_string();
file_names.push(strbuf.clone());
}
dbg!(&file_names);
let unknown_flag_size = header.data_offset() as u64 - input.stream_position()?;
let mut unknown_flag_data = vec![0u8; unknown_flag_size as usize];
input.read_exact(&mut unknown_flag_data)?;
// Read all entry data
let mut entries: Vec<Entry> = Vec::new();
for i in 0..header.file_count() as usize {
dbg!(i);
for i in 0..header.entry_count() as usize {
// Seek to and read the entry data
input.seek(SeekFrom::Start(offsets[i].0 as u64 * header.block_size() as u64)).unwrap();
let mut data = vec![0u8; offsets[i].1 as usize];
input.read_exact(&mut data).unwrap();
// Build the entry from the data we know
// Build the entry from the data we now know
let entry = Entry {
offset: offsets[i].0,
length: offsets[i].1,
data,
name: file_names[i].clone(),
id: 0,
name: Some(file_names[i].clone()),
id: header.id_start + i as u32,
replace: false,
};
entries.push(entry);
}
println!("Got entries for {} files", entries.len());
Ok(Pak {
header,
files: entries,
file_name,
unknown_pre_data,
entries,
unknown_flag_data,
path,
rebuild: false,
})
}
pub fn get_file(&self, index: u32) -> Option<&Entry> {
self.files.get(index as usize)
/// Get the header information from the PAK
pub fn header(&self) -> &Header {
&self.header
}
pub fn files(&self) -> &Vec<Entry> {
&self.files
/// Get an individual entry from the PAK by its index
pub fn get_entry(&self, index: u32) -> Option<&Entry> {
self.entries.get(index as usize)
}
/// Get an individual entry from the PAK by its ID
pub fn get_entry_by_id(&self, id: u32) -> Option<&Entry> {
self.entries.get((id - self.header.id_start) as usize)
}
/// Get a list of all entries from the PAK
pub fn entries(&self) -> &Vec<Entry> {
&self.entries
}
pub fn contains_name(&self, name: String) -> bool {
self.entries
.iter()
.find(|e|
e.name.as_ref().is_some_and(|n| n == &name)
).is_some()
}
}
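Two smaller points from this hunk: file names in the PAK are NUL-terminated strings read with `read_until`, and the switch from `String::from_utf8(...).unwrap()` to `from_utf8_lossy` means a name with invalid UTF-8 no longer panics. A short sketch with made-up names:

    use std::io::{BufRead, Cursor};

    fn main() -> std::io::Result<()> {
        // Two NUL-terminated names, laid out the way the PAK's name table is read.
        let mut input = Cursor::new(b"FIRST\0SECOND\0".to_vec());

        let mut names = Vec::new();
        let mut string_buf = Vec::new();
        for _ in 0..2 {
            string_buf.clear();
            input.read_until(0x00, &mut string_buf)?;
            string_buf.pop(); // drop the trailing NUL
            // Invalid UTF-8 becomes U+FFFD instead of returning an error.
            names.push(String::from_utf8_lossy(&string_buf).to_string());
        }

        assert_eq!(names, vec!["FIRST", "SECOND"]);
        Ok(())
    }

Relatedly, `get_entry_by_id` maps an ID back to a vector index by subtracting `header.id_start`, mirroring how each `id` is assigned (`id_start + i`) while the entries are built.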


@@ -1,11 +1,11 @@
use luca_pak::Pak;
fn main() {
let pak = Pak::open("PARAM.PAK").unwrap();
let pak = Pak::open("MANUAL.PAK").unwrap();
println!("{:#032b}", pak.header().flags());
let file = pak.get_file(0).unwrap();
dbg!(pak.files());
file.save("test").unwrap();
for entry in pak.entries() {
println!("{}", entry.name().as_ref().unwrap());
println!("{}", entry);
}
}


@@ -51,6 +51,10 @@ enum Commands {
/// Output CZ file version
#[arg(short, long, value_name = "CZ VERSION")]
version: Option<u8>,
/// Output CZ file bit depth
#[arg(short, long, value_name = "BIT DEPTH")]
depth: Option<u8>,
}
}
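The new `depth` flag is a standard clap derive optional value. A minimal, hypothetical stand-alone version (not the tool's actual CLI struct):

    use clap::Parser;

    #[derive(Parser)]
    struct Args {
        /// Output CZ file bit depth
        #[arg(short, long, value_name = "BIT DEPTH")]
        depth: Option<u8>,
    }

    fn main() {
        let args = Args::parse();

        // Only override the bit depth when the flag was actually passed.
        if let Some(depth) = args.depth {
            println!("overriding bit depth to {depth}");
        }
    }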
@@ -108,15 +112,7 @@ fn main() {
cz.save_as_png(&final_path).unwrap();
}
} else {
let cz = match DynamicCz::open(input) {
Ok(cz) => cz,
Err(err) => {
Error::raw(
ErrorKind::ValueValidation,
format!("Could not open input as a CZ file: {}\n", err)
).exit()
},
};
let cz = DynamicCz::open(input).unwrap();
if let Some(output) = output {
cz.save_as_png(output).unwrap();
@@ -126,7 +122,7 @@ }
}
}
}
Commands::Replace { batch, input, replacement, output, version } => {
Commands::Replace { batch, input, replacement, output, version, depth } => {
if !input.exists() {
Error::raw(
ErrorKind::ValueValidation,
@@ -204,6 +200,10 @@ fn main() {
cz.set_bitmap(repl_img.into_raw());
cz.remove_palette();
if let Some(depth) = depth {
cz.header_mut().set_depth(*depth as u16)
}
if let Some(ver) = version {
match cz.header_mut().set_version(*ver) {
Ok(_) => (),