Compare commits

No commits in common. "6b8bfa64055a24dc829c2421adf9d96d22d37c07" and "c371ae4e8190c2a4e4dcd71992879d040a7162aa" have entirely different histories.

8 changed files with 1 addition and 263 deletions

.gitignore (vendored) · 4 changes

@@ -19,8 +19,4 @@ Cargo.lock
 *.ttf
 *.otf
-# Ignore PAK files
-*.PAK
-*.pak
 test_files/*

@@ -2,7 +2,6 @@
 resolver = "2"
 members = [
     "cz",
-    "luca_pak",
     "utils",
 ]

@@ -34,7 +34,7 @@ impl DynamicCz {
     /// The input must begin with the
     /// [magic bytes](https://en.wikipedia.org/wiki/File_format#Magic_number)
     /// of the file
-    pub fn decode<T: Seek + ReadBytesExt + Read>(input: &mut T) -> Result<Self, CzError> {
+    fn decode<T: Seek + ReadBytesExt + Read>(input: &mut T) -> Result<Self, CzError> {
         // Get the header common to all CZ images
         let header_common = CommonHeader::from_bytes(input)?;
         let mut header_extended = None;

@@ -1,16 +0,0 @@
[package]
name = "luca_pak"
description = """
A crate for parsing and modifying PAK files from the LUCA System engine by
Prototype Ltd.
"""
version = "0.1.0"
edition = "2021"
authors.workspace = true

[dependencies]
byteorder = "1.5.0"
thiserror = "1.0.61"

[lints]
workspace = true

@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2024 G2

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -1,42 +0,0 @@
use std::{error::Error, fs::File, io::{BufWriter, Write}, path::Path};

/// A single file entry in a PAK file
#[derive(Debug, Clone)]
pub struct Entry {
    pub(super) offset: u32,
    pub(super) length: u32,
    pub(super) data: Vec<u8>,
    pub(super) name: String,
    pub(super) id: u8,
    pub(super) replace: bool, // TODO: Look into a better way to indicate this
}

impl Entry {
    /// Get the name of the [`Entry`]
    pub fn name(&self) -> &String {
        &self.name
    }

    /// Save an [`Entry`] as its underlying data to a file
    pub fn save<P: ?Sized + AsRef<Path>>(&self, path: &P) -> Result<(), Box<dyn Error>> {
        let mut path = path.as_ref().to_path_buf();
        if !path.is_dir() {
            return Err("Path must be a directory".into());
        }

        // Save the file to <folder> + <file name>
        path.push(&self.name);

        let mut out_file = BufWriter::new(File::create(path)?);

        out_file.write_all(&self.data)?;
        out_file.flush()?;

        Ok(())
    }

    /// Get the raw byte data of an [`Entry`]
    pub fn as_bytes(&self) -> &Vec<u8> {
        &self.data
    }
}

@@ -1,167 +0,0 @@
mod entry;

use std::{fs::File, io::{self, BufRead, BufReader, Read, Seek, SeekFrom}, path::Path};

use byteorder::{LittleEndian, ReadBytesExt};
use thiserror::Error;

use crate::entry::Entry;

/// A full PAK file with a header and its contents
#[derive(Debug, Clone)]
pub struct Pak {
    header: Header,
    files: Vec<Entry>,
    file_name: String,
    rebuild: bool, // TODO: Look into a better way to indicate this
}

/// The header of a PAK file
#[derive(Debug, Clone)]
struct Header {
    data_offset: u32,
    file_count: u32,
    id_start: u32,
    block_size: u32,
    unknown1: u32,
    unknown2: u32,
    unknown3: u32,
    unknown4: u32,
    flags: u32,
}

impl Header {
    pub fn block_size(&self) -> u32 {
        self.block_size
    }

    pub fn file_count(&self) -> u32 {
        self.file_count
    }

    pub fn data_offset(&self) -> u32 {
        self.data_offset
    }
}

#[derive(Error, Debug)]
pub enum PakError {
    #[error("Could not read/write file")]
    IoError(#[from] io::Error),

    #[error("Expected {} files, got {} in {}", 0, 1, 2)]
    FileCountMismatch(usize, usize, &'static str),

    #[error("Malformed header information")]
    HeaderError,
}

type LE = LittleEndian;

impl Pak {
    pub fn open<P: ?Sized + AsRef<Path>>(path: &P) -> Result<Self, PakError> {
        let mut file = File::open(path)?;
        let filename = path.as_ref().file_name().unwrap().to_string_lossy().to_string();

        Pak::decode(&mut file, filename)
    }

    pub fn decode<T: Seek + ReadBytesExt + Read>(input: &mut T, file_name: String) -> Result<Self, PakError> {
        let mut input = BufReader::new(input);

        // Read in all the header bytes
        let header = Header {
            data_offset: input.read_u32::<LE>().unwrap(),
            file_count: input.read_u32::<LE>().unwrap(),
            id_start: input.read_u32::<LE>().unwrap(),
            block_size: input.read_u32::<LE>().unwrap(),
            unknown1: input.read_u32::<LE>().unwrap(),
            unknown2: input.read_u32::<LE>().unwrap(),
            unknown3: input.read_u32::<LE>().unwrap(),
            unknown4: input.read_u32::<LE>().unwrap(),
            flags: input.read_u32::<LE>().unwrap(),
        };
        dbg!(&header);

        let first_offset = header.data_offset() / header.block_size();

        // Seek to the end of the header
        input.seek(io::SeekFrom::Start(0x24))?;
        while input.stream_position()? < header.data_offset() as u64 {
            if input.read_u32::<LE>().unwrap() == first_offset {
                input.seek_relative(-4)?;
                break;
            }
        }

        if input.stream_position()? == header.data_offset() as u64 {
            return Err(PakError::HeaderError)
        }

        // Read all the offsets and lengths
        let mut offsets = Vec::new();
        for _ in 0..header.file_count() {
            let offset = input.read_u32::<LE>().unwrap();
            let length = input.read_u32::<LE>().unwrap();
            dbg!(offset);
            dbg!(length);
            offsets.push((offset, length));
        }

        // Read all the file names
        let mut file_names = Vec::new();
        let mut buf = Vec::new();
        for _ in 0..header.file_count() {
            buf.clear();
            input.read_until(0x00, &mut buf)?;
            buf.pop();
            let strbuf = String::from_utf8(buf.clone()).unwrap();
            file_names.push(strbuf.clone());
        }
        dbg!(&file_names);

        let mut entries: Vec<Entry> = Vec::new();
        for i in 0..header.file_count() as usize {
            dbg!(i);

            // Seek to and read the entry data
            input.seek(SeekFrom::Start(offsets[i].0 as u64 * header.block_size() as u64)).unwrap();
            let mut data = vec![0u8; offsets[i].1 as usize];
            input.read_exact(&mut data).unwrap();

            // Build the entry from the data we know
            let entry = Entry {
                offset: offsets[i].0,
                length: offsets[i].1,
                data,
                name: file_names[i].clone(),
                id: 0,
                replace: false,
            };

            entries.push(entry);
        }
        println!("Got entries for {} files", entries.len());

        Ok(Pak {
            header,
            files: entries,
            file_name,
            rebuild: false,
        })
    }

    pub fn get_file(&self, index: u32) -> Option<&Entry> {
        self.files.get(index as usize)
    }

    pub fn files(&self) -> &Vec<Entry> {
        &self.files
    }
}
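
For reference, the on-disk layout implied by the removed Pak::decode above can be summarized as a sketch. Everything below is reconstructed only from what the function reads: the byte offsets follow from the read order, the meanings of the unknown fields are not established, and the helper function name is purely illustrative.

// PAK layout as read by the removed Pak::decode (all integers little-endian):
//
// 0x00  u32 data_offset   absolute byte offset where entry data begins
// 0x04  u32 file_count    number of entries in the archive
// 0x08  u32 id_start
// 0x0C  u32 block_size    entry offsets are stored as multiples of this value
// 0x10  u32 unknown1
// 0x14  u32 unknown2
// 0x18  u32 unknown3
// 0x1C  u32 unknown4
// 0x20  u32 flags
// 0x24  variable-length region: the decoder scans u32s here until it finds
//       data_offset / block_size (the block index where the first entry's
//       data is expected to start), which marks the start of the offset table
//       then: [u32 offset_in_blocks, u32 length_in_bytes] * file_count
//       then: file_count null-terminated file name strings
// data_offset
//       entry data; entry i begins at offsets[i].0 * block_size and is
//       offsets[i].1 bytes long
fn entry_data_position(offset_in_blocks: u32, block_size: u32) -> u64 {
    offset_in_blocks as u64 * block_size as u64
}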

@@ -1,11 +0,0 @@
use luca_pak::Pak;

fn main() {
    let pak = Pak::open("PARAM.PAK").unwrap();

    let file = pak.get_file(0).unwrap();

    dbg!(pak.files());

    file.save("test").unwrap();
}
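
Since this compare removes the crate entirely, a short reference for how the deleted API fit together may help. The sketch below is assembled only from the functions shown above (Pak::open, Pak::files, Entry::save); the "extracted" output directory is an arbitrary placeholder, not a path from the repository.

use std::fs;

use luca_pak::Pak;

fn main() {
    // Open a PAK archive; Pak::open derives the stored archive name from the path
    let pak = Pak::open("PARAM.PAK").unwrap();

    // Entry::save expects an existing directory and writes <dir>/<entry name>,
    // so create the (illustrative) output directory first
    fs::create_dir_all("extracted").unwrap();

    // Write every entry out under its stored file name
    for entry in pak.files() {
        entry.save("extracted").unwrap();
    }
}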