mirror of
https://github.com/G2-Games/lbee-utils.git
synced 2025-04-19 15:22:53 -05:00
Initial PAK reading implementation
This commit is contained in:
parent
1ae93d1ed4
commit
44735abbec
7 changed files with 242 additions and 1 deletions
4
.gitignore
vendored
4
.gitignore
vendored
|
@ -19,4 +19,8 @@ Cargo.lock
|
||||||
*.ttf
|
*.ttf
|
||||||
*.otf
|
*.otf
|
||||||
|
|
||||||
|
# Ignore PAK files
|
||||||
|
*.PAK
|
||||||
|
*.pak
|
||||||
|
|
||||||
test_files/*
|
test_files/*
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
members = [
|
members = [
|
||||||
"cz",
|
"cz",
|
||||||
|
"luca_pak",
|
||||||
"utils",
|
"utils",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
|
@ -34,7 +34,7 @@ impl DynamicCz {
|
||||||
/// The input must begin with the
|
/// The input must begin with the
|
||||||
/// [magic bytes](https://en.wikipedia.org/wiki/File_format#Magic_number)
|
/// [magic bytes](https://en.wikipedia.org/wiki/File_format#Magic_number)
|
||||||
/// of the file
|
/// of the file
|
||||||
fn decode<T: Seek + ReadBytesExt + Read>(input: &mut T) -> Result<Self, CzError> {
|
pub fn decode<T: Seek + ReadBytesExt + Read>(input: &mut T) -> Result<Self, CzError> {
|
||||||
// Get the header common to all CZ images
|
// Get the header common to all CZ images
|
||||||
let header_common = CommonHeader::from_bytes(input)?;
|
let header_common = CommonHeader::from_bytes(input)?;
|
||||||
let mut header_extended = None;
|
let mut header_extended = None;
|
||||||
|
|
16
luca_pak/Cargo.toml
Normal file
16
luca_pak/Cargo.toml
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
[package]
name = "luca_pak"
description = """
A crate for parsing and modifying PAK files from the LUCA System engine by
Prototype Ltd.
"""
version = "0.1.0"
edition = "2021"
# Author list is inherited from the workspace root manifest
authors.workspace = true

[dependencies]
# Endian-aware integer reading/writing on top of std I/O traits
byteorder = "1.5.0"
# Derive macro for ergonomic error enums (`#[derive(Error)]`)
thiserror = "1.0.61"

[lints]
# Lint configuration is inherited from the workspace root manifest
workspace = true
|
42
luca_pak/src/entry.rs
Normal file
42
luca_pak/src/entry.rs
Normal file
|
@ -0,0 +1,42 @@
|
||||||
|
use std::{error::Error, fs::File, io::{BufWriter, Write}, path::Path};
|
||||||
|
|
||||||
|
/// A single file entry in a PAK file
#[derive(Debug, Clone)]
pub struct Entry {
    // Offset of the entry's data, expressed in blocks: the decoder seeks to
    // `offset * block_size` when reading the data
    pub(super) offset: u32,
    // Length of the entry's data in bytes
    pub(super) length: u32,
    // The raw byte contents of the entry
    pub(super) data: Vec<u8>,
    // File name of the entry, taken from the PAK's null-terminated name table
    pub(super) name: String,
    // Numeric identifier of the entry (the decoder currently always sets 0;
    // presumably should be `id_start + index` — TODO confirm)
    pub(super) id: u8,
    pub(super) replace: bool, // TODO: Look into a better way to indicate this
}
|
||||||
|
|
||||||
|
impl Entry {
|
||||||
|
/// Get the name of the [`Entry`]
|
||||||
|
pub fn name(&self) -> &String {
|
||||||
|
&self.name
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Save an [`Entry`] as its underlying data to a file
|
||||||
|
pub fn save<P: ?Sized + AsRef<Path>>(&self, path: &P) -> Result<(), Box<dyn Error>> {
|
||||||
|
let mut path = path.as_ref().to_path_buf();
|
||||||
|
if !path.is_dir() {
|
||||||
|
return Err("Path must be a directory".into());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save the file to <folder> + <file name>
|
||||||
|
path.push(&self.name);
|
||||||
|
|
||||||
|
let mut out_file = BufWriter::new(File::create(path)?);
|
||||||
|
|
||||||
|
out_file.write_all(&self.data)?;
|
||||||
|
out_file.flush()?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the raw byte data of an [`Entry`]
|
||||||
|
pub fn as_bytes(&self) -> &Vec<u8> {
|
||||||
|
&self.data
|
||||||
|
}
|
||||||
|
}
|
167
luca_pak/src/lib.rs
Normal file
167
luca_pak/src/lib.rs
Normal file
|
@ -0,0 +1,167 @@
|
||||||
|
mod entry;
|
||||||
|
|
||||||
|
use std::{fs::File, io::{self, BufRead, BufReader, Read, Seek, SeekFrom}, path::Path};
|
||||||
|
use byteorder::{LittleEndian, ReadBytesExt};
|
||||||
|
use thiserror::Error;
|
||||||
|
|
||||||
|
use crate::entry::Entry;
|
||||||
|
|
||||||
|
/// A full PAK file with a header and its contents
#[derive(Debug, Clone)]
pub struct Pak {
    // Parsed fixed-size header of the archive
    header: Header,
    // All file entries, in the order their offsets appear in the offset table
    files: Vec<Entry>,

    // Name of the PAK file itself (the final path component when opened
    // via `Pak::open`)
    file_name: String,
    rebuild: bool, // TODO: Look into a better way to indicate this
}
|
||||||
|
|
||||||
|
/// The header of a PAK file
///
/// All fields are little-endian `u32`s read sequentially from the start of
/// the file; the `unknown*` fields have not been reverse-engineered yet.
#[derive(Debug, Clone)]
struct Header {
    // Byte offset at which entry data begins
    data_offset: u32,
    // Number of file entries in the archive
    file_count: u32,
    // NOTE(review): presumably the ID assigned to the first entry; it is
    // read but never used by the decoder yet — confirm
    id_start: u32,
    // Size of one storage block in bytes; entry offsets are multiples of this
    block_size: u32,

    unknown1: u32,
    unknown2: u32,
    unknown3: u32,
    unknown4: u32,

    // Flag bits of unknown meaning — TODO confirm semantics
    flags: u32,
}
|
||||||
|
|
||||||
|
impl Header {
|
||||||
|
pub fn block_size(&self) -> u32 {
|
||||||
|
self.block_size
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn file_count(&self) -> u32 {
|
||||||
|
self.file_count
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn data_offset(&self) -> u32 {
|
||||||
|
self.data_offset
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Error, Debug)]
|
||||||
|
pub enum PakError {
|
||||||
|
#[error("Could not read/write file")]
|
||||||
|
IoError(#[from] io::Error),
|
||||||
|
|
||||||
|
#[error("Expected {} files, got {} in {}", 0, 1, 2)]
|
||||||
|
FileCountMismatch(usize, usize, &'static str),
|
||||||
|
|
||||||
|
#[error("Malformed header information")]
|
||||||
|
HeaderError,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Shorthand alias: all integers in a PAK file are little-endian
type LE = LittleEndian;
|
||||||
|
|
||||||
|
impl Pak {
|
||||||
|
pub fn open<P: ?Sized + AsRef<Path>>(path: &P) -> Result<Self, PakError> {
|
||||||
|
let mut file = File::open(path)?;
|
||||||
|
|
||||||
|
let filename = path.as_ref().file_name().unwrap().to_string_lossy().to_string();
|
||||||
|
|
||||||
|
Pak::decode(&mut file, filename)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn decode<T: Seek + ReadBytesExt + Read>(input: &mut T, file_name: String) -> Result<Self, PakError> {
|
||||||
|
let mut input = BufReader::new(input);
|
||||||
|
|
||||||
|
// Read in all the header bytes
|
||||||
|
let header = Header {
|
||||||
|
data_offset: input.read_u32::<LE>().unwrap(),
|
||||||
|
file_count: input.read_u32::<LE>().unwrap(),
|
||||||
|
id_start: input.read_u32::<LE>().unwrap(),
|
||||||
|
block_size: input.read_u32::<LE>().unwrap(),
|
||||||
|
unknown1: input.read_u32::<LE>().unwrap(),
|
||||||
|
unknown2: input.read_u32::<LE>().unwrap(),
|
||||||
|
unknown3: input.read_u32::<LE>().unwrap(),
|
||||||
|
unknown4: input.read_u32::<LE>().unwrap(),
|
||||||
|
flags: input.read_u32::<LE>().unwrap(),
|
||||||
|
};
|
||||||
|
dbg!(&header);
|
||||||
|
|
||||||
|
let first_offset = header.data_offset() / header.block_size();
|
||||||
|
|
||||||
|
// Seek to the end of the header
|
||||||
|
input.seek(io::SeekFrom::Start(0x24))?;
|
||||||
|
while input.stream_position()? < header.data_offset() as u64 {
|
||||||
|
if input.read_u32::<LE>().unwrap() == first_offset {
|
||||||
|
input.seek_relative(-4)?;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if input.stream_position()? == header.data_offset() as u64 {
|
||||||
|
return Err(PakError::HeaderError)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read all the offsets and lengths
|
||||||
|
let mut offsets = Vec::new();
|
||||||
|
for _ in 0..header.file_count() {
|
||||||
|
let offset = input.read_u32::<LE>().unwrap();
|
||||||
|
let length = input.read_u32::<LE>().unwrap();
|
||||||
|
|
||||||
|
dbg!(offset);
|
||||||
|
dbg!(length);
|
||||||
|
|
||||||
|
offsets.push((offset, length));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read all the file names
|
||||||
|
let mut file_names = Vec::new();
|
||||||
|
let mut buf = Vec::new();
|
||||||
|
for _ in 0..header.file_count() {
|
||||||
|
buf.clear();
|
||||||
|
input.read_until(0x00, &mut buf)?;
|
||||||
|
buf.pop();
|
||||||
|
|
||||||
|
let strbuf = String::from_utf8(buf.clone()).unwrap();
|
||||||
|
file_names.push(strbuf.clone());
|
||||||
|
}
|
||||||
|
dbg!(&file_names);
|
||||||
|
|
||||||
|
let mut entries: Vec<Entry> = Vec::new();
|
||||||
|
for i in 0..header.file_count() as usize {
|
||||||
|
dbg!(i);
|
||||||
|
|
||||||
|
// Seek to and read the entry data
|
||||||
|
input.seek(SeekFrom::Start(offsets[i].0 as u64 * header.block_size() as u64)).unwrap();
|
||||||
|
let mut data = vec![0u8; offsets[i].1 as usize];
|
||||||
|
input.read_exact(&mut data).unwrap();
|
||||||
|
|
||||||
|
// Build the entry from the data we know
|
||||||
|
let entry = Entry {
|
||||||
|
offset: offsets[i].0,
|
||||||
|
length: offsets[i].1,
|
||||||
|
data,
|
||||||
|
name: file_names[i].clone(),
|
||||||
|
id: 0,
|
||||||
|
replace: false,
|
||||||
|
};
|
||||||
|
entries.push(entry);
|
||||||
|
}
|
||||||
|
|
||||||
|
println!("Got entries for {} files", entries.len());
|
||||||
|
|
||||||
|
Ok(Pak {
|
||||||
|
header,
|
||||||
|
files: entries,
|
||||||
|
file_name,
|
||||||
|
rebuild: false,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_file(&self, index: u32) -> Option<&Entry> {
|
||||||
|
self.files.get(index as usize)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn files(&self) -> &Vec<Entry> {
|
||||||
|
&self.files
|
||||||
|
}
|
||||||
|
}
|
11
luca_pak/src/main.rs
Normal file
11
luca_pak/src/main.rs
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
use luca_pak::Pak;
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
let pak = Pak::open("PARAM.PAK").unwrap();
|
||||||
|
|
||||||
|
let file = pak.get_file(0).unwrap();
|
||||||
|
|
||||||
|
dbg!(pak.files());
|
||||||
|
|
||||||
|
file.save("test").unwrap();
|
||||||
|
}
|
Loading…
Reference in a new issue