Mirror of https://github.com/G2-Games/lbee-utils.git (synced 2025-04-19 07:12:55 -05:00)

Compare commits: 43 commits
utils-0.1. ... main
Commits (SHA1): 3e4b51cad5, 250ff2255c, d33323fd87, 982f55fde6, 1ffc88d379, 957f3e637d, 3437b6c7a9,
3c4d7a89ec, 01fdb340fa, b74dc0344e, e68ca53ab5, a7a486999d, 2f881ce294, dd99a0d834, b36ea64d84,
ec60308e6a, d253d57816, 263a990e6e, db52e23ef7, 4d58df5d15, be0aaeeeb6, aaf7f9b30f, a2c62ffc4a,
de6f543898, c2cf2c6793, d67329bcc1, a93ba81859, c6404f6a09, 6ab957c243, 9be4a56823, 4e150eac32,
d35edab93e, d972dc0161, e9963afa4d, 42014a6eb2, fddcf2f055, d9bd35f075, 871340d2bb, 9bb24a73f4,
810002c1fa, 483a8b41b3, 33418680b2, 9e6f4a90af
38 changed files with 2084 additions and 401 deletions
.github/workflows/build_pak_explorer.yml (vendored, new file, 115 lines)
@ -0,0 +1,115 @@
name: Publish PAK Explorer on release

on:
  push:
    tags: [ 'explorer-*.*.*' ]

env:
  BINARIES: "--bin pak_explorer"

permissions:
  contents: write

jobs:
  build:
    name: Build binaries for Windows and Linux
    runs-on: ubuntu-latest
    steps:
      - name: '📄 Checkout'
        uses: actions/checkout@v4

      - name: '⚙️ Set up Rust environment'
        uses: dtolnay/rust-toolchain@master
        with:
          toolchain: stable
          targets: x86_64-pc-windows-gnu, i686-pc-windows-gnu, x86_64-unknown-linux-gnu, aarch64-unknown-linux-gnu

      - name: '🔽 Cache Rust dependencies'
        uses: actions/cache@v4
        with:
          path: target
          key: ${{ runner.OS }}-build-${{ hashFiles('**/Cargo.lock') }}
          restore-keys: |
            ${{ runner.OS }}-build-

      - name: '🔄 Set up additional requirements'
        run: |
          sudo apt-get install -y gcc-mingw-w64
          sudo apt-get install -y libasound2t64
          pip install cargo-zigbuild

      - name: '📦 Package Windows x86_64'
        run: |
          cd ${{github.workspace}}
          cargo build --profile production --target x86_64-pc-windows-gnu $BINARIES
          cp target/x86_64-pc-windows-gnu/production/pak_explorer.exe PAK-Explorer_Windows-x86_64.exe
          gh release upload ${{ github.ref_name }} PAK-Explorer_Windows-x86_64.exe
        env:
          GITHUB_TOKEN: ${{ github.TOKEN }}
        shell: bash

      - name: '📦 Package Windows i686'
        run: |
          cd ${{github.workspace}}
          cargo build --profile production --target i686-pc-windows-gnu $BINARIES
          cp target/i686-pc-windows-gnu/production/pak_explorer.exe PAK-Explorer_Windows-x86.exe
          gh release upload ${{ github.ref_name }} PAK-Explorer_Windows-x86.exe
        env:
          GITHUB_TOKEN: ${{ github.TOKEN }}
        shell: bash

      - name: '📦 Package Linux x86_64'
        run: |
          cd ${{github.workspace}}
          cargo build --profile production --target x86_64-unknown-linux-gnu $BINARIES
          cp target/x86_64-unknown-linux-gnu/production/pak_explorer PAK-Explorer_Linux-x86_64
          gh release upload ${{ github.ref_name }} PAK-Explorer_Linux-x86_64
        env:
          GITHUB_TOKEN: ${{ github.TOKEN }}
        shell: bash

      - name: '📦 Package Linux aarch64'
        run: |
          cd ${{github.workspace}}
          cargo zigbuild --profile production --target aarch64-unknown-linux-gnu $BINARIES
          cp target/aarch64-unknown-linux-gnu/production/pak_explorer PAK-Explorer_Linux-Arm64
          gh release upload ${{ github.ref_name }} PAK-Explorer_Linux-Arm64
        env:
          GITHUB_TOKEN: ${{ github.TOKEN }}
        shell: bash

  build-mac:
    name: Build binaries for MacOS
    runs-on: macos-14
    steps:
      - name: '📄 Checkout'
        uses: actions/checkout@v4

      - name: '⚙️ Set up Rust environment'
        uses: dtolnay/rust-toolchain@master
        with:
          toolchain: stable
          targets: x86_64-apple-darwin, aarch64-apple-darwin

      - name: '🔽 Cache Rust dependencies'
        uses: actions/cache@v4
        with:
          path: target
          key: ${{ runner.OS }}-build-${{ hashFiles('**/Cargo.lock') }}
          restore-keys: |
            ${{ runner.OS }}-build-

      - name: '🔄 Set up additional requirements'
        run: |
          brew install zig
          cargo install --locked cargo-zigbuild

      - name: '📦 Package MacOS Universal2'
        run: |
          cd ${{github.workspace}}
          cargo zigbuild --manifest-path "pak_explorer/Cargo.toml" --profile production --target universal2-apple-darwin $BINARIES
          cp target/universal2-apple-darwin/production/pak_explorer PAK-Explorer_Mac-Universal
          gh release upload ${{ github.ref_name }} PAK-Explorer_Mac-Universal
        env:
          GITHUB_TOKEN: ${{ github.TOKEN }}
        shell: bash
.github/workflows/build_utils.yml (vendored, 2 changed lines)
@ -106,8 +106,6 @@ jobs:
      - name: '📦 Package MacOS Universal2'
        run: |
          cd ${{github.workspace}}
-         cargo zigbuild --manifest-path "cz/Cargo.toml" --profile production --target universal2-apple-darwin
-         cargo zigbuild --manifest-path "luca_pak/Cargo.toml" --profile production --target universal2-apple-darwin
          cargo zigbuild --manifest-path "utils/Cargo.toml" --profile production --target universal2-apple-darwin $BINARIES
          zip -j lbee-utils_Mac-Universal.zip target/universal2-apple-darwin/production/czutil target/universal2-apple-darwin/production/pakutil
          gh release upload ${{ github.ref_name }} lbee-utils_Mac-Universal.zip
.gitignore (vendored, 3 changed lines)
@ -9,7 +9,8 @@ Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk

- # Ignore text files
+ # Ignore documentation and stuff
+ *.pdf
*.txt

# Ignore testing files
Cargo.toml (workspace)
@ -3,7 +3,7 @@ resolver = "2"
members = [
    "cz",
    "pak_explorer",
-   "luca_pak", "utils",
+   "luca_pak", "utils", "luca_script",
]

[workspace.package]
README.md (56 changed lines)
@ -1,4 +1,9 @@
- # lbee-utils
+ <p align="center">
+   <img width="80%" src="https://github.com/user-attachments/assets/6807854b-aa4b-431d-933f-9e5b63ff5ed3">
+ </p>
+
+ # Lbee-Utils
A small collection of utilities for exporting and importing assets from games
made with LUCA System by [Prototype Ltd](https://www.prot.co.jp/).
@ -9,6 +14,8 @@ Tested on the following games:
- Kanon (2024)
- planetarian \~Snow Globe~ (Nintendo Switch) (2024)

+ Please test on your own games and open an issue if something isn't working.
+
## Acknowledgments
The implementation of compression and decompression of CZ1, CZ2, CZ3, and CZ4
was derived from [LuckSystem](https://github.com/wetor/LuckSystem). The
@ -42,46 +49,18 @@ metadata can't be changed as of yet, however.
## Programs

### [lbee-utils](https://github.com/G2-Games/lbee-utils/releases/tag/utils-0.1.1)
- Small command line tools for modifying CZ images and PAK archives. Usage for each
- is as follows:
-
- #### pakutil
- ```
- Utility to maniuplate PAK archive files from the LUCA System game engine by Prototype Ltd
-
- Usage: pakutil <PAK FILE> <COMMAND>
-
- Commands:
-   extract  Extracts the contents of a PAK file into a folder
-   replace  Replace the entries in a PAK file
-   help     Print this message or the help of the given subcommand(s)
-
- Arguments:
-   <PAK FILE>
-
- Options:
-   -h, --help     Print help
-   -V, --version  Print version
- ```
-
- #### czutil
- ```
- Utility to maniuplate CZ image files from the LUCA System game engine by Prototype Ltd
-
- Usage: czutil <COMMAND>
-
- Commands:
-   decode   Converts a CZ file to a PNG
-   replace  Replace a CZ file's image data
-   help     Print this message or the help of the given subcommand(s)
-
- Options:
-   -h, --help     Print help
-   -V, --version  Print version
- ```
+ Small command line tools for modifying CZ images and PAK archives.
+
+ To install with Cargo:
+ ```
+ cargo install --git https://github.com/G2-Games/lbee-utils lbee-utils
+ ```
+
+ Otherwise, download the binaries from the Releases page here.
+
+ ------

- ### [PAK Explorer](https://github.com/G2-Games/lbee-utils/releases/tag/explorer-0.1.1)
+ ### [PAK Explorer](https://github.com/G2-Games/lbee-utils/releases/tag/explorer-0.1.2)
This is a basic explorer application for PAK files which allows you to see
their contents, replace the contents, extract files, and save them again.
@ -91,3 +70,12 @@ anything important as they offer many more options and allow for batch
operations on many files at once.

+
+ To install with Cargo:
+
+ ```
+ cargo install --git https://github.com/G2-Games/lbee-utils pak_explorer
+ ```
+
+ Otherwise, download the binaries from the Releases page here.
cz/Cargo.toml
@ -9,17 +9,11 @@ Prototype Ltd.
license = "MIT"
authors.workspace = true

- [features]
- png = ["dep:image"]
-
[dependencies]
- byteorder = "1.5"
- thiserror = "1.0"
+ byteorder-lite = "0.1"
+ thiserror = "2.0"
imagequant = "4.3"
rgb = "0.8"

- # Only active on PNG feature
- image = { version = "0.25", optional = true }

[lints]
workspace = true
@ -1,4 +1,3 @@
- use byteorder::ReadBytesExt;
use imagequant::Attributes;
use rgb::{ComponentSlice, RGBA8};
use std::{

@ -35,10 +34,7 @@ impl Palette {
}

/// Get a palette from the input stream, beginning where the palette starts.
- pub fn get_palette<T: Seek + ReadBytesExt + Read>(
-     input: &mut T,
-     num_colors: usize,
- ) -> Result<Palette, CzError> {
+ pub fn get_palette<T: Seek + Read>(input: &mut T, num_colors: usize) -> Result<Palette, CzError> {
    let mut colormap = Vec::with_capacity(num_colors);
    let mut rgba_buf = [0u8; 4];
@ -2,7 +2,7 @@

use std::io::{self, Read, Seek, Write};

- use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
+ use byteorder_lite::{ReadBytesExt, WriteBytesExt, LE};
use thiserror::Error;

#[derive(Error, Debug)]

@ -43,7 +43,7 @@ pub enum CzVersion {
}

impl TryFrom<u8> for CzVersion {
-     type Error = &'static str;
+     type Error = String;

    fn try_from(value: u8) -> Result<Self, Self::Error> {
        let value = match value {

@ -53,7 +53,7 @@ impl TryFrom<u8> for CzVersion {
            3 => Self::CZ3,
            4 => Self::CZ4,
            5 => Self::CZ5,
-             _ => return Err("Value is not a valid CZ version"),
+             v => return Err(format!("{} is not a valid CZ version", v)),
        };

        Ok(value)

@ -61,7 +61,7 @@ impl TryFrom<u8> for CzVersion {
}

impl TryFrom<char> for CzVersion {
-     type Error = &'static str;
+     type Error = String;

    fn try_from(value: char) -> Result<Self, Self::Error> {
        let value = match value {

@ -71,7 +71,7 @@ impl TryFrom<char> for CzVersion {
            '3' => Self::CZ3,
            '4' => Self::CZ4,
            '5' => Self::CZ5,
-             _ => return Err("Value is not a valid CZ version"),
+             v => return Err(format!("{} is not a valid CZ version", v)),
        };

        Ok(value)
@ -132,10 +132,10 @@ impl CommonHeader {

        let mut header = Self {
            version,
-             length: bytes.read_u32::<LittleEndian>()?,
-             width: bytes.read_u16::<LittleEndian>()?,
-             height: bytes.read_u16::<LittleEndian>()?,
-             depth: bytes.read_u16::<LittleEndian>()?,
+             length: bytes.read_u32::<LE>()?,
+             width: bytes.read_u16::<LE>()?,
+             height: bytes.read_u16::<LE>()?,
+             depth: bytes.read_u16::<LE>()?,
            unknown: bytes.read_u8()?,
        };

@ -204,10 +204,10 @@ impl CommonHeader {
        let magic_bytes = [b'C', b'Z', b'0' + self.version as u8, b'\0'];

        output.write_all(&magic_bytes)?;
-         output.write_u32::<LittleEndian>(self.length() as u32)?;
-         output.write_u16::<LittleEndian>(self.width())?;
-         output.write_u16::<LittleEndian>(self.height())?;
-         output.write_u16::<LittleEndian>(self.depth())?;
+         output.write_u32::<LE>(self.length() as u32)?;
+         output.write_u16::<LE>(self.width())?;
+         output.write_u16::<LE>(self.height())?;
+         output.write_u16::<LE>(self.depth())?;
        output.write_u8(self.color_block())?;

        Ok(())
@ -282,27 +282,27 @@ impl ExtendedHeader {
        self
    }

-     pub fn from_bytes<T: Seek + ReadBytesExt + Read>(
+     pub fn from_bytes<T: Seek + Read>(
        input: &mut T,
        common_header: &CommonHeader,
    ) -> Result<Self, CzError> {
        let mut unknown_1 = [0u8; 5];
        input.read_exact(&mut unknown_1)?;

-         let crop_width = input.read_u16::<LittleEndian>()?;
-         let crop_height = input.read_u16::<LittleEndian>()?;
+         let crop_width = input.read_u16::<LE>()?;
+         let crop_height = input.read_u16::<LE>()?;

-         let bounds_width = input.read_u16::<LittleEndian>()?;
-         let bounds_height = input.read_u16::<LittleEndian>()?;
+         let bounds_width = input.read_u16::<LE>()?;
+         let bounds_height = input.read_u16::<LE>()?;

        let mut offset_width = None;
        let mut offset_height = None;
        let mut unknown_2 = None;
        if common_header.length() > 28 {
-             offset_width = Some(input.read_u16::<LittleEndian>()?);
-             offset_height = Some(input.read_u16::<LittleEndian>()?);
+             offset_width = Some(input.read_u16::<LE>()?);
+             offset_height = Some(input.read_u16::<LE>()?);

-             unknown_2 = Some(input.read_u32::<LittleEndian>()?);
+             unknown_2 = Some(input.read_u32::<LE>()?);
        }

        Ok(Self {

@ -321,17 +321,17 @@ impl ExtendedHeader {
        })
    }

-     pub fn write_into<T: WriteBytesExt + Write>(&self, output: &mut T) -> Result<(), io::Error> {
+     pub fn write_into<T: Write>(&self, output: &mut T) -> Result<(), io::Error> {
        output.write_all(&self.unknown_1)?;
-         output.write_u16::<LittleEndian>(self.crop_width)?;
-         output.write_u16::<LittleEndian>(self.crop_height)?;
-         output.write_u16::<LittleEndian>(self.bounds_width)?;
-         output.write_u16::<LittleEndian>(self.bounds_height)?;
+         output.write_u16::<LE>(self.crop_width)?;
+         output.write_u16::<LE>(self.crop_height)?;
+         output.write_u16::<LE>(self.bounds_width)?;
+         output.write_u16::<LE>(self.bounds_height)?;

        if self.offset_width.is_some() {
-             output.write_u16::<LittleEndian>(self.offset_width.unwrap())?;
-             output.write_u16::<LittleEndian>(self.offset_height.unwrap())?;
-             output.write_u32::<LittleEndian>(self.unknown_2.unwrap())?;
+             output.write_u16::<LE>(self.offset_width.unwrap())?;
+             output.write_u16::<LE>(self.offset_height.unwrap())?;
+             output.write_u32::<LE>(self.unknown_2.unwrap())?;
        }

        Ok(())
@ -1,4 +1,3 @@
- use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use std::{
    collections::HashMap,
    io::{Read, Seek, Write},

@ -6,6 +5,7 @@ use std::{

use crate::binio::BitIo;
use crate::common::CzError;
+ use byteorder_lite::{ReadBytesExt, WriteBytesExt, LE};

/// The size of compressed data in each chunk
#[derive(Debug, Clone, Copy)]

@ -37,15 +37,12 @@ pub struct CompressionInfo {
}

impl CompressionInfo {
-     pub fn write_into<T: WriteBytesExt + Write>(
-         &self,
-         output: &mut T,
-     ) -> Result<(), std::io::Error> {
-         output.write_u32::<LittleEndian>(self.chunk_count as u32)?;
+     pub fn write_into<T: Write>(&self, output: &mut T) -> Result<(), std::io::Error> {
+         output.write_u32::<LE>(self.chunk_count as u32)?;

        for chunk in &self.chunks {
-             output.write_u32::<LittleEndian>(chunk.size_compressed as u32)?;
-             output.write_u32::<LittleEndian>(chunk.size_raw as u32)?;
+             output.write_u32::<LE>(chunk.size_compressed as u32)?;
+             output.write_u32::<LE>(chunk.size_raw as u32)?;
        }

        Ok(())

@ -56,10 +53,8 @@ impl CompressionInfo {
///
/// These are defined by a length value, followed by the number of data chunks
/// that length value says split into compressed and original size u32 values
- pub fn get_chunk_info<T: Seek + ReadBytesExt + Read>(
-     bytes: &mut T,
- ) -> Result<CompressionInfo, CzError> {
-     let parts_count = bytes.read_u32::<LittleEndian>()?;
+ pub fn get_chunk_info<T: Seek + Read>(bytes: &mut T) -> Result<CompressionInfo, CzError> {
+     let parts_count = bytes.read_u32::<LE>()?;

    let mut part_sizes = vec![];
    let mut total_size = 0;

@ -67,10 +62,10 @@ pub fn get_chunk_info<T: Seek + ReadBytesExt + Read>(

    // Loop over the compressed bytes
    for _ in 0..parts_count {
-         let compressed_size = bytes.read_u32::<LittleEndian>()?;
+         let compressed_size = bytes.read_u32::<LE>()?;
        total_size = i32::wrapping_add(total_size, compressed_size as i32);

-         let raw_size = bytes.read_u32::<LittleEndian>()?;
+         let raw_size = bytes.read_u32::<LE>()?;
        total_size_raw = u32::wrapping_add(total_size_raw, raw_size);

        part_sizes.push(ChunkInfo {

@ -89,7 +84,7 @@ pub fn get_chunk_info<T: Seek + ReadBytesExt + Read>(
}

/// Decompress an LZW compressed stream like CZ1
- pub fn decompress<T: Seek + ReadBytesExt + Read>(
+ pub fn decompress<T: Seek + Read>(
    input: &mut T,
    chunk_info: &CompressionInfo,
) -> Result<Vec<u8>, CzError> {

@ -99,7 +94,7 @@ pub fn decompress<T: Seek + ReadBytesExt + Read>(
        let mut buffer = vec![0u16; block.size_compressed];

        for word in buffer.iter_mut() {
-             *word = input.read_u16::<LittleEndian>().unwrap();
+             *word = input.read_u16::<LE>().unwrap();
        }

        let raw_buf = decompress_lzw(&buffer, block.size_raw);

@ -144,7 +139,7 @@ fn decompress_lzw(input_data: &[u16], size: usize) -> Vec<u8> {
}

/// Decompress an LZW compressed stream like CZ2
- pub fn decompress2<T: Seek + ReadBytesExt + Read>(
+ pub fn decompress2<T: Seek + Read>(
    input: &mut T,
    chunk_info: &CompressionInfo,
) -> Result<Vec<u8>, CzError> {

@ -163,7 +158,7 @@ pub fn decompress2<T: Seek + ReadBytesExt + Read>(
}

fn decompress_lzw2(input_data: &[u8], size: usize) -> Vec<u8> {
-     let mut data = input_data.to_vec();
+     let data = input_data;
    let mut dictionary = HashMap::new();
    for i in 0..256 {
        dictionary.insert(i as u64, vec![i as u8]);

@ -172,12 +167,15 @@ fn decompress_lzw2(input_data: &[u8], size: usize) -> Vec<u8> {
    let mut result = Vec::with_capacity(size);

    let data_size = input_data.len();
-     data.extend_from_slice(&[0, 0]);
-     let mut bit_io = BitIo::new(data);
+     let mut bit_io = BitIo::new(data.to_vec());
    let mut w = dictionary.get(&0).unwrap().clone();

    let mut element;
    loop {
        if bit_io.byte_offset() >= data_size - 1 {
            break;
        }

        let flag = bit_io.read_bit(1);
        if flag == 0 {
            element = bit_io.read_bit(15);

@ -185,10 +183,6 @@ fn decompress_lzw2(input_data: &[u8], size: usize) -> Vec<u8> {
            element = bit_io.read_bit(18);
        }

-         if bit_io.byte_offset() > data_size {
-             break;
-         }

        let mut entry;
        if let Some(x) = dictionary.get(&element) {
            // If the element was already in the dict, get it

@ -197,7 +191,11 @@ fn decompress_lzw2(input_data: &[u8], size: usize) -> Vec<u8> {
            entry = w.clone();
            entry.push(w[0])
        } else {
-             panic!("Bad compressed element: {}", element)
+             panic!(
+                 "Bad compressed element {} at offset {}",
+                 element,
+                 bit_io.byte_offset()
+             )
        }

        //println!("{}", element);

@ -391,7 +389,7 @@ fn compress_lzw2(data: &[u8], last: Vec<u8>) -> (usize, Vec<u8>, Vec<u8>) {

        if dictionary_count >= 0x3FFFE {
            count -= 1;
-             break
+             break;
        }
    }

@ -402,11 +400,13 @@ fn compress_lzw2(data: &[u8], last: Vec<u8>) -> (usize, Vec<u8>, Vec<u8>) {
            write_bit(&mut bit_io, *dictionary.get(&vec![c]).unwrap());
        }
    }

    return (count, bit_io.bytes(), Vec::new());
} else if bit_io.byte_size() < 0x87BDF {
    if !last_element.is_empty() {
        write_bit(&mut bit_io, *dictionary.get(&last_element).unwrap());
    }

    return (count, bit_io.bytes(), Vec::new());
}
@ -1,4 +1,4 @@
- use byteorder::ReadBytesExt;
+ use byteorder_lite::ReadBytesExt;
use rgb::ComponentSlice;
use std::{
    fs::File,

@ -13,7 +13,7 @@ use crate::{

/// A CZ# interface which can open and save any CZ file type.
#[derive(Debug, Clone)]
- pub struct DynamicCz {
+ pub struct CzFile {
    header_common: CommonHeader,
    header_extended: Option<ExtendedHeader>,

@ -24,7 +24,7 @@ pub struct DynamicCz {
    bitmap: Vec<u8>,
}

- impl DynamicCz {
+ impl CzFile {
    /// Decode a CZ# file from anything that implements [`Read`] and [`Seek`]
    ///
    /// The input must begin with the

@ -192,43 +192,9 @@ impl DynamicCz {
        Ok(())
    }

-     /// Save the CZ# image as a lossless PNG file.
-     ///
-     /// Internally, the [`DynamicCz`] struct operates on 32-bit RGBA values,
-     /// which is the highest encountered in CZ# files, therefore saving them
-     /// as a PNG of the same or better quality is lossless.
-     #[cfg(feature = "png")]
-     pub fn save_as_png<P: ?Sized + AsRef<std::path::Path>>(
-         &self,
-         path: &P,
-     ) -> Result<(), image::error::EncodingError> {
-         let size = (self.header_common.width() as u32 * self.header_common.height() as u32) * 4;
-
-         let mut buf = vec![0; size as usize];
-         buf[..self.bitmap.len()].copy_from_slice(&self.bitmap);
-
-         let image = image::RgbaImage::from_raw(
-             self.header_common.width() as u32,
-             self.header_common.height() as u32,
-             buf.clone(),
-         )
-         .unwrap();
-
-         image
-             .save_with_format(path, image::ImageFormat::Png)
-             .unwrap();
-
-         Ok(())
-     }
-
    /// Create a CZ# image from RGBA bytes. The bytes *must* be RGBA, as that
    /// is the only format that is used internally.
-     pub fn from_raw(
-         version: CzVersion,
-         width: u16,
-         height: u16,
-         bitmap: Vec<u8>,
-     ) -> Self {
+     pub fn from_raw(version: CzVersion, width: u16, height: u16, bitmap: Vec<u8>) -> Self {
        let header_common = CommonHeader::new(version, width, height);

        Self {
@ -1,9 +1,8 @@
- use byteorder::{ReadBytesExt, WriteBytesExt};
use std::io::{Read, Seek, Write};

use crate::common::CzError;

- pub fn decode<T: Seek + ReadBytesExt + Read>(input: &mut T) -> Result<Vec<u8>, CzError> {
+ pub fn decode<T: Seek + Read>(input: &mut T) -> Result<Vec<u8>, CzError> {
    // Get the rest of the file, which is the bitmap
    let mut bitmap = vec![];
    input.read_to_end(&mut bitmap)?;

@ -11,7 +10,7 @@ pub fn decode<T: Seek + ReadBytesExt + Read>(input: &mut T) -> Result<Vec<u8>, CzError> {
    Ok(bitmap)
}

- pub fn encode<T: WriteBytesExt + Write>(output: &mut T, bitmap: &[u8]) -> Result<(), CzError> {
+ pub fn encode<T: Write>(output: &mut T, bitmap: &[u8]) -> Result<(), CzError> {
    output.write_all(bitmap)?;

    Ok(())
@ -1,10 +1,9 @@
- use byteorder::{ReadBytesExt, WriteBytesExt};
use std::io::{Read, Seek, SeekFrom, Write};

use crate::common::CzError;
use crate::compression::{compress, decompress, get_chunk_info};

- pub fn decode<T: Seek + ReadBytesExt + Read>(bytes: &mut T) -> Result<Vec<u8>, CzError> {
+ pub fn decode<T: Seek + Read>(bytes: &mut T) -> Result<Vec<u8>, CzError> {
    // Get information about the compressed chunks
    let block_info = get_chunk_info(bytes)?;
    bytes.seek(SeekFrom::Start(block_info.length as u64))?;

@ -15,7 +14,7 @@ pub fn decode<T: Seek + ReadBytesExt + Read>(bytes: &mut T) -> Result<Vec<u8>, CzError> {
    Ok(bitmap)
}

- pub fn encode<T: WriteBytesExt + Write>(output: &mut T, bitmap: &[u8]) -> Result<(), CzError> {
+ pub fn encode<T: Write>(output: &mut T, bitmap: &[u8]) -> Result<(), CzError> {
    let (compressed_data, compressed_info) = compress(bitmap, 0xFEFD);

    compressed_info.write_into(output)?;
@ -1,10 +1,9 @@
- use byteorder::{ReadBytesExt, WriteBytesExt};
use std::io::{Read, Seek, SeekFrom, Write};

use crate::common::CzError;
use crate::compression::{compress2, decompress2, get_chunk_info};

- pub fn decode<T: Seek + ReadBytesExt + Read>(bytes: &mut T) -> Result<Vec<u8>, CzError> {
+ pub fn decode<T: Seek + Read>(bytes: &mut T) -> Result<Vec<u8>, CzError> {
    // Get information about the compressed chunks
    let block_info = get_chunk_info(bytes)?;
    bytes.seek(SeekFrom::Start(block_info.length as u64))?;

@ -15,8 +14,8 @@ pub fn decode<T: Seek + ReadBytesExt + Read>(bytes: &mut T) -> Result<Vec<u8>, CzError> {
    Ok(bitmap)
}

- pub fn encode<T: WriteBytesExt + Write>(output: &mut T, bitmap: &[u8]) -> Result<(), CzError> {
-     let (compressed_data, compressed_info) = compress2(&bitmap);
+ pub fn encode<T: Write>(output: &mut T, bitmap: &[u8]) -> Result<(), CzError> {
+     let (compressed_data, compressed_info) = compress2(bitmap);

    compressed_info.write_into(output)?;
@ -1,13 +1,9 @@
- use byteorder::{ReadBytesExt, WriteBytesExt};
use std::io::{Read, Seek, SeekFrom, Write};

use crate::common::{CommonHeader, CzError};
use crate::compression::{compress, decompress, get_chunk_info};

- pub fn decode<T: Seek + ReadBytesExt + Read>(
-     bytes: &mut T,
-     header: &CommonHeader,
- ) -> Result<Vec<u8>, CzError> {
+ pub fn decode<T: Seek + Read>(bytes: &mut T, header: &CommonHeader) -> Result<Vec<u8>, CzError> {
    let block_info = get_chunk_info(bytes)?;
    bytes.seek(SeekFrom::Start(block_info.length as u64))?;

@ -18,7 +14,7 @@ pub fn decode<T: Seek + ReadBytesExt + Read>(
    Ok(bitmap)
}

- pub fn encode<T: WriteBytesExt + Write>(
+ pub fn encode<T: Write>(
    output: &mut T,
    bitmap: &[u8],
    header: &CommonHeader,
@ -1,13 +1,9 @@
- use byteorder::{ReadBytesExt, WriteBytesExt};
use std::io::{Read, Seek, SeekFrom, Write};

use crate::common::{CommonHeader, CzError};
use crate::compression::{compress, decompress, get_chunk_info};

- pub fn decode<T: Seek + ReadBytesExt + Read>(
-     bytes: &mut T,
-     header: &CommonHeader,
- ) -> Result<Vec<u8>, CzError> {
+ pub fn decode<T: Seek + Read>(bytes: &mut T, header: &CommonHeader) -> Result<Vec<u8>, CzError> {
    let block_info = get_chunk_info(bytes)?;
    bytes.seek(SeekFrom::Start(block_info.length as u64))?;

@ -18,7 +14,7 @@ pub fn decode<T: Seek + ReadBytesExt + Read>(
    Ok(bitmap)
}

- pub fn encode<T: WriteBytesExt + Write>(
+ pub fn encode<T: Write>(
    output: &mut T,
    bitmap: &[u8],
    header: &CommonHeader,
@ -17,14 +17,14 @@ use common::CzError;
use std::{io::BufReader, path::Path};

/// Open a CZ# file from a path
- pub fn open<P: ?Sized + AsRef<Path>>(path: &P) -> Result<DynamicCz, CzError> {
+ pub fn open<P: ?Sized + AsRef<Path>>(path: &P) -> Result<CzFile, CzError> {
    let mut img_file = BufReader::new(std::fs::File::open(path)?);

-     DynamicCz::decode(&mut img_file)
+     CzFile::decode(&mut img_file)
}

#[doc(inline)]
- pub use dynamic::DynamicCz;
+ pub use dynamic::CzFile;

/*
#[doc(inline)]
cz/tests/round_trip.rs (new file, 86 lines)
@ -0,0 +1,86 @@
use std::io::Cursor;

use cz::{common::CzVersion, CzFile};

const KODIM03: (u16, u16, &[u8]) = (128, 128, include_bytes!("test_images/kodim03.rgba"));
const KODIM23: (u16, u16, &[u8]) = (225, 225, include_bytes!("test_images/kodim23.rgba"));
const SQPTEXT: (u16, u16, &[u8]) = (2048, 810, include_bytes!("test_images/sqp_text.rgba"));
const DPFLOGO: (u16, u16, &[u8]) = (1123, 639, include_bytes!("test_images/dpf_logo.rgba"));

type TestImage = (u16, u16, &'static [u8]);
const TEST_IMAGES: &[TestImage] = &[KODIM03, KODIM23, SQPTEXT, DPFLOGO];

#[test]
fn cz0_round_trip() {
    for image in TEST_IMAGES {
        let original_cz = CzFile::from_raw(CzVersion::CZ0, image.0, image.1, image.2.to_vec());

        let mut cz_bytes = Vec::new();
        original_cz.encode(&mut cz_bytes).unwrap();

        let mut cz_bytes = Cursor::new(cz_bytes);
        let decoded_cz = CzFile::decode(&mut cz_bytes).unwrap();

        assert_eq!(original_cz.as_raw(), decoded_cz.as_raw());
    }
}

#[test]
fn cz1_round_trip() {
    for image in TEST_IMAGES {
        let original_cz = CzFile::from_raw(CzVersion::CZ1, image.0, image.1, image.2.to_vec());

        let mut cz_bytes = Vec::new();
        original_cz.encode(&mut cz_bytes).unwrap();

        let mut cz_bytes = Cursor::new(cz_bytes);
        let decoded_cz = CzFile::decode(&mut cz_bytes).unwrap();

        assert_eq!(original_cz.as_raw(), decoded_cz.as_raw());
    }
}

#[test]
fn cz2_round_trip() {
    for image in TEST_IMAGES {
        let original_cz = CzFile::from_raw(CzVersion::CZ2, image.0, image.1, image.2.to_vec());

        let mut cz_bytes = Vec::new();
        original_cz.encode(&mut cz_bytes).unwrap();

        let mut cz_bytes = Cursor::new(cz_bytes);
        let decoded_cz = CzFile::decode(&mut cz_bytes).unwrap();

        assert_eq!(original_cz.as_raw(), decoded_cz.as_raw());
    }
}

#[test]
fn cz3_round_trip() {
    for image in TEST_IMAGES {
        let original_cz = CzFile::from_raw(CzVersion::CZ3, image.0, image.1, image.2.to_vec());

        let mut cz_bytes = Vec::new();
        original_cz.encode(&mut cz_bytes).unwrap();

        let mut cz_bytes = Cursor::new(cz_bytes);
        let decoded_cz = CzFile::decode(&mut cz_bytes).unwrap();

        assert_eq!(original_cz.as_raw(), decoded_cz.as_raw());
    }
}

#[test]
fn cz4_round_trip() {
    for image in TEST_IMAGES {
        let original_cz = CzFile::from_raw(CzVersion::CZ4, image.0, image.1, image.2.to_vec());

        let mut cz_bytes = Vec::new();
        original_cz.encode(&mut cz_bytes).unwrap();

        let mut cz_bytes = Cursor::new(cz_bytes);
        let decoded_cz = CzFile::decode(&mut cz_bytes).unwrap();

        assert_eq!(original_cz.as_raw(), decoded_cz.as_raw());
    }
}
cz/tests/test_images/dpf_logo.rgba (new binary file, not shown)
cz/tests/test_images/kodim03.rgba (new file, 216 lines; diff suppressed because one or more lines are too long)
cz/tests/test_images/kodim23.rgba (new file, 1 line; diff suppressed because one or more lines are too long)
cz/tests/test_images/sqp_text.rgba (new binary file, not shown)
documents/DOCUMENTS.md (new file, 2 lines)
@ -0,0 +1,2 @@
# Typst Documents
To compile Typst documents (with the `.typ` extension), use the [Typst compiler](https://github.com/typst/typst).
documents/cz_format_spec.typ (new file, 238 lines)
@ -0,0 +1,238 @@
#let ver = version(0, 4, 0)
#set document(
  title: "The CZx Image Formats - " + str(ver),
  author: "G2",
)
#set text(
  font: "Roboto",
  lang: "en",
  size: 9pt,
)
#set page(
  numbering: "1",
  margin: 1.5cm,
  paper: "a4",
)
#set par(leading: 0.7em)
#set block(spacing: 1.7em)

// Styling
#show link: underline
#show link: set text(blue)

#text(size: 22pt, weight: "bold", font: "Roboto Slab")[The CZx Image Formats]
#v(1.5em, weak: true)
#text(size: 1.1em)[Specification #strong[Version #ver] — Sept. 11, 2024]

#line(length: 100%, stroke: 1.5pt + gray)

The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD",
"SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be
interpreted as described in IETF
#link("https://datatracker.ietf.org/doc/html/rfc2119")[RFC2119].

The CZx family of image formats (CZ0, CZ1, CZ2, CZ3, CZ4, and CZ5) are used in
the LUCA System visual novel engine developed by
#link("https://www.prot.co.jp/")[Prototype Ltd]\. These image formats can be
used for storing lossless compressed and uncompressed pixel data over a wide
range of bit depths and with accompanying metadata useful for a visual novel.
All bytes in CZx files MUST be stored in little-endian format.

#show heading: set text(1.2em)
#show heading.where(level: 1): head => [
  #set text(18pt, font: "Roboto Slab", weight: "bold")
  #head
  #v(0.3em)
]
#show heading.where(level: 2): set text(weight: 600)
#show raw: it => [
  #box(stroke: 1pt, width: 100%, inset: 5pt, radius: 3pt)[
    #it
  ]
]
= Header

#columns(2)[

== Block 1 — Basic Header
All CZx files MUST begin with this header block. The header contains information
about basic parameters of the image, such as the bitmap dimensions. The common
part of the header is as follows:

```rust
CommonHeader {
    magic: [char; 4],  // magic bytes, ex. “CZ0\0”
    header_length: u8, // header length in bytes

    width: u16,        // image width in pixels
    height: u16,       // image height in pixels
    bit_depth: u16,    // bit depth (BPP)
    unknown: u8,       // unknown purpose, often 3
}
```

This common header MAY be followed by an extended header, which contains
metadata such as cropping parameters and positioning on the screen. This part of
the header exists if the header_length value is greater than 15 bytes, with the
exception of the CZ2 format. An example of the extended header is as follows:

```rust
ExtendedHeader {
    unknown: [u8; 5],   // Set to 0

    crop_width: u16,    // width of image crop
    crop_height: u16,   // height of image crop

    bounds_width: u16,  // width of crop bounds
    bounds_height: u16, // height of crop bounds
}
```

The extended header MAY be followed by image offset information, used for
positioning the image. This information only exists if the header_length value
is greater than 28 bytes. An example of the offset header is as follows:

```rust
OffsetHeader {
    offset_width: u16,
    offset_height: u16,

    unknown: [u8; 4],
}
```
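As a rough sketch (not part of the specification), the common header can be read with plain little-endian reads, mirroring the field order above; `RawCommonHeader` and `read_common_header` are hypothetical names, and the length field is read as a `u32` to match the 15-byte size of the common block.

```rust
use std::io::{self, Read};

use byteorder_lite::{ReadBytesExt, LE};

/// Simplified stand-in for the common header fields described above.
struct RawCommonHeader {
    magic: [u8; 4], // e.g. b"CZ3\0"
    length: u32,    // header length in bytes (15 for the common block alone)
    width: u16,     // image width in pixels
    height: u16,    // image height in pixels
    depth: u16,     // bit depth (BPP)
    unknown: u8,    // unknown purpose, often 3
}

/// Read the common header; every multi-byte field is little-endian.
fn read_common_header<T: Read>(input: &mut T) -> io::Result<RawCommonHeader> {
    let mut magic = [0u8; 4];
    input.read_exact(&mut magic)?;

    Ok(RawCommonHeader {
        magic,
        length: input.read_u32::<LE>()?,
        width: input.read_u16::<LE>()?,
        height: input.read_u16::<LE>()?,
        depth: input.read_u16::<LE>()?,
        unknown: input.read_u8()?,
    })
}
```

A decoder would then compare `length` against 15 and 28 to decide whether the extended and offset parts described above follow.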
== Block 2 — Indexed Color Information
If the depth of the image is 8 bits per pixel, the header MUST be followed by a
palette block containing the information required to properly decode an image
which is encoded in indexed color.

The palette is an ordered list of colors. The color in the first position MUST
correspond to an index value of 0 in the image, the second corresponding to a
value of 1, and so on. These colors are stored in 8 bit RGBA format.

The length of the palette corresponds to the bit depth of the image. Therefore,
the color list MUST be 256 colors long.

```rust
Color {
    red: u8,
    green: u8,
    blue: u8,
    alpha: u8,
}

ColorPalette {
    colors: [Color; 256]
}
```
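As a sketch of how the palette is applied (a hypothetical helper, not part of the specification), decoding an 8-bit indexed image is a direct table lookup once the 256 RGBA entries have been read:

```rust
/// Expand 8-bit indexed pixels into RGBA bytes using a 256-entry palette;
/// index 0 maps to the first palette color, as described above.
fn apply_palette(indexed: &[u8], palette: &[[u8; 4]; 256]) -> Vec<u8> {
    let mut rgba = Vec::with_capacity(indexed.len() * 4);
    for &index in indexed {
        rgba.extend_from_slice(&palette[index as usize]);
    }
    rgba
}
```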
== Block 3 — Compression Information <compression-info>
All CZx formats except for CZ0 MUST have a block immediately following the color
information which contains information about the size of chunks in the following
compressed image data. The compression block starts with the number of
compressed blocks, followed by a list of the sizes of the compressed data and
original data.

```rust
ChunkInfo {
    compressed_size: u32, // compressed size, bytes
    original_size: u32,   // original size, bytes
}

CompressionInfo {
    chunk_number: u32, // the number of chunks
    chunks: ChunkInfo,
}
```
]
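As a sketch, this block can be read the same way the cz crate's `get_chunk_info` reader does elsewhere in this changeset: a chunk count followed by one pair of sizes per chunk, all little-endian `u32`s. The names below are simplified stand-ins.

```rust
use std::io::{self, Read};

use byteorder_lite::{ReadBytesExt, LE};

struct ChunkSizes {
    compressed: usize, // compressed size, bytes
    original: usize,   // original size, bytes
}

fn read_compression_info<T: Read>(input: &mut T) -> io::Result<Vec<ChunkSizes>> {
    let chunk_count = input.read_u32::<LE>()? as usize;

    // A robust reader would bound chunk_count before allocating.
    let mut chunks = Vec::with_capacity(chunk_count);
    for _ in 0..chunk_count {
        chunks.push(ChunkSizes {
            compressed: input.read_u32::<LE>()? as usize,
            original: input.read_u32::<LE>()? as usize,
        });
    }
    Ok(chunks)
}
```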
#pagebreak()

= File Types

#columns(2)[

== CZ0
CZ0 files are uncompressed, storing raw RGBA pixel data in a linear bitmap.

This format is most often used to store character sprites, UI elements, and
various other game assets. Use of CZ0 has decreased in more recent LUCA System
games.

The encoding used in these files is a simple bitmap of RGBA pixels. Decoding CZ0
is as simple as reading the header to determine the width and height of the
image in pixels, and then reading the image data as 4 byte RGBA chunks, which
correspond directly to pixels.
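As a sketch (a hypothetical helper; the cz crate itself simply reads the remainder of the file), a CZ0 bitmap read follows directly from the header dimensions:

```rust
/// Read the CZ0 bitmap that follows the header: width * height pixels,
/// each stored as 4 bytes of RGBA, with no compression.
fn decode_cz0<T: std::io::Read>(
    input: &mut T,
    width: u16,
    height: u16,
) -> std::io::Result<Vec<u8>> {
    let mut bitmap = vec![0u8; width as usize * height as usize * 4];
    input.read_exact(&mut bitmap)?;
    Ok(bitmap)
}
```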
== CZ1
CZ1 files are compressed, storing raw RGBA pixel data using LZW compression.

This format is used to store text bitmaps in older LUCA System games, along with
UI elements and other small image assets in more recent games. It is most often
encountered with 8 bit indexed color, but 32 bit RGBA is also relatively common.

== CZ2
CZ2 files are compressed, storing raw RGBA pixel data using LZW compression.
This method of compression is different from CZ1.

This format is primarily used for storing text bitmaps in newer LUCA System
games. Its use outside of text bitmaps is limited.

#colbreak()

== CZ3
CZ3 files are compressed, storing modified RGBA pixel data using LZW
compression. This compression scheme is the same as CZ1.

This format is primarily used for storing backgrounds, but is also used for
sprites, character graphics, and general files. It appears to be the most
general form of more highly compressed CZx files. The compression ratios
achieved by CZ3 are similar to or slightly worse than a PNG file with a
compression level of 5.

== CZ4
CZ4 files are compressed, storing modified RGBA pixel data using LZW
compression. This compression scheme is the same as CZ1.

This format only appears in newer LUCA System games, and is primarily used for
storing sprites, character graphics, and backgrounds. It seems to have replaced
the use of CZ3 files and CZ0 files in many places in the engine, but not
entirely. The compression ratios achieved by CZ4 are similar to or slightly
better than a PNG file with a compression level of 9.

== CZ5
Little is known about the CZ5 format, as it has not been encountered in any
released games so far. The only information about it has come from decompiling
recent games which use the LUCA System engine, where it is referenced as part of
the decoder for CZx files.

]

#v(2em)
#line(length: 100%, stroke: 1.5pt + gray)
= Compression Methods
The two types of compression used in CZx files are Type 1 (used in CZ1, CZ3, and
CZ4 files) and Type 2 (used in CZ2 files). On top of these two types, CZ3 and
CZ4 have extra modifications to the image data to make it possible to compress
them further. Both of these methods are dictionary based compression algorithms.

== Type 1 (CZ1-style)
Type 1 compression is a dictionary based compression algorithm that has a
fixed-length block size. The data MUST be read and written in blocks which are
sized according to the compressed_size value in the compression information
section of the header. When creating compressed data, the block size (which
determines the compressed_size value) SHOULD be set to 0xFEFD; however, it MAY
be set to a smaller value, and MUST NOT be set to a larger value, as this will
break compatibility with existing decoders.

To decode Type 1 compression, each block is read as a sequence of 16-bit
little-endian values, which the dictionary decoder then expands until the
block's original size in bytes has been produced.
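As a sketch, reading one Type 1 block mirrors the per-chunk loop in the cz crate's `decompress` function changed elsewhere in this diff: `compressed_size` 16-bit little-endian code words are read, and the dictionary decoder (not shown here) expands them into `original_size` bytes. `read_type1_chunk` is a hypothetical helper.

```rust
use std::io::{self, Read};

use byteorder_lite::{ReadBytesExt, LE};

/// Read one Type 1 compressed chunk as its stream of 16-bit little-endian
/// code words; the sizes come from the compression information block.
fn read_type1_chunk<T: Read>(input: &mut T, compressed_size: usize) -> io::Result<Vec<u16>> {
    let mut codes = vec![0u16; compressed_size];
    for word in codes.iter_mut() {
        *word = input.read_u16::<LE>()?;
    }
    Ok(codes)
}
```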
== Type 2 (CZ2-style)
Type 2 compression is a dictionary based compression algorithm that has a
variable-length block size. The data MUST be read in blocks which are sized
according to the compressed_size value in the
#link(<compression-info>)[compression information] section of the header. When
creating compressed data, the block size is dynamic based on the number of
entries that can fit in the 18-bit dictionary.
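As a sketch of the variable-width element scheme, each element starts with a 1-bit flag that selects a 15-bit or an 18-bit dictionary code, matching the `read_bit(1)`/`read_bit(15)`/`read_bit(18)` calls in `decompress_lzw2` elsewhere in this diff. `BitReader` is a hypothetical stand-in for the crate's `BitIo`, and the least-significant-bit-first ordering shown here is an assumption.

```rust
/// Minimal bit reader for illustration only.
struct BitReader<'a> {
    data: &'a [u8],
    bit_pos: usize,
}

impl<'a> BitReader<'a> {
    fn new(data: &'a [u8]) -> Self {
        Self { data, bit_pos: 0 }
    }

    /// Read `count` bits, least-significant bit first within each byte.
    fn read_bits(&mut self, count: usize) -> u64 {
        let mut value = 0u64;
        for i in 0..count {
            let byte = self.data[self.bit_pos / 8];
            let bit = (byte >> (self.bit_pos % 8)) & 1;
            value |= (bit as u64) << i;
            self.bit_pos += 1;
        }
        value
    }
}

/// Read one Type 2 element: a flag bit followed by a 15- or 18-bit code.
fn read_type2_element(reader: &mut BitReader<'_>) -> u64 {
    if reader.read_bits(1) == 0 {
        reader.read_bits(15)
    } else {
        reader.read_bits(18)
    }
}
```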
luca_pak/Cargo.toml
@ -1,7 +1,7 @@
[package]
name = "luca_pak"
edition = "2021"
- version = "0.1.2"
+ version = "0.1.3"
description = """
An encoder/decoder for PAK archive files used in the LUCA System engine by
Prototype Ltd.

@ -10,9 +10,9 @@ license = "MIT"
authors.workspace = true

[dependencies]
- byteorder = "1.5.0"
- log = "0.4.22"
- thiserror = "1.0.61"
+ byteorder-lite = "0.1"
+ log = "0.4"
+ thiserror = "2.0"

[lints]
workspace = true
@ -1,5 +1,8 @@
use std::{
-     error::Error, fs::File, io::{BufWriter, Write}, path::Path
+     error::Error,
+     fs::File,
+     io::{BufWriter, Write},
+     path::Path,
};

/// A single file entry in a PAK file

@ -55,15 +58,30 @@ impl Entry {
        self.length as usize
    }

+     pub fn is_empty(&self) -> bool {
+         self.len() == 0
+     }
+
    /// Get the raw byte data of an [`Entry`]
    pub fn as_bytes(&self) -> &Vec<u8> {
        &self.data
    }

+     /// Get the byte data of an entry, but fixed to be compatible with normal things
+     pub fn cloned_bytes_fixed(&self) -> Vec<u8> {
+         match self.file_type() {
+             EntryType::OGGPAK => {
+                 dbg!(self.data[15]);
+                 self.data[15..].to_vec()
+             },
+             _ => self.data.clone()
+         }
+     }
+
    pub fn display_name(&self) -> String {
        let mut name = self.name().clone().unwrap_or(self.id().to_string());
        let entry_type = self.file_type();
-         name.push_str(&entry_type.extension());
+         name.push_str(entry_type.extension());

        name
    }

@ -81,15 +99,21 @@ impl Entry {
            }
        } else if self.data[0..3] == [b'M', b'V', b'T'] {
            EntryType::MVT
+         } else if self.data[0..4] == [b'R', b'I', b'F', b'F'] {
+             EntryType::WAV
+         } else if self.data[0..4] == [b'O', b'g', b'g', b'S'] {
+             EntryType::OGG
+         } else if self.data[0..6] == [b'O', b'G', b'G', b'P', b'A', b'K'] {
+             EntryType::OGGPAK
        } else {
            EntryType::Unknown
        }
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum EntryType {
    // CZ image files
    CZ0,
    CZ1,
    CZ2,

@ -100,6 +124,14 @@ pub enum EntryType {
    /// An MVT video file
    MVT,

+     /// OGG Audio file
+     OGG,
+     /// OGGPAK Audio file
+     OGGPAK,
+
+     /// Wav Audio file
+     WAV,
+
    /// Who knows!
    Unknown,
}

@ -115,6 +147,9 @@ impl EntryType {
            Self::CZ4 => ".cz4",
            Self::CZ5 => ".cz5",
            Self::MVT => ".mvt",
+             Self::OGG => ".ogg",
+             Self::OGGPAK => ".oggpak",
+             Self::WAV => ".wav",
            Self::Unknown => "",
        }
    }
@ -1,5 +1,5 @@
+ use byteorder_lite::WriteBytesExt;
use std::io::{self, Write};
- use byteorder::WriteBytesExt;

use crate::LE;
@ -1,16 +1,16 @@
pub mod entry;
pub mod header;

- use byteorder::{LittleEndian, ReadBytesExt};
+ use byteorder_lite::{ReadBytesExt, WriteBytesExt, LE};
use header::Header;
use log::{debug, info};
use std::{
-     ffi::CString, fs::File, io::{self, BufRead, BufReader, BufWriter, Read, Seek, SeekFrom, Write}, path::{Path, PathBuf}
+     ffi::CString,
+     fs::File,
+     io::{self, BufRead, BufReader, BufWriter, Read, Seek, SeekFrom, Write},
+     path::{Path, PathBuf},
};
use thiserror::Error;
- use byteorder::WriteBytesExt;
-
- type LE = LittleEndian;

use crate::{entry::Entry, header::PakFlags};

@ -20,8 +20,11 @@ pub enum PakError {
    #[error("Could not read/write file")]
    IoError(#[from] io::Error),

-     #[error("Expected {} files, got {} in {}", 0, 1, 2)]
-     FileCountMismatch(usize, usize, &'static str),
+     #[error("Expected {0} files, got {1} in {2}")]
+     EntryCountMismatch(usize, usize, &'static str),
+
+     #[error("Number of entries in header ({0}) exceeds limit of {1}")]
+     EntryLimit(u32, usize),

    #[error("Malformed header information")]
    HeaderError,

@ -35,9 +38,11 @@
pub struct Pak {
    subdirectory: Option<String>,

-     /// The path of the PAK file, can serve as an identifier or name as the
+     /// The path to the PAK file, can serve as an identifier or name as the
    /// header has no name for the file.
    path: PathBuf,

    /// Header information
    header: Header,

    unknown_pre_data: Vec<u32>,

@ -46,23 +51,38 @@
    entries: Vec<Entry>,
}

- struct FileLocation {
+ struct EntryLocation {
    offset: u32,
    length: u32,
}

+ pub struct PakLimits {
+     pub entry_limit: usize,
+     pub size_limit: usize,
+ }
+
+ impl Default for PakLimits {
+     fn default() -> Self {
+         Self {
+             entry_limit: 100_000,          // 100,000 entries
+             size_limit: u32::MAX as usize, // ~4 GiB
+         }
+     }
+ }

impl Pak {
    /// Convenience method to open a PAK file from a path and decode it
    pub fn open<P: ?Sized + AsRef<Path>>(path: &P) -> Result<Self, PakError> {
        let mut file = File::open(path)?;

-         Pak::decode(&mut file, path.as_ref().to_path_buf())
+         Pak::decode(&mut file, path.as_ref().to_path_buf(), PakLimits::default())
    }

    /// Decode a PAK file from a byte stream.
    pub fn decode<T: Seek + Read>(
        input: &mut T,
        path: PathBuf,
+         limits: PakLimits,
    ) -> Result<Self, PakError> {
        info!("Reading pak from {:?}", path);
        let mut input = BufReader::new(input);

@ -80,6 +100,10 @@ impl Pak {
            unknown4: input.read_u32::<LE>()?,
            flags: PakFlags(input.read_u32::<LE>()?),
        };
+
+         if header.entry_count >= limits.entry_limit as u32 {
+             return Err(PakError::EntryLimit(header.entry_count, limits.entry_limit));
+         }
        info!("{} entries detected", header.entry_count);
        debug!("Block size is {} bytes", header.block_size);
        debug!("Flag bits {:#032b}", header.flags().0);

@ -87,6 +111,7 @@ impl Pak {
        let first_offset = header.data_offset() / header.block_size();

        // Read some unknown data before the data we want
+         // TODO: This *must* be done differently for real, figure it out!
        let mut unknown_pre_data = Vec::new();
        while input.stream_position()? < header.data_offset() as u64 {
            let unknown = input.read_u32::<LE>()?;

@ -111,10 +136,7 @@ impl Pak {
        for _ in 0..header.entry_count() {
            let offset = input.read_u32::<LE>().unwrap();
            let length = input.read_u32::<LE>().unwrap();
-             offsets.push(FileLocation {
-                 offset,
-                 length,
-             });
+             offsets.push(EntryLocation { offset, length });
        }

        // Read all unknown_data1

@ -152,7 +174,11 @@ impl Pak {
        // Read all entry data
        debug!("Creating entry list");
        let mut entries: Vec<Entry> = Vec::new();
-         for (i, offset_info) in offsets.iter().enumerate().take(header.entry_count() as usize) {
+         for (i, offset_info) in offsets
+             .iter()
+             .enumerate()
+             .take(header.entry_count() as usize)
+         {
            debug!("Seeking to block {}", offset_info.offset);
            // Seek to and read the entry data
            input

@ -209,18 +235,16 @@ impl Pak {
    }

    /// Encode a PAK file into a byte stream.
-     pub fn encode<T: Write>(
-         &self,
-         mut output: &mut T
-     ) -> Result<(), PakError> {
+     pub fn encode<T: Write>(&self, mut output: &mut T) -> Result<(), PakError> {
        self.header.write_into(&mut output)?;

        // Write unknown data
        output.write_all(
-             &self.unknown_pre_data
+             &self
+                 .unknown_pre_data
                .iter()
                .flat_map(|dw| dw.to_le_bytes())
-                 .collect::<Vec<u8>>()
+                 .collect::<Vec<u8>>(),
        )?;

        // Write offsets and lengths

@ -239,15 +263,11 @@ impl Pak {
        // Write names if the flags indicate it should have them
        if self.header.flags().has_names() {
            if let Some(subdir) = &self.subdirectory {
-                 output.write_all(
-                     CString::new(subdir.as_bytes()).unwrap().to_bytes_with_nul()
-                 )?;
+                 output.write_all(CString::new(subdir.as_bytes()).unwrap().to_bytes_with_nul())?;
            }
            for entry in self.entries() {
                let name = entry.name.as_ref().unwrap();
-                 output.write_all(
-                     CString::new(name.as_bytes()).unwrap().to_bytes_with_nul()
-                 )?;
+                 output.write_all(CString::new(name.as_bytes()).unwrap().to_bytes_with_nul())?;
            }
        }

@ -257,8 +277,12 @@ impl Pak {

        for entry in self.entries() {
            //let block_size = entry.data.len().div_ceil(self.header().block_size as usize);
-             let mut remainder = 2048 - entry.data.len().rem_euclid(self.header().block_size as usize);
-             if remainder == 2048 {
+             let mut remainder = self.header().block_size as usize
+                 - entry
+                     .data
+                     .len()
+                     .rem_euclid(self.header().block_size as usize);
+             if remainder == self.header().block_size as usize {
                remainder = 0;
            }
            output.write_all(&entry.data)?;

@ -278,11 +302,7 @@ impl Pak {
    ///
    /// This function updates the offsets of all entries to fit within the
    /// chunk size specified in the header.
-     pub fn replace(
-         &mut self,
-         index: usize,
-         replacement_bytes: &[u8],
-     ) -> Result<(), PakError> {
+     pub fn replace(&mut self, index: usize, replacement_bytes: &[u8]) -> Result<(), PakError> {
        let block_size = self.header().block_size();

        let replaced_entry;

@ -290,7 +310,7 @@ impl Pak {
            replaced_entry = entry
        } else {
            log::error!("Entry {} not found!", index);
-             return Err(PakError::IndexError)
+             return Err(PakError::IndexError);
        };

        if let Some(name) = replaced_entry.name() {

@ -304,8 +324,7 @@ impl Pak {
        replaced_entry.length = replaced_entry.data.len() as u32;

        // Get the offset of the next entry based on the current one
-         let mut next_offset =
-             replaced_entry.offset + replaced_entry.length.div_ceil(block_size);
+         let mut next_offset = replaced_entry.offset + replaced_entry.length.div_ceil(block_size);

        // Update the position of all subsequent entries
        let mut i = 0;

@ -333,7 +352,7 @@ impl Pak {
        let index = if let Some(entry) = entry {
            entry.index
        } else {
-             return Err(PakError::IndexError)
+             return Err(PakError::IndexError);
        };

        self.replace(index, replacement_bytes)?;

@ -341,16 +360,12 @@ impl Pak {
        Ok(())
    }

-     pub fn replace_by_id(
-         &mut self,
-         id: u32,
-         replacement_bytes: &[u8],
-     ) -> Result<(), PakError> {
+     pub fn replace_by_id(&mut self, id: u32, replacement_bytes: &[u8]) -> Result<(), PakError> {
        let entry = self.get_entry_by_id(id);
        let index = if let Some(entry) = entry {
            entry.index
        } else {
-             return Err(PakError::IndexError)
+             return Err(PakError::IndexError);
        };

        self.replace(index, replacement_bytes)?;

@ -369,16 +384,13 @@ impl Pak {

    /// Get an individual entry from the PAK by its ID
    pub fn get_entry_by_id(&mut self, id: u32) -> Option<&mut Entry> {
-         self.entries
-             .get_mut((id - self.header.id_start) as usize)
+         self.entries.get_mut((id - self.header.id_start) as usize)
    }

    pub fn get_entry_by_name(&mut self, name: &str) -> Option<&mut Entry> {
        self.entries
            .iter_mut()
-             .find(|e|
-                 e.name.as_ref().is_some_and(|n| n == name)
-             )
+             .find(|e| e.name.as_ref().is_some_and(|n| n == name))
    }

    /// Get a list of all entries from the PAK

@ -390,8 +402,7 @@ impl Pak {
    pub fn contains_name(&self, name: &str) -> bool {
        self.entries
            .iter()
-             .any(|e| e.name.as_ref()
-                 .is_some_and(|n| n == name))
+             .any(|e| e.name.as_ref().is_some_and(|n| n == name))
    }
}
luca_script/Cargo.toml (new file, 14 lines)
@ -0,0 +1,14 @@
[package]
name = "luca_script"
version = "0.1.0"
edition = "2021"
authors.workspace = true

[dependencies]
byteorder-lite = "0.1.0"
encoding_rs = "0.8.35"
serde = { version = "1.0.215", features = ["derive"] }
serde_json = "1.0.133"

[lints]
workspace = true
luca_script/src/LBEE_opcodes (new file, 107 lines)
@ -0,0 +1,107 @@
EQU
|
||||
EQUN
|
||||
EQUV
|
||||
ADD
|
||||
SUB
|
||||
MUL
|
||||
DIV
|
||||
MOD
|
||||
AND
|
||||
OR
|
||||
RANDOM
|
||||
VARSTR
|
||||
SET
|
||||
FLAGCLR
|
||||
GOTO
|
||||
ONGOTO
|
||||
GOSUB
|
||||
IFY
|
||||
IFN
|
||||
RETURN
|
||||
JUMP
|
||||
FARCALL
|
||||
FARRETURN
|
||||
JUMPPOINT
|
||||
END
|
||||
VARSTR_SET
|
||||
TALKNAME_SET
|
||||
ARFLAGSET
|
||||
COLORBG_SET
|
||||
SPLINE_SET
|
||||
SHAKELIST_SET
|
||||
MESSAGE
|
||||
MESSAGE_CLEAR
|
||||
SELECT
|
||||
CLOSE_WINDOW
|
||||
LOG
|
||||
LOG_PAUSE
|
||||
LOG_END
|
||||
VOICE
|
||||
WAIT_COUNT
|
||||
WAIT_TIME
|
||||
FFSTOP
|
||||
INIT
|
||||
STOP
|
||||
IMAGELOAD
|
||||
IMAGEUPADTE
|
||||
ARC
|
||||
MOVE
|
||||
MOVE2
|
||||
ROT
|
||||
PEND
|
||||
FADE
|
||||
SCALE
|
||||
SHAKE
|
||||
SHAKELIST
|
||||
BASE
|
||||
MCMOVE
|
||||
MCARC
|
||||
MCROT
|
||||
MCSHAKE
|
||||
MCFADE
|
||||
WAIT
|
||||
DRAW
|
||||
WIPE
|
||||
FRAMEON
|
||||
FRAMEOFF
|
||||
FW
|
||||
SCISSOR
|
||||
DELAY
|
||||
RASTER
|
||||
TONE
|
||||
SCALECOSSIN
|
||||
BMODE
|
||||
SIZE
|
||||
SPLINE
|
||||
DISP
|
||||
MASK
|
||||
SG_QUAKE
|
||||
BGM
|
||||
BGM_WAITSTART
|
||||
BGM_WAITFADE
|
||||
SE
|
||||
SE_STOP
|
||||
SE_WAIT
|
||||
VOLUME
|
||||
MOVIE
|
||||
SETCGFLAG
|
||||
EX
|
||||
TROPHY
|
||||
SETBGMFLAG
|
||||
TASK
|
||||
BTFUNC
|
||||
BATTLE
|
||||
KOEP
|
||||
BT_ACCESSORY_SELECT
|
||||
UNDO_CLEAR
|
||||
PTFUNC
|
||||
PT
|
||||
GMFUNC
|
||||
GM
|
||||
DEL_CALLSTACK
|
||||
FULLQUAKE_ZOOM
|
||||
LBFUNC
|
||||
LBBG
|
||||
HAIKEI_SET
|
||||
SAYAVOICETEXT
|
||||
UNKNOWN
|
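A script stores each opcode as a single byte, and that byte is simply the name's zero-based position in this list (main.rs below indexes the table with OPCODES[number as usize] and recovers the byte with .position(...)). A small sketch of that two-way mapping, using a truncated hard-coded table for illustration rather than reading the file:

// Sketch of the index <-> name mapping implied by the opcode list; the
// short hard-coded table here is for illustration, the real tool reads
// all 107 names from the LBEE_opcodes file at runtime.
fn main() {
    let opcodes = ["EQU", "EQUN", "EQUV", "ADD", "SUB"];

    // Opcode byte -> name: plain indexing, exactly like OPCODES[number as usize].
    let number: u8 = 3;
    println!("opcode {number} is {}", opcodes[number as usize]); // prints "ADD"

    // Name -> opcode byte: the name's position in the list.
    let byte = opcodes.iter().position(|o| *o == "SUB").unwrap() as u8;
    assert_eq!(byte, 4);
}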
582 luca_script/src/main.rs Normal file
@@ -0,0 +1,582 @@
|
|||
mod utils;
|
||||
|
||||
use std::{fs, io::{Cursor, Read, Write}, sync::LazyLock};
|
||||
|
||||
use byteorder_lite::{WriteBytesExt, ReadBytesExt, LE};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use utils::Encoding;
|
||||
|
||||
static OPCODES: LazyLock<Vec<String>> = LazyLock::new(|| fs::read_to_string("LBEE_opcodes")
|
||||
.unwrap()
|
||||
.split("\n")
|
||||
.map(|s| s.to_owned())
|
||||
.collect()
|
||||
);
|
||||
|
||||
fn main() {
|
||||
let mut script_file = fs::File::open("SEEN0513").unwrap();
|
||||
let script_len = script_file.metadata().unwrap().len();
|
||||
let script = decode_script(&mut script_file, script_len);
|
||||
|
||||
/*
|
||||
for c in script.opcodes {
|
||||
print!("{:>5}", c.position);
|
||||
print!("{:>12}: ", c.string);
|
||||
if let Some(o) = c.opcode_specifics {
|
||||
print!("{}", serde_json::ser::to_string(&o).unwrap());
|
||||
} else if let Some(r) = c.fixed_param {
|
||||
print!("{:?}", r);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
*/
|
||||
|
||||
//println!("{}", serde_json::ser::to_string_pretty(&script).unwrap());
|
||||
|
||||
let mut rewrite_script = fs::File::create("SEEN0513-rewritten").unwrap();
|
||||
write_script(&mut rewrite_script, script).unwrap();
|
||||
println!("Wrote out successfully");
|
||||
}
|
||||
|
||||
fn decode_script<S: Read>(script_stream: &mut S, length: u64) -> Script {
|
||||
let mut opcodes = Vec::new();
|
||||
let mut offset = 0;
|
||||
let mut i = 0;
|
||||
let mut pos = 0;
|
||||
while offset < length as usize {
|
||||
// Read all base info
|
||||
let length = script_stream.read_u16::<LE>().unwrap() as usize;
|
||||
let number = script_stream.read_u8().unwrap();
|
||||
let flag = script_stream.read_u8().unwrap();
|
||||
let string = OPCODES[number as usize].clone();
|
||||
|
||||
offset += 4;
|
||||
|
||||
let raw_len = length - 4;
|
||||
let mut raw_bytes = vec![0u8; raw_len];
|
||||
script_stream.read_exact(&mut raw_bytes).unwrap();
|
||||
offset += raw_len;
|
||||
|
||||
// Read extra align byte if alignment needed
|
||||
if length % 2 != 0 {
|
||||
offset += 1;
|
||||
Some(script_stream.read_u8().unwrap())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mut fixed_param = Vec::new();
|
||||
let param_bytes = match flag {
|
||||
0 => raw_bytes.clone(),
|
||||
f if f < 2 => {
|
||||
fixed_param = vec![
|
||||
u16::from_le_bytes(raw_bytes[..2].try_into().unwrap()),
|
||||
];
|
||||
raw_bytes[2..].to_vec()
|
||||
}
|
||||
_ => {
|
||||
fixed_param = vec![
|
||||
u16::from_le_bytes(raw_bytes[..2].try_into().unwrap()),
|
||||
u16::from_le_bytes(raw_bytes[2..4].try_into().unwrap()),
|
||||
];
|
||||
raw_bytes[4..].to_vec()
|
||||
}
|
||||
};
|
||||
|
||||
opcodes.push(Opcode {
|
||||
index: i,
|
||||
position: pos,
|
||||
length,
|
||||
opcode_number: number,
|
||||
string: string.clone(),
|
||||
flag,
|
||||
fixed_param,
|
||||
opcode_specifics: SpecificOpcode::decode(&string, ¶m_bytes),
|
||||
param_bytes,
|
||||
});
|
||||
|
||||
// Break if END opcode reached
|
||||
if &string == "END" {
|
||||
break;
|
||||
}
|
||||
|
||||
pos += (length + 1) & !1;
|
||||
i += 1;
|
||||
}
|
||||
|
||||
Script {
|
||||
code_count: opcodes.len(),
|
||||
opcodes,
|
||||
}
|
||||
}
|
||||
|
||||
fn write_script<W: Write>(script_output: &mut W, script: Script) -> Result<(), ()> {
|
||||
let mut position = 0;
|
||||
for opcode in script.opcodes {
|
||||
let mut total = 0;
|
||||
script_output.write_u16::<LE>(opcode.length as u16).unwrap();
|
||||
script_output.write_u8(OPCODES.iter().position(|l| *l == opcode.string).unwrap() as u8).unwrap();
|
||||
script_output.write_u8(opcode.flag).unwrap();
|
||||
total += 4;
|
||||
|
||||
for p in opcode.fixed_param {
|
||||
script_output.write_u16::<LE>(p).unwrap();
|
||||
total += 2;
|
||||
}
|
||||
|
||||
script_output.write_all(&opcode.param_bytes).unwrap();
|
||||
total += opcode.param_bytes.len();
|
||||
if (position + total) % 2 != 0 {
|
||||
script_output.write_u8(0).unwrap();
|
||||
total += 1;
|
||||
}
|
||||
position += total;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct Script {
|
||||
opcodes: Vec<Opcode>,
|
||||
code_count: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct Opcode {
|
||||
index: usize,
|
||||
position: usize,
|
||||
length: usize,
|
||||
opcode_number: u8,
|
||||
string: String,
|
||||
|
||||
flag: u8,
|
||||
fixed_param: Vec<u16>,
|
||||
param_bytes: Vec<u8>,
|
||||
opcode_specifics: Option<SpecificOpcode>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
#[derive(Serialize, Deserialize)]
|
||||
enum SpecificOpcode {
|
||||
Message {
|
||||
voice_id: u16,
|
||||
messages: Vec<String>,
|
||||
end: Vec<u8>,
|
||||
},
|
||||
Add {
|
||||
var1: u16,
|
||||
expr: String,
|
||||
},
|
||||
EquN {
|
||||
var1: u16,
|
||||
value: Option<u16>, //?
|
||||
},
|
||||
Select {
|
||||
var_id: u16,
|
||||
var0: u16,
|
||||
var1: u16,
|
||||
var2: u16,
|
||||
messages: Vec<String>,
|
||||
var3: u16,
|
||||
var4: u16,
|
||||
var5: u16,
|
||||
},
|
||||
_Battle,
|
||||
Task {
|
||||
task_type: u16,
|
||||
var1: Option<u16>,
|
||||
var2: Option<u16>,
|
||||
var3: Option<u16>,
|
||||
var4: Option<u16>,
|
||||
message_1: Option<Vec<String>>,
|
||||
message_2: Option<Vec<String>>,
|
||||
raw_args: Option<Vec<u8>>,
|
||||
},
|
||||
SayAVoiceText {
|
||||
voice_id: u16,
|
||||
messages: Vec<String>,
|
||||
},
|
||||
VarStrSet {
|
||||
varstr_id: u16,
|
||||
varstr_str: String,
|
||||
},
|
||||
GoTo {
|
||||
jump_pos: u32,
|
||||
},
|
||||
GoSub {
|
||||
arg1: u16,
|
||||
jump_pos: u32,
|
||||
end: Vec<u8>,
|
||||
},
|
||||
Jump {
|
||||
filename: String,
|
||||
jump_pos: Option<u32>,
|
||||
},
|
||||
FarCall {
|
||||
index: u16,
|
||||
filename: String,
|
||||
jump_pos: u32,
|
||||
end: Vec<u8>,
|
||||
},
|
||||
IfN {
|
||||
condition: String,
|
||||
jump_pos: u32,
|
||||
},
|
||||
IfY {
|
||||
condition: String,
|
||||
jump_pos: u32,
|
||||
},
|
||||
Random {
|
||||
var1: u16,
|
||||
rnd_from: String,
|
||||
rnd_to: String,
|
||||
},
|
||||
ImageLoad {
|
||||
mode: u16,
|
||||
image_id: u16,
|
||||
var1: Option<u16>,
|
||||
pos_x: Option<u16>,
|
||||
pos_y: Option<u16>,
|
||||
end: Vec<u8>,
|
||||
},
|
||||
Bgm {
|
||||
bgm_id: u32,
|
||||
arg2: Option<u16>,
|
||||
},
|
||||
Unknown(Vec<u8>),
|
||||
}
|
||||
|
||||
impl SpecificOpcode {
|
||||
pub fn decode(opcode_str: &str, param_bytes: &[u8]) -> Option<Self> {
|
||||
if param_bytes.is_empty() {
|
||||
return None
|
||||
}
|
||||
|
||||
let mut cursor_param = Cursor::new(param_bytes);
|
||||
|
||||
Some(match opcode_str {
|
||||
"MESSAGE" => Self::decode_message(&mut cursor_param),
|
||||
"SAYAVOICETEXT" => Self::decode_sayavoicetext(&mut cursor_param),
|
||||
"SELECT" => Self::decode_select(&mut cursor_param),
|
||||
"TASK" => Self::decode_task(&mut cursor_param),
|
||||
|
||||
"ADD" => Self::decode_add(&mut cursor_param),
|
||||
"EQUN" => Self::decode_equn(&mut cursor_param),
|
||||
|
||||
"RANDOM" => Self::decode_random(&mut cursor_param),
|
||||
"IFY" => Self::decode_ifn_ify(&mut cursor_param, false),
|
||||
"IFN" => Self::decode_ifn_ify(&mut cursor_param, true),
|
||||
"JUMP" => Self::decode_jump(&mut cursor_param),
|
||||
"GOTO" => Self::decode_goto(&mut cursor_param),
|
||||
"GOSUB" => Self::decode_gosub(&mut cursor_param),
|
||||
"FARCALL" => Self::decode_farcall(&mut cursor_param),
|
||||
"VARSTR_SET" => Self::decode_varstr_set(&mut cursor_param),
|
||||
|
||||
"IMAGELOAD" => Self::decode_imageload(&mut cursor_param),
|
||||
"BGM" => Self::decode_bgm(&mut cursor_param),
|
||||
_ => Self::Unknown(param_bytes.to_vec())
|
||||
})
|
||||
}
|
||||
|
||||
fn decode_message<R: Read>(param_bytes: &mut R) -> Self {
|
||||
let voice_id = param_bytes.read_u16::<LE>().unwrap();
|
||||
|
||||
// TODO: This will need to change per-game based on the number of
|
||||
// languages and their encodings
|
||||
let mut messages = Vec::new();
|
||||
for _ in 0..2 {
|
||||
let string = utils::decode_string_v1(param_bytes, Encoding::UTF16).unwrap();
|
||||
messages.push(string);
|
||||
}
|
||||
|
||||
let mut end = Vec::new();
|
||||
param_bytes.read_to_end(&mut end).unwrap();
|
||||
|
||||
Self::Message {
|
||||
voice_id,
|
||||
messages,
|
||||
end,
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_add<R: Read>(param_bytes: &mut R) -> Self {
|
||||
let var1 = param_bytes.read_u16::<LE>().unwrap();
|
||||
let expr = utils::decode_string_v1(param_bytes, Encoding::ShiftJIS).unwrap();
|
||||
|
||||
Self::Add { var1, expr }
|
||||
}
|
||||
|
||||
fn decode_equn<R: Read>(param_bytes: &mut R) -> Self {
|
||||
let var1 = param_bytes.read_u16::<LE>().unwrap();
|
||||
let value = param_bytes.read_u16::<LE>().ok();
|
||||
|
||||
Self::EquN { var1, value }
|
||||
}
|
||||
|
||||
fn decode_select<R: Read>(param_bytes: &mut R) -> Self {
|
||||
let var_id = param_bytes.read_u16::<LE>().unwrap();
|
||||
let var0 = param_bytes.read_u16::<LE>().unwrap();
|
||||
let var1 = param_bytes.read_u16::<LE>().unwrap();
|
||||
let var2 = param_bytes.read_u16::<LE>().unwrap();
|
||||
|
||||
// TODO: This will need to change per-game based on the number of
|
||||
// languages and their encodings
|
||||
let mut messages = Vec::new();
|
||||
for _ in 0..2 {
|
||||
let string = utils::decode_string_v1(param_bytes, Encoding::UTF16).unwrap();
|
||||
messages.push(string);
|
||||
}
|
||||
|
||||
let var3 = param_bytes.read_u16::<LE>().unwrap();
|
||||
let var4 = param_bytes.read_u16::<LE>().unwrap();
|
||||
let var5 = param_bytes.read_u16::<LE>().unwrap();
|
||||
|
||||
Self::Select {
|
||||
var_id,
|
||||
var0,
|
||||
var1,
|
||||
var2,
|
||||
messages,
|
||||
var3,
|
||||
var4,
|
||||
var5
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_random<R: Read>(param_bytes: &mut R) -> Self {
|
||||
let var1 = param_bytes.read_u16::<LE>().unwrap();
|
||||
let rnd_from = utils::decode_string_v1(param_bytes, Encoding::ShiftJIS).unwrap();
|
||||
let rnd_to = utils::decode_string_v1(param_bytes, Encoding::ShiftJIS).unwrap();
|
||||
|
||||
Self::Random { var1, rnd_from, rnd_to }
|
||||
}
|
||||
|
||||
fn decode_ifn_ify<R: Read>(param_bytes: &mut R, ifn: bool) -> Self {
|
||||
let condition = utils::decode_string_v1(param_bytes, Encoding::ShiftJIS).unwrap();
|
||||
let jump_pos = param_bytes.read_u32::<LE>().unwrap();
|
||||
|
||||
if ifn {
|
||||
Self::IfN { condition, jump_pos }
|
||||
} else {
|
||||
Self::IfY { condition, jump_pos }
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_jump<R: Read>(param_bytes: &mut R) -> Self {
|
||||
let filename = utils::decode_string_v1(param_bytes, Encoding::ShiftJIS).unwrap();
|
||||
|
||||
let jump_pos = param_bytes.read_u32::<LE>().ok();
|
||||
|
||||
Self::Jump { filename, jump_pos }
|
||||
}
|
||||
|
||||
fn decode_imageload<R: Read>(param_bytes: &mut R) -> Self {
|
||||
let mode = param_bytes.read_u16::<LE>().unwrap();
|
||||
let image_id = param_bytes.read_u16::<LE>().unwrap();
|
||||
|
||||
// These will only be read if there is anything to be read
|
||||
let var1 = param_bytes.read_u16::<LE>().ok();
|
||||
let pos_x = param_bytes.read_u16::<LE>().ok();
|
||||
let pos_y = param_bytes.read_u16::<LE>().ok();
|
||||
|
||||
let mut end = Vec::new();
|
||||
param_bytes.read_to_end(&mut end).unwrap();
|
||||
|
||||
Self::ImageLoad {
|
||||
mode,
|
||||
image_id,
|
||||
var1,
|
||||
pos_x,
|
||||
pos_y,
|
||||
end,
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_goto<R: Read>(param_bytes: &mut R) -> Self {
|
||||
let jump_pos = param_bytes.read_u32::<LE>().unwrap();
|
||||
|
||||
Self::GoTo { jump_pos }
|
||||
}
|
||||
|
||||
fn decode_gosub<R: Read>(param_bytes: &mut R) -> Self {
|
||||
let arg1 = param_bytes.read_u16::<LE>().unwrap();
|
||||
let jump_pos = param_bytes.read_u32::<LE>().unwrap();
|
||||
|
||||
let mut end = Vec::new();
|
||||
param_bytes.read_to_end(&mut end).unwrap();
|
||||
|
||||
Self::GoSub {
|
||||
arg1,
|
||||
jump_pos,
|
||||
end,
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_varstr_set<R: Read>(param_bytes: &mut R) -> Self {
|
||||
let varstr_id = param_bytes.read_u16::<LE>().unwrap();
|
||||
let varstr_str = utils::decode_string_v1(param_bytes, Encoding::ShiftJIS).unwrap();
|
||||
|
||||
Self::VarStrSet { varstr_id, varstr_str }
|
||||
}
|
||||
|
||||
fn decode_farcall<R: Read>(param_bytes: &mut R) -> Self {
|
||||
let index = param_bytes.read_u16::<LE>().unwrap();
|
||||
let filename = utils::decode_string_v1(param_bytes, Encoding::ShiftJIS).unwrap();
|
||||
let jump_pos = param_bytes.read_u32::<LE>().unwrap();
|
||||
|
||||
let mut end = Vec::new();
|
||||
param_bytes.read_to_end(&mut end).unwrap();
|
||||
|
||||
Self::FarCall {
|
||||
index,
|
||||
filename,
|
||||
jump_pos,
|
||||
end,
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_sayavoicetext<R: Read>(param_bytes: &mut R) -> Self {
|
||||
let voice_id = param_bytes.read_u16::<LE>().unwrap();
|
||||
|
||||
// TODO: This will need to change per-game based on the number of
|
||||
// languages and their encodings
|
||||
let mut messages = Vec::new();
|
||||
for _ in 0..2 {
|
||||
let string = utils::decode_string_v1(param_bytes, Encoding::UTF16).unwrap();
|
||||
messages.push(string);
|
||||
}
|
||||
|
||||
Self::SayAVoiceText {
|
||||
voice_id,
|
||||
messages,
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_bgm<R: Read>(param_bytes: &mut R) -> Self {
|
||||
// TODO: invesigate the accuracy of this
|
||||
let bgm_id = param_bytes.read_u32::<LE>().unwrap();
|
||||
|
||||
let arg2 = param_bytes.read_u16::<LE>().ok();
|
||||
|
||||
Self::Bgm {
|
||||
bgm_id,
|
||||
arg2,
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_task<R: Read>(param_bytes: &mut R) -> Self {
|
||||
let task_type = param_bytes.read_u16::<LE>().unwrap();
|
||||
|
||||
let mut var1 = None;
|
||||
let mut var2 = None;
|
||||
let mut var3 = None;
|
||||
let mut var4 = None;
|
||||
let mut message_1 = None;
|
||||
let mut message_2 = None;
|
||||
let raw_args: Option<Vec<u8>> = None;
|
||||
|
||||
if false {
|
||||
return Self::Task { task_type, var1, var2, var3, var4, message_1, message_2, raw_args };
|
||||
}
|
||||
|
||||
match task_type {
|
||||
4 => {
|
||||
let var1 = param_bytes.read_u16::<LE>().ok();
|
||||
|
||||
if false {
|
||||
return Self::Task { task_type, var1, var2, var3, var4, message_1, message_2, raw_args };
|
||||
}
|
||||
|
||||
if [0, 4, 5].contains(&var1.unwrap()) {
|
||||
var2 = param_bytes.read_u16::<LE>().ok();
|
||||
|
||||
let mut messages = Vec::new();
|
||||
for _ in 0..2 {
|
||||
let string = utils::decode_string_v1(param_bytes, Encoding::UTF16).unwrap();
|
||||
messages.push(string);
|
||||
}
|
||||
message_1 = Some(messages);
|
||||
} else if var1.unwrap() == 1 {
|
||||
var2 = param_bytes.read_u16::<LE>().ok();
|
||||
var3 = param_bytes.read_u16::<LE>().ok();
|
||||
var4 = param_bytes.read_u16::<LE>().ok();
|
||||
|
||||
// Get first set of messages
|
||||
let mut messages = Vec::new();
|
||||
for _ in 0..2 {
|
||||
let string = utils::decode_string_v1(param_bytes, Encoding::UTF16).unwrap();
|
||||
messages.push(string);
|
||||
}
|
||||
message_1 = Some(messages);
|
||||
|
||||
// Get second set of messages
|
||||
let mut messages = Vec::new();
|
||||
for _ in 0..2 {
|
||||
let string = utils::decode_string_v1(param_bytes, Encoding::UTF16).unwrap();
|
||||
messages.push(string);
|
||||
}
|
||||
message_2 = Some(messages);
|
||||
} else if var1.unwrap() == 6 {
|
||||
var2 = param_bytes.read_u16::<LE>().ok();
|
||||
var3 = param_bytes.read_u16::<LE>().ok();
|
||||
|
||||
let mut messages = Vec::new();
|
||||
for _ in 0..2 {
|
||||
let string = utils::decode_string_v1(param_bytes, Encoding::UTF16).unwrap();
|
||||
messages.push(string);
|
||||
}
|
||||
message_1 = Some(messages);
|
||||
} else {
|
||||
return Self::Task { task_type, var1, var2, var3, var4, message_1, message_2, raw_args };
|
||||
}
|
||||
}
|
||||
54 => {
|
||||
let string = utils::decode_string_v1(param_bytes, Encoding::UTF16).unwrap();
|
||||
message_1 = Some(vec![string]);
|
||||
}
|
||||
69 => {
|
||||
var1 = param_bytes.read_u16::<LE>().ok();
|
||||
|
||||
// Get first set of messages
|
||||
let mut messages = Vec::new();
|
||||
for _ in 0..2 {
|
||||
let string = utils::decode_string_v1(param_bytes, Encoding::UTF16).unwrap();
|
||||
messages.push(string);
|
||||
}
|
||||
message_1 = Some(messages);
|
||||
|
||||
// Get second set of messages
|
||||
let mut messages = Vec::new();
|
||||
for _ in 0..2 {
|
||||
let string = utils::decode_string_v1(param_bytes, Encoding::UTF16).unwrap();
|
||||
messages.push(string);
|
||||
}
|
||||
message_2 = Some(messages);
|
||||
}
|
||||
_ => return Self::Task {
|
||||
task_type,
|
||||
var1,
|
||||
var2,
|
||||
var3,
|
||||
var4,
|
||||
message_1,
|
||||
message_2,
|
||||
raw_args,
|
||||
}
|
||||
}
|
||||
|
||||
Self::Task {
|
||||
task_type,
|
||||
var1,
|
||||
var2,
|
||||
var3,
|
||||
var4,
|
||||
message_1,
|
||||
message_2,
|
||||
raw_args,
|
||||
}
|
||||
}
|
||||
}
|
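decode_script and write_script above imply a simple record layout: a little-endian u16 total length, an opcode byte, a flag byte, zero, one, or two fixed u16 parameters depending on the flag, the remaining parameter bytes, and a pad byte when the length is odd. A round-trip sketch of one such record; write_record is a made-up helper, and only the byteorder-lite calls already used above are assumed.

use std::io::{Cursor, Write};

use byteorder_lite::{ReadBytesExt, WriteBytesExt, LE};

// Write a single opcode record in the layout decode_script expects:
// u16 length | u8 opcode | u8 flag | fixed u16 params | raw params | optional pad.
fn write_record(out: &mut Vec<u8>, opcode: u8, flag: u8, fixed: &[u16], params: &[u8]) {
    let length = 4 + fixed.len() * 2 + params.len();
    out.write_u16::<LE>(length as u16).unwrap();
    out.write_u8(opcode).unwrap();
    out.write_u8(flag).unwrap();
    for p in fixed {
        out.write_u16::<LE>(*p).unwrap();
    }
    out.write_all(params).unwrap();
    if length % 2 != 0 {
        out.write_u8(0).unwrap(); // align the next record to 2 bytes
    }
}

fn main() {
    let mut buf = Vec::new();
    // A parameterless record; byte 24 is END in the list above, counting from zero.
    write_record(&mut buf, 24, 0, &[], &[]);

    let mut cur = Cursor::new(buf);
    let length = cur.read_u16::<LE>().unwrap();
    let opcode = cur.read_u8().unwrap();
    let flag = cur.read_u8().unwrap();
    assert_eq!((length, opcode, flag), (4, 24, 0));
}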
70 luca_script/src/utils.rs Normal file
@@ -0,0 +1,70 @@
use std::{error::Error, io::{Read, Write}};

use encoding_rs::*;
use byteorder_lite::{LE, ReadBytesExt};

pub enum Encoding {
    #[allow(dead_code)]
    UTF8,
    UTF16,
    ShiftJIS,
}

#[allow(dead_code)]
impl Encoding {
    pub fn width(&self) -> usize {
        match self {
            Self::UTF8 | Self::ShiftJIS => 1,
            Self::UTF16 => 2,
        }
    }
}

pub fn decode_string_v1<R: Read>(
    input: &mut R,
    format: Encoding,
) -> Result<String, Box<dyn Error>> {
    // Find the end of the string
    let mut string_buf = Vec::new();
    match format {
        Encoding::UTF8 | Encoding::ShiftJIS => {
            let mut string_byte = input.read_u8()?;
            while string_byte != 0 {
                string_buf.push(string_byte);
                string_byte = input.read_u8()?;
            }
        },
        Encoding::UTF16 => {
            let mut string_u16 = input.read_u16::<LE>()?;
            while string_u16 != 0 {
                string_buf.write_all(&string_u16.to_le_bytes()).unwrap();
                string_u16 = input.read_u16::<LE>()?;
            }
        },
    }

    // Get the actual string data using the proper decoder
    let string = match format {
        Encoding::UTF8 => String::from_utf8(string_buf)?,
        Encoding::UTF16 => {
            String::from_utf16(
                &string_buf.chunks_exact(2)
                    .map(|e| u16::from_le_bytes(e.try_into().unwrap()))
                    .collect::<Vec<u16>>()
            )?
        }
        Encoding::ShiftJIS => SHIFT_JIS.decode(&string_buf).0.to_string(),
    };

    Ok(string)
}

#[allow(dead_code)]
pub fn encode_string_v1(string: String, format: Encoding) -> Vec<u8> {
    match format {
        Encoding::UTF8 => string.as_bytes().to_vec(),
        Encoding::UTF16 => string.encode_utf16().flat_map(|b| b.to_le_bytes()).collect(),
        Encoding::ShiftJIS => SHIFT_JIS.encode(&string).0.to_vec(),
    }
}

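A short usage sketch for the helpers above. One detail worth noting: encode_string_v1 emits only the encoded bytes, while decode_string_v1 reads until a zero terminator, so a round trip has to append that terminator itself (the test string here is arbitrary).

// Round-trip sketch for the string helpers above. encode_string_v1 emits the
// raw encoded bytes only, so the null terminator decode_string_v1 looks for
// is appended by hand here.
use std::io::Cursor;

fn main() {
    let original = "テスト".to_string();

    let mut bytes = encode_string_v1(original.clone(), Encoding::UTF16);
    bytes.extend_from_slice(&0u16.to_le_bytes()); // UTF-16 terminator

    let decoded = decode_string_v1(&mut Cursor::new(bytes), Encoding::UTF16).unwrap();
    assert_eq!(decoded, original);
}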
@@ -1,7 +1,7 @@
 [package]
 name = "pak_explorer"
-edition = "2021"
-version = "0.1.1"
+edition = "2024"
+version = "0.1.3"
 description = """
 A simple GUI for exploring and making modifications to LUCA System PAK files.
 """
@@ -10,11 +10,16 @@ authors.workspace = true
 publish = false

 [dependencies]
-cz = { path = "../cz/", features = ["png"] }
-eframe = { version = "0.28.1", default-features = false, features = ["wayland", "x11", "accesskit", "default_fonts", "wgpu"] }
-egui_extras = "0.28.1"
+colog = "1.3"
+cz = { path = "../cz/" }
+eframe = { version = "0.29", default-features = false, features = ["wayland", "x11", "accesskit", "default_fonts", "wgpu"] }
+egui_extras = "0.29"
+image = { version = "0.25", default-features = false, features = ["png"] }
+kira = "0.10"
+log = "0.4"
 luca_pak = { path = "../luca_pak/" }
-rfd = "0.14.1"
+rfd = "0.15"
+symphonia = "0.5.4"

 [lints]
 workspace = true

@ -1,24 +1,40 @@
|
|||
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release
|
||||
|
||||
use std::fs;
|
||||
|
||||
use eframe::egui::{self, ColorImage, Image, TextureFilter, TextureHandle, TextureOptions};
|
||||
use eframe::egui::{
|
||||
self, ColorImage, Image, ProgressBar, TextureFilter, TextureHandle, TextureOptions, ThemePreference
|
||||
};
|
||||
use kira::{sound::static_sound::{StaticSoundData, StaticSoundHandle}, AudioManager, AudioManagerSettings, DefaultBackend, Tween};
|
||||
use log::error;
|
||||
use luca_pak::{entry::EntryType, Pak};
|
||||
use std::{fs, io::Cursor, time::Duration};
|
||||
|
||||
fn main() -> eframe::Result {
|
||||
colog::default_builder()
|
||||
.filter(None, log::LevelFilter::Warn)
|
||||
.init();
|
||||
|
||||
let options = eframe::NativeOptions {
|
||||
viewport: egui::ViewportBuilder::default().with_inner_size([1024.0, 800.0]),
|
||||
follow_system_theme: true,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let manager = AudioManager::<DefaultBackend>::new(AudioManagerSettings::default()).unwrap();
|
||||
|
||||
eframe::run_native(
|
||||
"LUCA PAK Explorer",
|
||||
options,
|
||||
Box::new(|cc| {
|
||||
// This gives us image support:
|
||||
egui_extras::install_image_loaders(&cc.egui_ctx);
|
||||
|
||||
Ok(Box::<PakExplorer>::default())
|
||||
Box::new(|ctx| {
|
||||
let ppp = ctx.egui_ctx.pixels_per_point() * 1.5;
|
||||
ctx.egui_ctx.set_pixels_per_point(ppp);
|
||||
Ok(Box::new(PakExplorer {
|
||||
open_file: None,
|
||||
selected_entry: None,
|
||||
image_texture: None,
|
||||
hex_string: None,
|
||||
audio_player: manager,
|
||||
audio_handle: None,
|
||||
audio_duration: None,
|
||||
}))
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
@ -28,40 +44,45 @@ struct PakExplorer {
|
|||
selected_entry: Option<luca_pak::entry::Entry>,
|
||||
image_texture: Option<egui::TextureHandle>,
|
||||
hex_string: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
impl Default for PakExplorer {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
open_file: None,
|
||||
selected_entry: None,
|
||||
image_texture: None,
|
||||
hex_string: None,
|
||||
}
|
||||
}
|
||||
audio_player: AudioManager,
|
||||
audio_handle: Option<StaticSoundHandle>,
|
||||
audio_duration: Option<Duration>,
|
||||
}
|
||||
|
||||
impl eframe::App for PakExplorer {
|
||||
fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
|
||||
egui::CentralPanel::default().show(ctx, |ui| {
|
||||
ctx.set_pixels_per_point(1.5);
|
||||
ui.heading("PAK File Explorer");
|
||||
ctx.options_mut(|o| o.theme_preference = ThemePreference::System);
|
||||
|
||||
ui.horizontal(|ui| {
|
||||
if ui.button("Open file…").clicked() {
|
||||
if ui.button("Open file").clicked() {
|
||||
if let Some(path) = rfd::FileDialog::new().pick_file() {
|
||||
let pak = Pak::open(&path).unwrap();
|
||||
self.open_file = Some(pak);
|
||||
let pak = match Pak::open(&path) {
|
||||
Ok(pak) => Some(pak),
|
||||
Err(e) => {
|
||||
error!("Unable to read selected file as PAK: {}", e);
|
||||
None
|
||||
}
|
||||
};
|
||||
self.open_file = pak;
|
||||
self.selected_entry = None;
|
||||
self.image_texture = None;
|
||||
self.hex_string = None;
|
||||
|
||||
if let Some(a) = self.audio_handle.as_mut() {
|
||||
a.stop(Tween::default());
|
||||
}
|
||||
|
||||
self.audio_handle = None;
|
||||
self.audio_duration = None;
|
||||
}
|
||||
}
|
||||
if let Some(pak) = &self.open_file {
|
||||
if ui.button("Save PAK…").clicked() {
|
||||
if ui.button("Save PAK").clicked() {
|
||||
if let Some(path) = rfd::FileDialog::new()
|
||||
.set_file_name(pak.path().file_name().unwrap().to_string_lossy())
|
||||
.save_file()
|
||||
.set_file_name(pak.path().file_name().unwrap().to_string_lossy())
|
||||
.save_file()
|
||||
{
|
||||
pak.save(&path).unwrap();
|
||||
}
|
||||
|
@ -72,7 +93,10 @@ impl eframe::App for PakExplorer {
|
|||
ui.separator();
|
||||
|
||||
if let Some(pak) = &self.open_file {
|
||||
ui.label(format!("Opened {}", pak.path().file_name().unwrap().to_string_lossy()));
|
||||
ui.label(format!(
|
||||
"Opened {}",
|
||||
pak.path().file_name().unwrap().to_string_lossy()
|
||||
));
|
||||
ui.label(format!("Contains {} Entries", pak.entries().len()));
|
||||
|
||||
let selection = if let Some(entry) = &self.selected_entry {
|
||||
|
@ -81,31 +105,44 @@ impl eframe::App for PakExplorer {
|
|||
"None".to_string()
|
||||
};
|
||||
|
||||
egui::ComboBox::from_id_source("my-combobox")
|
||||
.selected_text(selection)
|
||||
.truncate()
|
||||
.show_ui(ui, |ui|
|
||||
{
|
||||
ui.selectable_value(&mut self.selected_entry, None, "");
|
||||
for entry in pak.entries() {
|
||||
if ui.selectable_value(
|
||||
&mut self.selected_entry,
|
||||
Some(entry.clone()),
|
||||
entry.display_name(),
|
||||
).clicked() {
|
||||
self.image_texture = None;
|
||||
};
|
||||
ui.horizontal(|ui| {
|
||||
egui::ComboBox::from_id_salt("my-combobox")
|
||||
.selected_text(selection.clone())
|
||||
.truncate()
|
||||
.show_ui(ui, |ui| {
|
||||
ui.selectable_value(&mut self.selected_entry, None, "");
|
||||
for entry in pak.entries() {
|
||||
if ui
|
||||
.selectable_value(
|
||||
&mut self.selected_entry,
|
||||
Some(entry.clone()),
|
||||
format!("{} - {}", entry.display_name(), entry.id()),
|
||||
)
|
||||
.clicked()
|
||||
{
|
||||
self.image_texture = None;
|
||||
|
||||
if let Some(a) = self.audio_handle.as_mut() {
|
||||
a.stop(Tween::default());
|
||||
}
|
||||
|
||||
self.audio_handle = None;
|
||||
self.audio_duration = None;
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(entry) = &self.selected_entry {
|
||||
ui.label(format!("Index {}, ID {}", entry.index(), entry.id()));
|
||||
}
|
||||
});
|
||||
} else {
|
||||
ui.centered_and_justified(|ui|
|
||||
ui.label("No File Opened")
|
||||
);
|
||||
ui.centered_and_justified(|ui| ui.label("No File Opened"));
|
||||
}
|
||||
|
||||
if let Some(entry) = &self.selected_entry {
|
||||
ui.horizontal(|ui| {
|
||||
if ui.button("Save entry…").clicked() {
|
||||
if ui.button("Save entry").clicked() {
|
||||
if let Some(path) = rfd::FileDialog::new()
|
||||
.set_file_name(entry.display_name())
|
||||
.save_file()
|
||||
|
@ -115,7 +152,7 @@ impl eframe::App for PakExplorer {
|
|||
}
|
||||
|
||||
if let Some(pak) = &mut self.open_file.as_mut() {
|
||||
if ui.button("Replace entry…").clicked() {
|
||||
if ui.button("Replace entry").clicked() {
|
||||
if let Some(path) = rfd::FileDialog::new().pick_file() {
|
||||
let file_bytes = fs::read(path).unwrap();
|
||||
pak.replace(entry.index(), &file_bytes).unwrap();
|
||||
|
@ -124,56 +161,110 @@ impl eframe::App for PakExplorer {
|
|||
}
|
||||
});
|
||||
match entry.file_type() {
|
||||
EntryType::CZ0 | EntryType::CZ1
|
||||
| EntryType::CZ2 | EntryType::CZ3
|
||||
| EntryType::CZ4 | EntryType::CZ5 =>
|
||||
{
|
||||
if ui.button("Save as PNG…").clicked() {
|
||||
EntryType::CZ0
|
||||
| EntryType::CZ1
|
||||
| EntryType::CZ2
|
||||
| EntryType::CZ3
|
||||
| EntryType::CZ4
|
||||
| EntryType::CZ5 => {
|
||||
if ui.button("Save as PNG").clicked() {
|
||||
let mut display_name = entry.display_name();
|
||||
display_name.push_str(".png");
|
||||
if let Some(path) = rfd::FileDialog::new()
|
||||
.set_file_name(display_name)
|
||||
.save_file()
|
||||
{
|
||||
let cz = cz::DynamicCz::decode(&mut std::io::Cursor::new(entry.as_bytes())).unwrap();
|
||||
cz.save_as_png(&path).unwrap();
|
||||
let cz =
|
||||
cz::CzFile::decode(&mut std::io::Cursor::new(entry.as_bytes()))
|
||||
.unwrap();
|
||||
image::save_buffer_with_format(
|
||||
path,
|
||||
cz.as_raw(),
|
||||
cz.header().width() as u32,
|
||||
cz.header().height() as u32,
|
||||
image::ColorType::Rgba8,
|
||||
image::ImageFormat::Png,
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
ui.separator();
|
||||
|
||||
let texture: &TextureHandle = self.image_texture.get_or_insert_with(|| {
|
||||
let cz = cz::DynamicCz::decode(&mut std::io::Cursor::new(entry.as_bytes())).unwrap();
|
||||
let cz =
|
||||
cz::CzFile::decode(&mut std::io::Cursor::new(entry.as_bytes()))
|
||||
.unwrap();
|
||||
let image = ColorImage::from_rgba_unmultiplied(
|
||||
[cz.header().width() as usize, cz.header().height() as usize],
|
||||
cz.as_raw()
|
||||
cz.as_raw(),
|
||||
);
|
||||
ui.ctx().load_texture("eventframe", image, TextureOptions {
|
||||
magnification: TextureFilter::Nearest,
|
||||
minification: TextureFilter::Linear,
|
||||
..Default::default()
|
||||
})
|
||||
ui.ctx().load_texture(
|
||||
"eventframe",
|
||||
image,
|
||||
TextureOptions {
|
||||
magnification: TextureFilter::Nearest,
|
||||
minification: TextureFilter::Linear,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
});
|
||||
|
||||
ui.centered_and_justified(|ui|
|
||||
ui.centered_and_justified(|ui| {
|
||||
ui.add(
|
||||
Image::from_texture(texture)
|
||||
.show_loading_spinner(true)
|
||||
.shrink_to_fit()
|
||||
.rounding(2.0)
|
||||
.rounding(2.0),
|
||||
)
|
||||
);
|
||||
});
|
||||
}
|
||||
EntryType::OGG
|
||||
| EntryType::OGGPAK
|
||||
| EntryType::WAV => {
|
||||
ui.separator();
|
||||
|
||||
ui.horizontal(|ui| {
|
||||
if ui.button("▶").clicked() && self.audio_handle.is_none() {
|
||||
let sound_data = StaticSoundData::from_cursor(
|
||||
Cursor::new(entry.cloned_bytes_fixed())
|
||||
)
|
||||
.unwrap()
|
||||
.volume(-8.0);
|
||||
|
||||
self.audio_duration = Some(sound_data.duration());
|
||||
self.audio_handle = Some(self.audio_player.play(sound_data.clone()).unwrap());
|
||||
}
|
||||
|
||||
if ui.button("⏹").clicked() && self.audio_handle.is_some() {
|
||||
self.audio_handle.as_mut().unwrap().stop(Tween::default());
|
||||
self.audio_handle = None;
|
||||
self.audio_duration = None;
|
||||
}
|
||||
|
||||
if let Some(a) = &self.audio_handle {
|
||||
let pos = a.position() as f32;
|
||||
|
||||
ui.add(ProgressBar::new(
|
||||
pos / self.audio_duration.as_ref().unwrap().as_secs_f32()
|
||||
).rounding(1.0).text(format!("{:02.0}:{:02.0}", pos / 60.0, pos % 60.0)));
|
||||
|
||||
if pos / self.audio_duration.as_ref().unwrap().as_secs_f32() > 0.99 {
|
||||
self.audio_handle.as_mut().unwrap().stop(Tween::default());
|
||||
self.audio_handle = None;
|
||||
self.audio_duration = None;
|
||||
}
|
||||
}
|
||||
|
||||
ctx.request_repaint_after(Duration::from_millis(50));
|
||||
});
|
||||
}
|
||||
_ => {
|
||||
ui.centered_and_justified(|ui|
|
||||
ui.label("No Preview Available")
|
||||
);
|
||||
},
|
||||
ui.centered_and_justified(|ui| ui.label("No Preview Available"));
|
||||
}
|
||||
}
|
||||
} else if self.open_file.is_some() {
|
||||
ui.centered_and_justified(|ui|
|
||||
ui.label("Select an Entry")
|
||||
);
|
||||
ui.centered_and_justified(|ui| ui.label("Select an Entry"));
|
||||
}
|
||||
});
|
||||
}
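For reference, the audio preview added above reduces to this kira pattern; the sketch below is standalone with a hypothetical voice.ogg path, but every crate call in it is one the diff itself uses.

use std::{fs, io::Cursor};

use kira::{
    sound::static_sound::StaticSoundData,
    AudioManager, AudioManagerSettings, DefaultBackend, Tween,
};

fn main() {
    // Hypothetical input file; in the explorer the bytes come from a PAK entry.
    let bytes = fs::read("voice.ogg").unwrap();

    let mut manager =
        AudioManager::<DefaultBackend>::new(AudioManagerSettings::default()).unwrap();

    // Decode the whole clip up front and lower its volume, as the explorer does.
    let sound = StaticSoundData::from_cursor(Cursor::new(bytes))
        .unwrap()
        .volume(-8.0);
    let duration = sound.duration();
    let mut handle = manager.play(sound).unwrap();

    // Poll the handle until the clip is (almost) done, then stop it.
    while (handle.position() as f32) < duration.as_secs_f32() * 0.99 {
        std::thread::sleep(std::time::Duration::from_millis(50));
    }
    handle.stop(Tween::default());
}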
@@ -1,9 +1,10 @@
 [package]
-name = "utils"
-version = "0.1.1"
+name = "lbee-utils"
+version = "0.2.1"
 edition = "2021"
 license = "GPL-3.0-or-later"
 authors.workspace = true
+build = "build.rs"

 [[bin]]
 name = "czutil"
@@ -12,11 +13,14 @@ name = "czutil"
 name = "pakutil"

 [dependencies]
-cz = { path = "../cz/", features = ["png"] }
+cz = { path = "../cz/" }
 luca_pak = { path = "../luca_pak/" }

 image = { version = "0.25", default-features = false, features = ["png"] }
-clap = { version = "4.5.9", features = ["derive"] }
+clap = { version = "4.5", features = ["derive", "error-context"] }
+owo-colors = "4.1"

+[build-dependencies]
+vergen-gix = { version = "1.0", features = ["build", "cargo", "rustc", "si"] }

 [lints]
 workspace = true

23 utils/build.rs Normal file
@@ -0,0 +1,23 @@
use vergen_gix::{BuildBuilder, CargoBuilder, Emitter, GixBuilder, RustcBuilder, SysinfoBuilder};

fn main() {
    let build = BuildBuilder::all_build().unwrap();
    let cargo = CargoBuilder::all_cargo().unwrap();
    let gitcl = GixBuilder::all_git().unwrap();
    let rustc = RustcBuilder::all_rustc().unwrap();
    let si = SysinfoBuilder::all_sysinfo().unwrap();

    Emitter::default()
        .add_instructions(&build)
        .unwrap()
        .add_instructions(&cargo)
        .unwrap()
        .add_instructions(&gitcl)
        .unwrap()
        .add_instructions(&rustc)
        .unwrap()
        .add_instructions(&si)
        .unwrap()
        .emit()
        .unwrap();
}

|
@ -1,19 +1,32 @@
|
|||
use clap::{error::ErrorKind, Error, Parser, Subcommand};
|
||||
use std::{fs, path::{Path, PathBuf}};
|
||||
use cz::{common::CzVersion, CzFile};
|
||||
use image::ColorType;
|
||||
use lbee_utils::version;
|
||||
use owo_colors::OwoColorize;
|
||||
use std::{
|
||||
fs,
|
||||
path::{Path, PathBuf},
|
||||
process::exit,
|
||||
};
|
||||
|
||||
/// Utility to maniuplate CZ image files from the LUCA System game engine by
|
||||
/// Prototype Ltd.
|
||||
#[derive(Parser)]
|
||||
#[command(name = "CZ Utility")]
|
||||
#[command(version, about, long_about = None)]
|
||||
#[command(author, version, about, long_about = None, disable_version_flag = true)]
|
||||
#[command(arg_required_else_help(true))]
|
||||
struct Cli {
|
||||
/// Show program version information
|
||||
#[arg(short('V'), long)]
|
||||
version: bool,
|
||||
|
||||
#[command(subcommand)]
|
||||
command: Commands,
|
||||
command: Option<Commands>,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
enum Commands {
|
||||
/// Converts a CZ file to a PNG
|
||||
/// Decode a CZ file to a PNG
|
||||
Decode {
|
||||
/// Decode a whole folder, and output to another folder
|
||||
#[arg(short, long)]
|
||||
|
@ -28,7 +41,26 @@ enum Commands {
|
|||
output: Option<PathBuf>,
|
||||
},
|
||||
|
||||
/// Replace a CZ file's image data
|
||||
/// Encode a PNG file to a CZ
|
||||
Encode {
|
||||
/// Input image to encode
|
||||
#[arg(value_name = "INPUT")]
|
||||
input: PathBuf,
|
||||
|
||||
/// Output CZ file location
|
||||
#[arg(value_name = "OUTPUT")]
|
||||
output: PathBuf,
|
||||
|
||||
/// Output CZ file version
|
||||
#[arg(short, long, value_name = "CZ VERSION")]
|
||||
version: Option<u8>,
|
||||
|
||||
/// Output CZ file bit depth
|
||||
#[arg(short, long, value_name = "CZ BIT DEPTH")]
|
||||
depth: Option<u16>,
|
||||
},
|
||||
|
||||
/// Replace an existing CZ file's image data
|
||||
Replace {
|
||||
/// Replace a whole folder, and output to another folder,
|
||||
/// using a folder of replacements
|
||||
|
@ -60,36 +92,37 @@ enum Commands {
|
|||
fn main() {
|
||||
let cli = Cli::parse();
|
||||
|
||||
if cli.version {
|
||||
println!("{}", version(env!("CARGO_BIN_NAME")));
|
||||
exit(0);
|
||||
}
|
||||
|
||||
let command = match cli.command {
|
||||
Some(c) => c,
|
||||
None => exit(0),
|
||||
};
|
||||
|
||||
// Check what subcommand was run
|
||||
match &cli.command {
|
||||
match &command {
|
||||
Commands::Decode {
|
||||
input,
|
||||
output,
|
||||
batch,
|
||||
} => {
|
||||
if !input.exists() {
|
||||
Error::raw(
|
||||
ErrorKind::ValueValidation,
|
||||
"The input file/folder provided does not exist\n",
|
||||
)
|
||||
.exit()
|
||||
pretty_error("The input file/folder provided does not exist");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
if *batch {
|
||||
if input.is_file() {
|
||||
Error::raw(
|
||||
ErrorKind::ValueValidation,
|
||||
"Batch input must be a directory\n",
|
||||
)
|
||||
.exit()
|
||||
pretty_error("Batch input must be a directory");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
if output.is_none() || output.as_ref().unwrap().is_file() {
|
||||
Error::raw(
|
||||
ErrorKind::ValueValidation,
|
||||
"Batch output must be a directory\n",
|
||||
)
|
||||
.exit()
|
||||
pretty_error("Batch output must be a directory");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
for entry in fs::read_dir(input).unwrap() {
|
||||
|
@ -107,29 +140,48 @@ fn main() {
|
|||
let cz = match cz::open(&path) {
|
||||
Ok(cz) => cz,
|
||||
Err(_) => {
|
||||
Error::raw(
|
||||
ErrorKind::ValueValidation,
|
||||
format!(
|
||||
"Could not open input as a CZ file: {}\n",
|
||||
path.into_os_string().to_str().unwrap()
|
||||
),
|
||||
)
|
||||
.print()
|
||||
.unwrap();
|
||||
pretty_error(&format!(
|
||||
"Could not open input as a CZ file: {}\n",
|
||||
path.into_os_string().to_str().unwrap()
|
||||
));
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
cz.save_as_png(&final_path).unwrap();
|
||||
image::save_buffer_with_format(
|
||||
final_path,
|
||||
cz.as_raw(),
|
||||
cz.header().width() as u32,
|
||||
cz.header().height() as u32,
|
||||
ColorType::Rgba8,
|
||||
image::ImageFormat::Png,
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
} else {
|
||||
let cz = cz::open(input).unwrap();
|
||||
|
||||
if let Some(output) = output {
|
||||
cz.save_as_png(output).unwrap();
|
||||
image::save_buffer_with_format(
|
||||
output,
|
||||
cz.as_raw(),
|
||||
cz.header().width() as u32,
|
||||
cz.header().height() as u32,
|
||||
ColorType::Rgba8,
|
||||
image::ImageFormat::Png,
|
||||
)
|
||||
.unwrap();
|
||||
} else {
|
||||
let file_stem = PathBuf::from(input.file_name().unwrap());
|
||||
cz.save_as_png(&file_stem.with_extension("png")).unwrap();
|
||||
image::save_buffer_with_format(
|
||||
file_stem.with_extension("png"),
|
||||
cz.as_raw(),
|
||||
cz.header().width() as u32,
|
||||
cz.header().height() as u32,
|
||||
ColorType::Rgba8,
|
||||
image::ImageFormat::Png,
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -142,45 +194,30 @@ fn main() {
|
|||
depth,
|
||||
} => {
|
||||
if !input.exists() {
|
||||
Error::raw(
|
||||
ErrorKind::ValueValidation,
|
||||
"The original file provided does not exist\n",
|
||||
)
|
||||
.exit()
|
||||
pretty_error("The input file does not exist");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
if !replacement.exists() {
|
||||
Error::raw(
|
||||
ErrorKind::ValueValidation,
|
||||
"The replacement file provided does not exist\n",
|
||||
)
|
||||
.exit()
|
||||
pretty_error("The replacement file does not exist");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
// If it's a batch replacement, we want directories to search
|
||||
if *batch {
|
||||
if !input.is_dir() {
|
||||
Error::raw(
|
||||
ErrorKind::ValueValidation,
|
||||
"Batch input location must be a directory\n",
|
||||
)
|
||||
.exit()
|
||||
pretty_error("Batch input must be a directory");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
if !replacement.is_dir() {
|
||||
Error::raw(
|
||||
ErrorKind::ValueValidation,
|
||||
"Batch replacement location must be a directory\n",
|
||||
)
|
||||
.exit()
|
||||
pretty_error("Batch replacement must be a directory");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
if !output.is_dir() {
|
||||
Error::raw(
|
||||
ErrorKind::ValueValidation,
|
||||
"Batch output location must be a directory\n",
|
||||
)
|
||||
.exit()
|
||||
pretty_error("Batch output location must be a directory");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
// Replace all the files within the directory and print errors for them
|
||||
|
@ -212,17 +249,89 @@ fn main() {
|
|||
}
|
||||
} else {
|
||||
if !input.is_file() {
|
||||
Error::raw(ErrorKind::ValueValidation, "Input must be a file\n").exit()
|
||||
pretty_error("Input must be a file");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
if !replacement.is_file() {
|
||||
Error::raw(ErrorKind::ValueValidation, "Replacement must be a file\n").exit()
|
||||
pretty_error("Replacement must be a file");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
// Replace the input file with the new image
|
||||
replace_cz(&input, &output, &replacement, version, depth).unwrap();
|
||||
}
|
||||
}
|
||||
Commands::Encode {
|
||||
input,
|
||||
output,
|
||||
version,
|
||||
depth,
|
||||
} => {
|
||||
if !input.exists() {
|
||||
pretty_error("The original file provided does not exist");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
let version = if let Some(v) = version {
|
||||
match CzVersion::try_from(*v) {
|
||||
Ok(v) => v,
|
||||
Err(_) => {
|
||||
pretty_error(&format!(
|
||||
"Invalid CZ version {}; must be 0, 1, 2, 3, or 4",
|
||||
v
|
||||
));
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
} else if output
|
||||
.extension()
|
||||
.is_some_and(|e| e.to_ascii_lowercase().to_string_lossy().starts_with("cz"))
|
||||
{
|
||||
let ext_string = output.extension().unwrap().to_string_lossy();
|
||||
let last_char = ext_string.chars().last().unwrap();
|
||||
match CzVersion::try_from(last_char) {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
pretty_error(&format!("Invalid CZ type: {}", e));
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
pretty_error("CZ version not specified or not parseable from file path");
|
||||
exit(1);
|
||||
};
|
||||
|
||||
let image = match image::open(input) {
|
||||
Ok(i) => i,
|
||||
Err(e) => {
|
||||
pretty_error(&format!("Could not open input file: {e}"));
|
||||
exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let image_depth = image.color();
|
||||
|
||||
let mut cz = CzFile::from_raw(
|
||||
version,
|
||||
image.width() as u16,
|
||||
image.height() as u16,
|
||||
image.to_rgba8().into_vec(),
|
||||
);
|
||||
if let Some(d) = *depth {
|
||||
if !(d == 8 || d == 24 || d == 32) {
|
||||
pretty_error(&format!(
|
||||
"The color depth provided is not valid. Choose from: {}",
|
||||
"8, 24, or 32".bright_magenta()
|
||||
));
|
||||
exit(1);
|
||||
}
|
||||
cz.header_mut().set_depth(d);
|
||||
} else {
|
||||
cz.header_mut().set_depth(image_depth.bits_per_pixel());
|
||||
}
|
||||
cz.save_as_cz(output).expect("Saving CZ file failed");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -244,7 +353,7 @@ fn replace_cz<P: ?Sized + AsRef<Path>>(
|
|||
}
|
||||
|
||||
// Open the replacement image and convert it to RGBA8
|
||||
let repl_img = image::open(&replacement_path)?.to_rgba8();
|
||||
let repl_img = image::open(replacement_path)?.to_rgba8();
|
||||
|
||||
// Open the original CZ file
|
||||
let mut cz = cz::open(&path)?;
|
||||
|
@ -268,3 +377,7 @@ fn replace_cz<P: ?Sized + AsRef<Path>>(
|
|||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn pretty_error(message: &str) {
|
||||
eprintln!("{}: {}", "Error".red().italic(), message);
|
||||
}
|
||||
|
|
|
@ -1,18 +1,27 @@
|
|||
use std::{fs, path::PathBuf};
|
||||
use clap::{error::{Error, ErrorKind}, Parser, Subcommand};
|
||||
use clap::{
|
||||
error::{Error, ErrorKind},
|
||||
Parser, Subcommand,
|
||||
};
|
||||
use lbee_utils::version;
|
||||
use luca_pak::Pak;
|
||||
use std::{fs, path::PathBuf, process::exit};
|
||||
|
||||
/// Utility to maniuplate PAK archive files from the LUCA System game engine by
|
||||
/// Prototype Ltd.
|
||||
#[derive(Parser)]
|
||||
#[command(name = "PAK Utility")]
|
||||
#[command(version, about, long_about = None)]
|
||||
#[command(author, version, about, long_about = None, disable_version_flag = true)]
|
||||
#[command(arg_required_else_help(true))]
|
||||
struct Cli {
|
||||
#[arg(value_name = "PAK FILE")]
|
||||
input: PathBuf,
|
||||
/// Show program version information
|
||||
#[arg(short('V'), long)]
|
||||
version: bool,
|
||||
|
||||
#[arg(value_name = "PAK FILE", required_unless_present("version"))]
|
||||
input: Option<PathBuf>,
|
||||
|
||||
#[command(subcommand)]
|
||||
command: Commands,
|
||||
command: Option<Commands>,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
|
@ -56,12 +65,24 @@ enum Commands {
|
|||
fn main() {
|
||||
let cli = Cli::parse();
|
||||
|
||||
let mut pak = match Pak::open(&cli.input) {
|
||||
if cli.version {
|
||||
println!("{}", version(env!("CARGO_BIN_NAME")));
|
||||
exit(0);
|
||||
}
|
||||
|
||||
let mut pak = match Pak::open(&cli.input.unwrap()) {
|
||||
Ok(pak) => pak,
|
||||
Err(err) => fmt_error(&format!("Could not open PAK file: {}", err)).exit()
|
||||
Err(err) => fmt_error(&format!("Could not open PAK file: {}", err)).exit(),
|
||||
};
|
||||
|
||||
match cli.command {
|
||||
let command = match cli.command {
|
||||
Some(c) => c,
|
||||
None => {
|
||||
exit(0);
|
||||
}
|
||||
};
|
||||
|
||||
match command {
|
||||
Commands::Extract { output } => {
|
||||
if output.exists() && !output.is_dir() {
|
||||
fmt_error("The output given was not a directory").exit()
|
||||
|
@ -71,11 +92,21 @@ fn main() {
|
|||
|
||||
for entry in pak.entries() {
|
||||
let mut outpath = output.clone();
|
||||
outpath.push(entry.display_name());
|
||||
if let Some(n) = entry.name() {
|
||||
outpath.push(n);
|
||||
} else {
|
||||
outpath.push(entry.index().to_string())
|
||||
}
|
||||
entry.save(&outpath).unwrap();
|
||||
}
|
||||
},
|
||||
Commands::Replace { batch, name, id, replacement, output } => {
|
||||
}
|
||||
Commands::Replace {
|
||||
batch,
|
||||
name,
|
||||
id,
|
||||
replacement,
|
||||
output,
|
||||
} => {
|
||||
if id.is_some() && name.is_some() {
|
||||
fmt_error("Cannot use ID and name together").exit()
|
||||
}
|
||||
|
@ -90,12 +121,8 @@ fn main() {
|
|||
|
||||
for entry in fs::read_dir(replacement).unwrap() {
|
||||
let entry = entry.unwrap();
|
||||
let search_name: String = entry
|
||||
.path()
|
||||
.file_name()
|
||||
.unwrap()
|
||||
.to_string_lossy()
|
||||
.into();
|
||||
let search_name: String =
|
||||
entry.path().file_name().unwrap().to_string_lossy().into();
|
||||
|
||||
let parsed_id: Option<u32> = search_name.parse().ok();
|
||||
|
||||
|
@ -104,9 +131,15 @@ fn main() {
|
|||
|
||||
// Try replacing by name, if that fails, replace by parsed ID
|
||||
if pak.replace_by_name(search_name, &rep_data).is_err() {
|
||||
fmt_error("Could not replace entry in PAK: Could not find name").print().unwrap()
|
||||
} else if parsed_id.is_some() && pak.replace_by_id(parsed_id.unwrap(), &rep_data).is_err() {
|
||||
fmt_error("Could not replace entry in PAK: ID is invalid").print().unwrap()
|
||||
fmt_error("Could not replace entry in PAK: Could not find name")
|
||||
.print()
|
||||
.unwrap()
|
||||
} else if parsed_id.is_some()
|
||||
&& pak.replace_by_id(parsed_id.unwrap(), &rep_data).is_err()
|
||||
{
|
||||
fmt_error("Could not replace entry in PAK: ID is invalid")
|
||||
.print()
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -117,11 +150,7 @@ fn main() {
|
|||
let search_name = if let Some(name) = name {
|
||||
name
|
||||
} else {
|
||||
replacement
|
||||
.file_name()
|
||||
.unwrap()
|
||||
.to_string_lossy()
|
||||
.into()
|
||||
replacement.file_name().unwrap().to_string_lossy().into()
|
||||
};
|
||||
|
||||
let search_id = if id.is_some() {
|
||||
|
@ -152,8 +181,5 @@ fn main() {
|
|||
|
||||
#[inline(always)]
|
||||
fn fmt_error(message: &str) -> Error {
|
||||
Error::raw(
|
||||
ErrorKind::ValueValidation,
|
||||
format!("{}\n", message),
|
||||
)
|
||||
Error::raw(ErrorKind::ValueValidation, format!("{}\n", message))
|
||||
}
|
12 utils/src/lib.rs Normal file
@@ -0,0 +1,12 @@
use owo_colors::OwoColorize;

pub fn version(bin_name: &str) -> String {
    format!(
        "{}, {} v{} ({}, {})",
        bin_name,
        env!("CARGO_PKG_NAME").cyan(),
        env!("CARGO_PKG_VERSION").blue(),
        (&env!("VERGEN_GIT_SHA")[0..=6]).green(),
        env!("VERGEN_GIT_COMMIT_DATE").green(),
    )
}