mirror of https://github.com/G2-Games/lbee-utils.git
synced 2025-04-19 07:12:55 -05:00
Ran cargo fmt
This commit is contained in:
parent fddcf2f055
commit 42014a6eb2
8 changed files with 130 additions and 127 deletions
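Most of the diff below is mechanical restyling: over-long use lists are split to one item per line, multi-line signatures and calls that fit within the line width are collapsed back onto one line, and trailing commas and statement-ending semicolons are normalized. As a rough sketch of the recurring import change (illustrative only, built from one of the imports touched below rather than code added by the commit):

// Before: hand-formatted, single-line import
// use std::{error::Error, fs::File, io::{BufWriter, Write}, path::Path};

// After cargo fmt: rustfmt's default vertical layout, with trailing commas
use std::{
    error::Error,
    fs::File,
    io::{BufWriter, Write},
    path::Path,
};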
@@ -391,7 +391,7 @@ fn compress_lzw2(data: &[u8], last: Vec<u8>) -> (usize, Vec<u8>, Vec<u8>) {
         if dictionary_count >= 0x3FFFE {
             count -= 1;
-            break
+            break;
         }
     }
 
@@ -223,12 +223,7 @@ impl DynamicCz {
     /// Create a CZ# image from RGBA bytes. The bytes *must* be RGBA, as that
     /// is the only format that is used internally.
-    pub fn from_raw(
-        version: CzVersion,
-        width: u16,
-        height: u16,
-        bitmap: Vec<u8>,
-    ) -> Self {
+    pub fn from_raw(version: CzVersion, width: u16, height: u16, bitmap: Vec<u8>) -> Self {
         let header_common = CommonHeader::new(version, width, height);
 
         Self {
@@ -1,5 +1,8 @@
 use std::{
-    error::Error, fs::File, io::{BufWriter, Write}, path::Path
+    error::Error,
+    fs::File,
+    io::{BufWriter, Write},
+    path::Path,
 };
 
 /// A single file entry in a PAK file
@@ -87,7 +90,6 @@ impl Entry {
     }
 }
 
-
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub enum EntryType {
     CZ0,
@@ -1,5 +1,5 @@
-use std::io::{self, Write};
 use byteorder::WriteBytesExt;
+use std::io::{self, Write};
 
 use crate::LE;
 
@@ -5,7 +5,10 @@ use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
 use header::Header;
 use log::{debug, info};
 use std::{
-    ffi::CString, fs::File, io::{self, BufRead, BufReader, BufWriter, Read, Seek, SeekFrom, Write}, path::{Path, PathBuf}
+    ffi::CString,
+    fs::File,
+    io::{self, BufRead, BufReader, BufWriter, Read, Seek, SeekFrom, Write},
+    path::{Path, PathBuf},
 };
 use thiserror::Error;
 
@@ -63,7 +66,7 @@ pub struct PakLimits {
 impl Default for PakLimits {
     fn default() -> Self {
         Self {
-            entry_limit: 10_000, // 10,000 entries
+            entry_limit: 10_000, // 10,000 entries
             size_limit: 10_000_000_000, // 10 gb
         }
     }
@@ -74,11 +77,7 @@ impl Pak {
     pub fn open<P: ?Sized + AsRef<Path>>(path: &P) -> Result<Self, PakError> {
         let mut file = File::open(path)?;
 
-        Pak::decode(
-            &mut file,
-            path.as_ref().to_path_buf(),
-            PakLimits::default()
-        )
+        Pak::decode(&mut file, path.as_ref().to_path_buf(), PakLimits::default())
     }
 
     /// Decode a PAK file from a byte stream.
@@ -105,7 +104,7 @@ impl Pak {
         };
 
         if header.entry_count >= limits.entry_limit as u32 {
-            return Err(PakError::EntryLimit(header.entry_count, limits.entry_limit))
+            return Err(PakError::EntryLimit(header.entry_count, limits.entry_limit));
         }
         info!("{} entries detected", header.entry_count);
         debug!("Block size is {} bytes", header.block_size);
@@ -139,10 +138,7 @@ impl Pak {
         for _ in 0..header.entry_count() {
             let offset = input.read_u32::<LE>().unwrap();
             let length = input.read_u32::<LE>().unwrap();
-            offsets.push(EntryLocation {
-                offset,
-                length,
-            });
+            offsets.push(EntryLocation { offset, length });
         }
 
         // Read all unknown_data1
@@ -180,7 +176,11 @@ impl Pak {
         // Read all entry data
         debug!("Creating entry list");
         let mut entries: Vec<Entry> = Vec::new();
-        for (i, offset_info) in offsets.iter().enumerate().take(header.entry_count() as usize) {
+        for (i, offset_info) in offsets
+            .iter()
+            .enumerate()
+            .take(header.entry_count() as usize)
+        {
             debug!("Seeking to block {}", offset_info.offset);
             // Seek to and read the entry data
             input
@@ -237,18 +237,16 @@ impl Pak {
     }
 
     /// Encode a PAK file into a byte stream.
-    pub fn encode<T: Write>(
-        &self,
-        mut output: &mut T
-    ) -> Result<(), PakError> {
+    pub fn encode<T: Write>(&self, mut output: &mut T) -> Result<(), PakError> {
         self.header.write_into(&mut output)?;
 
         // Write unknown data
         output.write_all(
-            &self.unknown_pre_data
+            &self
+                .unknown_pre_data
                 .iter()
                 .flat_map(|dw| dw.to_le_bytes())
-                .collect::<Vec<u8>>()
+                .collect::<Vec<u8>>(),
         )?;
 
         // Write offsets and lengths
@@ -267,15 +265,11 @@ impl Pak {
         // Write names if the flags indicate it should have them
         if self.header.flags().has_names() {
             if let Some(subdir) = &self.subdirectory {
-                output.write_all(
-                    CString::new(subdir.as_bytes()).unwrap().to_bytes_with_nul()
-                )?;
+                output.write_all(CString::new(subdir.as_bytes()).unwrap().to_bytes_with_nul())?;
             }
             for entry in self.entries() {
                 let name = entry.name.as_ref().unwrap();
-                output.write_all(
-                    CString::new(name.as_bytes()).unwrap().to_bytes_with_nul()
-                )?;
+                output.write_all(CString::new(name.as_bytes()).unwrap().to_bytes_with_nul())?;
             }
         }
 
@@ -285,7 +279,11 @@ impl Pak {
 
         for entry in self.entries() {
             //let block_size = entry.data.len().div_ceil(self.header().block_size as usize);
-            let mut remainder = 2048 - entry.data.len().rem_euclid(self.header().block_size as usize);
+            let mut remainder = 2048
+                - entry
+                    .data
+                    .len()
+                    .rem_euclid(self.header().block_size as usize);
             if remainder == 2048 {
                 remainder = 0;
             }
@@ -306,11 +304,7 @@ impl Pak {
     ///
     /// This function updates the offsets of all entries to fit within the
    /// chunk size specified in the header.
-    pub fn replace(
-        &mut self,
-        index: usize,
-        replacement_bytes: &[u8],
-    ) -> Result<(), PakError> {
+    pub fn replace(&mut self, index: usize, replacement_bytes: &[u8]) -> Result<(), PakError> {
         let block_size = self.header().block_size();
 
         let replaced_entry;
@@ -318,7 +312,7 @@ impl Pak {
             replaced_entry = entry
         } else {
             log::error!("Entry {} not found!", index);
-            return Err(PakError::IndexError)
+            return Err(PakError::IndexError);
         };
 
         if let Some(name) = replaced_entry.name() {
@@ -332,8 +326,7 @@ impl Pak {
         replaced_entry.length = replaced_entry.data.len() as u32;
 
         // Get the offset of the next entry based on the current one
-        let mut next_offset =
-            replaced_entry.offset + replaced_entry.length.div_ceil(block_size);
+        let mut next_offset = replaced_entry.offset + replaced_entry.length.div_ceil(block_size);
 
         // Update the position of all subsequent entries
         let mut i = 0;
@@ -361,7 +354,7 @@ impl Pak {
         let index = if let Some(entry) = entry {
             entry.index
         } else {
-            return Err(PakError::IndexError)
+            return Err(PakError::IndexError);
         };
 
         self.replace(index, replacement_bytes)?;
@@ -369,16 +362,12 @@ impl Pak {
         Ok(())
     }
 
-    pub fn replace_by_id(
-        &mut self,
-        id: u32,
-        replacement_bytes: &[u8],
-    ) -> Result<(), PakError> {
+    pub fn replace_by_id(&mut self, id: u32, replacement_bytes: &[u8]) -> Result<(), PakError> {
         let entry = self.get_entry_by_id(id);
         let index = if let Some(entry) = entry {
             entry.index
         } else {
-            return Err(PakError::IndexError)
+            return Err(PakError::IndexError);
         };
 
         self.replace(index, replacement_bytes)?;
@@ -397,16 +386,13 @@ impl Pak {
 
     /// Get an individual entry from the PAK by its ID
     pub fn get_entry_by_id(&mut self, id: u32) -> Option<&mut Entry> {
-        self.entries
-            .get_mut((id - self.header.id_start) as usize)
+        self.entries.get_mut((id - self.header.id_start) as usize)
     }
 
     pub fn get_entry_by_name(&mut self, name: &str) -> Option<&mut Entry> {
         self.entries
             .iter_mut()
-            .find(|e|
-                e.name.as_ref().is_some_and(|n| n == name)
-            )
+            .find(|e| e.name.as_ref().is_some_and(|n| n == name))
     }
 
     /// Get a list of all entries from the PAK
@@ -418,8 +404,7 @@ impl Pak {
     pub fn contains_name(&self, name: &str) -> bool {
         self.entries
             .iter()
-            .any(|e| e.name.as_ref()
-            .is_some_and(|n| n == name))
+            .any(|e| e.name.as_ref().is_some_and(|n| n == name))
     }
 }
 
@@ -1,10 +1,10 @@
 #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release
 
-use std::fs;
 use colog;
 use eframe::egui::{self, ColorImage, Image, TextureFilter, TextureHandle, TextureOptions};
 use log::error;
 use luca_pak::{entry::EntryType, Pak};
+use std::fs;
 
 fn main() -> eframe::Result {
     colog::default_builder()
@@ -20,11 +20,15 @@ fn main() -> eframe::Result {
     let mut fonts = egui::FontDefinitions::default();
     fonts.font_data.insert(
         "Noto Sans".to_owned(),
-        egui::FontData::from_static(include_bytes!("/home/g2/Downloads/Noto_Sans/static/NotoSans-Regular.ttf")),
+        egui::FontData::from_static(include_bytes!(
+            "/home/g2/Downloads/Noto_Sans/static/NotoSans-Regular.ttf"
+        )),
     );
     fonts.font_data.insert(
         "Noto Sans Japanese".to_owned(),
-        egui::FontData::from_static(include_bytes!("/home/g2/Downloads/Noto_Sans_JP/static/NotoSansJP-Regular.ttf")),
+        egui::FontData::from_static(include_bytes!(
+            "/home/g2/Downloads/Noto_Sans_JP/static/NotoSansJP-Regular.ttf"
+        )),
     );
     fonts
         .families
@@ -80,7 +84,7 @@ impl eframe::App for PakExplorer {
                     Err(e) => {
                         error!("Unable to read selected file as PAK: {}", e);
                         None
-                    },
+                    }
                 };
                 self.open_file = pak;
                 self.selected_entry = None;
@@ -91,8 +95,8 @@ impl eframe::App for PakExplorer {
             if let Some(pak) = &self.open_file {
                 if ui.button("Save PAK").clicked() {
                     if let Some(path) = rfd::FileDialog::new()
-                        .set_file_name(pak.path().file_name().unwrap().to_string_lossy())
-                        .save_file()
+                        .set_file_name(pak.path().file_name().unwrap().to_string_lossy())
+                        .save_file()
                     {
                         pak.save(&path).unwrap();
                     }
@@ -103,7 +107,10 @@ impl eframe::App for PakExplorer {
             ui.separator();
 
             if let Some(pak) = &self.open_file {
-                ui.label(format!("Opened {}", pak.path().file_name().unwrap().to_string_lossy()));
+                ui.label(format!(
+                    "Opened {}",
+                    pak.path().file_name().unwrap().to_string_lossy()
+                ));
                 ui.label(format!("Contains {} Entries", pak.entries().len()));
 
                 let selection = if let Some(entry) = &self.selected_entry {
@@ -115,23 +122,23 @@ impl eframe::App for PakExplorer {
                 egui::ComboBox::from_id_source("my-combobox")
                     .selected_text(selection)
                     .truncate()
-                    .show_ui(ui, |ui|
-                    {
-                        ui.selectable_value(&mut self.selected_entry, None, "");
-                        for entry in pak.entries() {
-                            if ui.selectable_value(
-                                &mut self.selected_entry,
-                                Some(entry.clone()),
-                                entry.display_name(),
-                            ).clicked() {
-                                self.image_texture = None;
-                            };
-                        }
-                    });
+                    .show_ui(ui, |ui| {
+                        ui.selectable_value(&mut self.selected_entry, None, "");
+                        for entry in pak.entries() {
+                            if ui
+                                .selectable_value(
+                                    &mut self.selected_entry,
+                                    Some(entry.clone()),
+                                    entry.display_name(),
+                                )
+                                .clicked()
+                            {
+                                self.image_texture = None;
+                            };
+                        }
+                    });
             } else {
-                ui.centered_and_justified(|ui|
-                    ui.label("No File Opened")
-                );
+                ui.centered_and_justified(|ui| ui.label("No File Opened"));
             }
 
             if let Some(entry) = &self.selected_entry {
@@ -155,10 +162,12 @@ impl eframe::App for PakExplorer {
                     }
                 });
                 match entry.file_type() {
-                    EntryType::CZ0 | EntryType::CZ1
-                    | EntryType::CZ2 | EntryType::CZ3
-                    | EntryType::CZ4 | EntryType::CZ5 =>
-                    {
+                    EntryType::CZ0
+                    | EntryType::CZ1
+                    | EntryType::CZ2
+                    | EntryType::CZ3
+                    | EntryType::CZ4
+                    | EntryType::CZ5 => {
                         if ui.button("Save as PNG").clicked() {
                             let mut display_name = entry.display_name();
                             display_name.push_str(".png");
@@ -166,7 +175,10 @@ impl eframe::App for PakExplorer {
                                 .set_file_name(display_name)
                                 .save_file()
                             {
-                                let cz = cz::DynamicCz::decode(&mut std::io::Cursor::new(entry.as_bytes())).unwrap();
+                                let cz = cz::DynamicCz::decode(&mut std::io::Cursor::new(
+                                    entry.as_bytes(),
+                                ))
+                                .unwrap();
                                 cz.save_as_png(&path).unwrap();
                             }
                         }
@@ -174,37 +186,39 @@ impl eframe::App for PakExplorer {
                         ui.separator();
 
                         let texture: &TextureHandle = self.image_texture.get_or_insert_with(|| {
-                            let cz = cz::DynamicCz::decode(&mut std::io::Cursor::new(entry.as_bytes())).unwrap();
+                            let cz =
+                                cz::DynamicCz::decode(&mut std::io::Cursor::new(entry.as_bytes()))
+                                    .unwrap();
                             let image = ColorImage::from_rgba_unmultiplied(
                                 [cz.header().width() as usize, cz.header().height() as usize],
-                                cz.as_raw()
+                                cz.as_raw(),
                             );
-                            ui.ctx().load_texture("eventframe", image, TextureOptions {
-                                magnification: TextureFilter::Nearest,
-                                minification: TextureFilter::Linear,
-                                ..Default::default()
-                            })
+                            ui.ctx().load_texture(
+                                "eventframe",
+                                image,
+                                TextureOptions {
+                                    magnification: TextureFilter::Nearest,
+                                    minification: TextureFilter::Linear,
+                                    ..Default::default()
+                                },
+                            )
                         });
 
-                        ui.centered_and_justified(|ui|
+                        ui.centered_and_justified(|ui| {
                             ui.add(
                                 Image::from_texture(texture)
                                     .show_loading_spinner(true)
                                     .shrink_to_fit()
-                                    .rounding(2.0)
+                                    .rounding(2.0),
                             )
-                        );
+                        });
                     }
                     _ => {
-                        ui.centered_and_justified(|ui|
-                            ui.label("No Preview Available")
-                        );
-                    },
+                        ui.centered_and_justified(|ui| ui.label("No Preview Available"));
+                    }
                 }
             } else if self.open_file.is_some() {
-                ui.centered_and_justified(|ui|
-                    ui.label("Select an Entry")
-                );
+                ui.centered_and_justified(|ui| ui.label("Select an Entry"));
             }
         });
     }
 
@@ -1,5 +1,8 @@
 use clap::{error::ErrorKind, Error, Parser, Subcommand};
-use std::{fs, path::{Path, PathBuf}};
+use std::{
+    fs,
+    path::{Path, PathBuf},
+};
 
 /// Utility to maniuplate CZ image files from the LUCA System game engine by
 /// Prototype Ltd.
 
@@ -1,6 +1,9 @@
-use std::{fs, path::PathBuf};
-use clap::{error::{Error, ErrorKind}, Parser, Subcommand};
+use clap::{
+    error::{Error, ErrorKind},
+    Parser, Subcommand,
+};
 use luca_pak::Pak;
+use std::{fs, path::PathBuf};
 
 /// Utility to maniuplate PAK archive files from the LUCA System game engine by
 /// Prototype Ltd.
@@ -58,7 +61,7 @@ fn main() {
 
     let mut pak = match Pak::open(&cli.input) {
         Ok(pak) => pak,
-        Err(err) => fmt_error(&format!("Could not open PAK file: {}", err)).exit()
+        Err(err) => fmt_error(&format!("Could not open PAK file: {}", err)).exit(),
     };
 
     match cli.command {
@@ -74,8 +77,14 @@ fn main() {
                 outpath.push(entry.display_name());
                 entry.save(&outpath).unwrap();
             }
-        },
-        Commands::Replace { batch, name, id, replacement, output } => {
+        }
+        Commands::Replace {
+            batch,
+            name,
+            id,
+            replacement,
+            output,
+        } => {
             if id.is_some() && name.is_some() {
                 fmt_error("Cannot use ID and name together").exit()
             }
@@ -90,12 +99,8 @@ fn main() {
 
                 for entry in fs::read_dir(replacement).unwrap() {
                     let entry = entry.unwrap();
-                    let search_name: String = entry
-                        .path()
-                        .file_name()
-                        .unwrap()
-                        .to_string_lossy()
-                        .into();
+                    let search_name: String =
+                        entry.path().file_name().unwrap().to_string_lossy().into();
 
                     let parsed_id: Option<u32> = search_name.parse().ok();
 
@@ -104,9 +109,15 @@ fn main() {
 
                     // Try replacing by name, if that fails, replace by parsed ID
                     if pak.replace_by_name(search_name, &rep_data).is_err() {
-                        fmt_error("Could not replace entry in PAK: Could not find name").print().unwrap()
-                    } else if parsed_id.is_some() && pak.replace_by_id(parsed_id.unwrap(), &rep_data).is_err() {
-                        fmt_error("Could not replace entry in PAK: ID is invalid").print().unwrap()
+                        fmt_error("Could not replace entry in PAK: Could not find name")
+                            .print()
+                            .unwrap()
+                    } else if parsed_id.is_some()
+                        && pak.replace_by_id(parsed_id.unwrap(), &rep_data).is_err()
+                    {
+                        fmt_error("Could not replace entry in PAK: ID is invalid")
+                            .print()
+                            .unwrap()
                     }
                 }
             } else {
@@ -117,11 +128,7 @@ fn main() {
                 let search_name = if let Some(name) = name {
                     name
                 } else {
-                    replacement
-                        .file_name()
-                        .unwrap()
-                        .to_string_lossy()
-                        .into()
+                    replacement.file_name().unwrap().to_string_lossy().into()
                 };
 
                 let search_id = if id.is_some() {
@@ -152,8 +159,5 @@ fn main() {
 
 #[inline(always)]
 fn fmt_error(message: &str) -> Error {
-    Error::raw(
-        ErrorKind::ValueValidation,
-        format!("{}\n", message),
-    )
+    Error::raw(ErrorKind::ValueValidation, format!("{}\n", message))
 }