Refactor code and add hashbrown dependency

Piotr Siuszko 2023-12-27 22:27:58 +01:00
parent 5851f52780
commit 533ad95e11
4 changed files with 126 additions and 57 deletions
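Taken together, the changes move the clap Args struct into its own src/args.rs module, replace std::collections::HashMap with hashbrown::HashMap, and build the asset-hash to asset-path mapping in parallel: entries of the extracted temp directory are fed through rayon's par_bridge, each worker sends its (hash, path) pair over an mpsc channel, and the receiver collects everything into the map. A minimal sketch of that collection pattern follows; the build_mapping name and the placeholder path value are illustrative only, not the project's exact code.

use hashbrown::HashMap;
use rayon::prelude::*;
use std::{fs, path::Path, sync::mpsc::channel};

// Walk the extracted tmp directory in parallel and gather (hash, path) pairs.
fn build_mapping(tmp_dir: &Path) -> HashMap<String, String> {
    let (sender, receiver) = channel();
    fs::read_dir(tmp_dir)
        .expect("tmp dir should exist")
        .par_bridge() // turn the sequential ReadDir iterator into a parallel one
        .for_each_with(sender, |s, entry| {
            let entry = entry.expect("readable directory entry");
            if !entry.path().is_dir() {
                return;
            }
            let asset_hash = entry.file_name().into_string().unwrap_or_default();
            // In the real code the value is read from the "pathname" file inside
            // each asset directory; a placeholder stands in here.
            let real_path = format!("Assets/{asset_hash}");
            s.send((asset_hash, real_path)).unwrap();
        });
    // All sender clones are dropped when for_each_with returns, so the
    // receiving iterator terminates and the map is complete.
    receiver.iter().collect()
}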

Cargo.lock (generated)

@@ -8,6 +8,24 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "ahash"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
dependencies = [
"cfg-if",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
name = "allocator-api2"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
[[package]]
name = "anstream"
version = "0.6.5"
@@ -205,6 +223,17 @@ dependencies = [
"miniz_oxide",
]
[[package]]
name = "hashbrown"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
dependencies = [
"ahash",
"allocator-api2",
"rayon",
]
[[package]]
name = "heck"
version = "0.4.1"
@@ -225,10 +254,11 @@ checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456"
[[package]]
name = "lwa_unity_unpack"
version = "0.2.0"
version = "0.2.1"
dependencies = [
"clap",
"flate2",
"hashbrown",
"rayon",
"tar",
]
@@ -251,6 +281,12 @@ dependencies = [
"adler",
]
[[package]]
name = "once_cell"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]]
name = "proc-macro2"
version = "1.0.70"
@@ -351,6 +387,12 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "windows-sys"
version = "0.52.0"
@@ -427,3 +469,23 @@ dependencies = [
"linux-raw-sys",
"rustix",
]
[[package]]
name = "zerocopy"
version = "0.7.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.7.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
dependencies = [
"proc-macro2",
"quote",
"syn",
]

Cargo.toml

@@ -22,5 +22,6 @@ opt-level = 2
[dependencies]
clap = { version = "4.4", features = ["derive"] }
flate2 = "1.0"
hashbrown = { version = "0.14.3", features = ["ahash", "allocator-api2", "inline-more", "rayon"] }
rayon = "1.8.0"
tar = "0.4"
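Of the hashbrown features requested above, "ahash", "allocator-api2", and "inline-more" are, as far as I can tell, the crate's usual default hasher, allocator, and inlining features; "rayon" is the one the reworked main.rs relies on, since it lets the finished map be iterated in parallel with par_iter. A small self-contained sketch of that behaviour, with a made-up key and path:

use hashbrown::HashMap;
use rayon::prelude::*;

fn main() {
    // With hashbrown's "rayon" feature enabled, &HashMap implements
    // IntoParallelIterator, so par_iter() is available directly on the map.
    let mapping: HashMap<String, String> =
        [("abc123".to_string(), "Assets/Models/robot.fbx".to_string())]
            .into_iter()
            .collect();
    mapping.par_iter().for_each(|(asset_hash, asset_path)| {
        println!("{asset_hash} -> {asset_path}");
    });
}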

src/args.rs (new file)

@@ -0,0 +1,22 @@
use std::path::PathBuf;
use clap::Parser;
/// Program for unpacking .unitypackage files.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
pub struct Args {
/// .unitypackage file to extract
#[arg(short, long)]
pub input: PathBuf,
/// target directory
#[arg(short, long)]
pub output: PathBuf,
/// Optional: path to the tool that will automatically convert FBX files to glTF during unpacking
#[arg(short, long)]
pub fbx_to_gltf: Option<PathBuf>,
/// Optional: file extensions that will be ignored during unpacking
#[arg(long, action = clap::ArgAction::Append)]
pub ignore_extensions: Option<Vec<String>>,
}
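Since every field carries an #[arg] attribute, clap derives the command-line interface from the struct itself. A hypothetical invocation (the binary name comes from Cargo.lock, the file names are made up) and the matching parse call, assuming the Args struct above is in scope:

use clap::Parser;

fn main() {
    // e.g. lwa_unity_unpack -i pack.unitypackage -o ./unpacked \
    //          --fbx-to-gltf ./FBX2glTF --ignore-extensions meta --ignore-extensions mat
    // ArgAction::Append lets --ignore-extensions be repeated, accumulating the
    // values into the Option<Vec<String>> field.
    let args = Args::parse(); // same derive-based parsing that main.rs now calls via crate::args
    println!("input: {:?} output: {:?}", args.input, args.output);
}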

src/main.rs

@@ -1,36 +1,18 @@
mod args;
use flate2::read::GzDecoder;
use std::collections::HashMap;
use hashbrown::HashMap;
use std::ffi::OsStr;
use std::fs::File;
use std::path::{Path, PathBuf};
use std::{fs, io, sync::Arc};
use tar::Archive;
use clap::Parser;
use rayon::prelude::*;
use std::io::prelude::*;
use std::io::BufReader;
use std::process::Command;
/// Program for unpacking unitypackages files.
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
/// .unitypackage file to extract
#[arg(short, long)]
input: PathBuf,
/// target directory
#[arg(short, long)]
output: PathBuf,
/// optional- path to the tool that will auto convert fbx files to gltf during unpacking
#[arg(short, long)]
fbx_to_gltf: Option<PathBuf>,
/// optional- extensions that will be ignored during unpacking
#[arg(long, action = clap::ArgAction::Append)]
ignore_extensions: Option<Vec<String>>,
}
use std::sync::mpsc::channel;
pub fn extract_archive(archive_path: &Path, extract_to: &Path) -> io::Result<()> {
let tar_gz = File::open(archive_path)?;
@@ -41,7 +23,7 @@ pub fn extract_archive(archive_path: &Path, extract_to: &Path) -> io::Result<()>
}
fn main() {
let args: Args = Args::parse();
let args = crate::args::Args::parse();
let ignored_extensions = args.ignore_extensions.unwrap_or_default();
let archive_path = Path::new(&args.input);
let tmp_dir = Path::new("./tmp_dir");
@@ -61,55 +43,57 @@ fn main() {
fs::remove_dir_all(output_dir).unwrap();
}
fs::create_dir(output_dir).unwrap();
let mut mapping: HashMap<String, String> = HashMap::new();
let (sender, receiver) = channel();
for entry in fs::read_dir(tmp_dir).unwrap() {
fs::read_dir(tmp_dir).unwrap().par_bridge().for_each_with(sender, |s,entry| {
let entry = entry.unwrap();
let root_file = entry.path();
let asset = entry.file_name().into_string().unwrap();
if root_file.is_dir() {
let mut real_path = String::new();
let mut extension = None;
let mut has_asset = false;
for sub_entry in fs::read_dir(root_file.clone()).unwrap() {
let sub_entry = sub_entry.unwrap();
let file_name = sub_entry.file_name().into_string().unwrap();
if file_name == "pathname" {
let path = sub_entry.path();
let file = File::open(path).unwrap();
let buf_reader = BufReader::new(file);
let line = buf_reader.lines().next();
match line {
Some(Ok(path)) => {
real_path = path;
if let Some(e) =
Path::new(&real_path).extension().and_then(OsStr::to_str)
{
extension = Some(String::from(e));
}
if !root_file.is_dir() {
return;
}
let mut real_path = String::new();
let mut extension = None;
let mut has_asset = false;
for sub_entry in fs::read_dir(root_file.clone()).unwrap() {
let sub_entry = sub_entry.unwrap();
let file_name = sub_entry.file_name().into_string().unwrap();
if file_name == "pathname" {
let path = sub_entry.path();
let file = File::open(path).unwrap();
let buf_reader = BufReader::new(file);
let line = buf_reader.lines().next();
match line {
Some(Ok(path)) => {
real_path = path;
if let Some(e) =
Path::new(&real_path).extension().and_then(OsStr::to_str)
{
extension = Some(String::from(e));
}
_ => continue,
}
} else if file_name == "asset" {
has_asset = true;
_ => continue,
}
}
if has_asset && !ignored_extensions.contains(&extension.unwrap_or_default()) {
mapping.insert(asset, real_path);
} else if file_name == "asset" {
has_asset = true;
}
}
}
println!("Results:");
if has_asset && !ignored_extensions.contains(&extension.unwrap_or_default()) {
s.send((asset, real_path)).unwrap();
}
});
let mapping: HashMap<String, String> = receiver.iter().collect();
let mapping_arc = Arc::new(mapping);
let tmp_dir = Arc::new(tmp_dir);
let output_dir = Arc::new(output_dir);
mapping_arc.par_iter().for_each(|(asset_hash, asset_path)| {
let path = Path::new(asset_path);
let source_asset = Path::new(&*tmp_dir).join(asset_hash).join("asset");
let result_path = output_dir.join(path);
let source_asset = Path::new(&*tmp_dir).join(&asset_hash).join("asset");
let result_path = output_dir.join(&path);
process_directory(asset_hash, asset_path, &result_path);
process_directory(&asset_hash, &asset_path, &result_path);
check_source_asset_exists(&source_asset);
if args.fbx_to_gltf.is_some() {