From 5c963e1d9d38e49adec2535ea8e1c0a680441ba9 Mon Sep 17 00:00:00 2001
From: Piotr Siuszko
Date: Wed, 27 Dec 2023 23:04:41 +0100
Subject: [PATCH] Refactor unpacking functionality into separate module

---
 src/args.rs     |   6 +-
 src/main.rs     | 158 +++----------------------------------
 src/unpacker.rs | 172 ++++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 185 insertions(+), 151 deletions(-)
 create mode 100644 src/unpacker.rs

diff --git a/src/args.rs b/src/args.rs
index 8231c5a..c46d32f 100644
--- a/src/args.rs
+++ b/src/args.rs
@@ -1,8 +1,8 @@
-use std::path::PathBuf;
 use clap::Parser;
+use std::path::{PathBuf};

 /// Program for unpacking unitypackages files.
-#[derive(Parser, Debug)]
+#[derive(Parser, Debug, Clone)]
 #[command(author, version, about, long_about = None)]
 pub struct Args {
     /// .unitypackage file to extract
@@ -19,4 +19,4 @@ pub struct Args {
     /// optional- extensions that will be ignored during unpacking
     #[arg(long, action = clap::ArgAction::Append)]
     pub ignore_extensions: Option<Vec<String>>,
-}
\ No newline at end of file
+}
diff --git a/src/main.rs b/src/main.rs
index f0341dd..ebdc15a 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,157 +1,19 @@
 mod args;
+mod unpacker;

-use flate2::read::GzDecoder;
-use hashbrown::HashMap;
-use std::ffi::OsStr;
-use std::fs::File;
-use std::path::{Path, PathBuf};
-use std::{fs, io, sync::Arc};
-use tar::Archive;
 use clap::Parser;
-use rayon::prelude::*;
-use std::io::prelude::*;
-use std::io::BufReader;
-use std::process::Command;
-use std::sync::mpsc::channel;
-pub fn extract_archive(archive_path: &Path, extract_to: &Path) -> io::Result<()> {
-    let tar_gz = File::open(archive_path)?;
-    let tar = GzDecoder::new(tar_gz);
-    let mut archive = Archive::new(tar);
-    archive.unpack(extract_to)?;
-    Ok(())
-}
+
+
+
+
+
+
 fn main() {
     let args = crate::args::Args::parse();
-    let ignored_extensions = args.ignore_extensions.unwrap_or_default();
-    let archive_path = Path::new(&args.input);
-    let tmp_dir = Path::new("./tmp_dir");
-    let output_dir = Path::new(&args.output);
-    if !archive_path.exists() {
-        panic!("Input file does not exits");
-    }
-    if tmp_dir.exists() {
-        println!("Temp directory exits, cleaning up first.");
-        fs::remove_dir_all(tmp_dir).unwrap();
-    }
-    if let Err(e) = extract_archive(archive_path, tmp_dir) {
-        println!("Failed to extract archive: {}", e);
-    }
-    if output_dir.exists() {
-        println!("Output directory exits, cleaning up first.");
-        fs::remove_dir_all(output_dir).unwrap();
-    }
-    fs::create_dir(output_dir).unwrap();
-    let (sender, receiver) = channel();
+    let unpacker = crate::unpacker::Unpacker { args };

-    fs::read_dir(tmp_dir).unwrap().par_bridge().for_each_with(sender, |s,entry| {
-        let entry = entry.unwrap();
-        let root_file = entry.path();
-        let asset = entry.file_name().into_string().unwrap();
-        if !root_file.is_dir() {
-            return;
-        }
-        let mut real_path = String::new();
-        let mut extension = None;
-        let mut has_asset = false;
-        for sub_entry in fs::read_dir(root_file.clone()).unwrap() {
-            let sub_entry = sub_entry.unwrap();
-            let file_name = sub_entry.file_name().into_string().unwrap();
-            if file_name == "pathname" {
-                let path = sub_entry.path();
-                let file = File::open(path).unwrap();
-                let buf_reader = BufReader::new(file);
-                let line = buf_reader.lines().next();
-                match line {
-                    Some(Ok(path)) => {
-                        real_path = path;
-                        if let Some(e) =
-                            Path::new(&real_path).extension().and_then(OsStr::to_str)
-                        {
-                            extension = Some(String::from(e));
-                        }
-                    }
-                    _ => continue,
-                }
-            } else if file_name == "asset" {
-                has_asset = true;
-            }
-        }
-        if has_asset && !ignored_extensions.contains(&extension.unwrap_or_default()) {
-            s.send((asset, real_path)).unwrap();
-        }
-    });
-    let mapping: HashMap<String, String> = receiver.iter().collect();
-    let mapping_arc = Arc::new(mapping);
-
-    let tmp_dir = Arc::new(tmp_dir);
-    let output_dir = Arc::new(output_dir);
-
-    mapping_arc.par_iter().for_each(|(asset_hash, asset_path)| {
-        let path = Path::new(asset_path);
-        let source_asset = Path::new(&*tmp_dir).join(&asset_hash).join("asset");
-        let result_path = output_dir.join(&path);
-
-        process_directory(&asset_hash, &asset_path, &result_path);
-        check_source_asset_exists(&source_asset);
-
-        if args.fbx_to_gltf.is_some() {
-            if let Some("fbx") = path.extension().and_then(OsStr::to_str) {
-                process_fbx_file(
-                    &source_asset,
-                    &result_path,
-                    &args.fbx_to_gltf.clone().unwrap(),
-                );
-                return;
-            }
-        }
-
-        process_non_fbx_file(&source_asset, &result_path);
-    });
-
-    fs::remove_dir_all(Path::new(&*tmp_dir)).unwrap();
-
-    fn process_directory(asset_hash: &str, asset_path: &str, result_path: &Path) {
-        println!("{}: {:?}", asset_hash, asset_path);
-        let result_dir = result_path.parent().unwrap();
-        if !result_dir.exists() {
-            fs::create_dir_all(result_dir).unwrap();
-        }
-    }
-
-    fn check_source_asset_exists(source_asset: &Path) {
-        if !source_asset.exists() {
-            panic!("SOURCE ASSET DOES NOT EXIST: {}", source_asset.display());
-        }
-    }
-
-    fn process_fbx_file(source_asset: &Path, result_path: &Path, tool: &PathBuf) {
-        let out_path = result_path.with_extension("");
-        println!(
-            "{:?}",
-            &[
-                "--input",
-                source_asset.to_str().unwrap(),
-                "--output",
-                out_path.to_str().unwrap()
-            ]
-        );
-        let output = Command::new(tool)
-            .args([
-                "--input",
-                source_asset.to_str().unwrap(),
-                "-b",
-                "--output",
-                out_path.to_str().unwrap(),
-            ])
-            .output()
-            .unwrap();
-        let output_result = String::from_utf8_lossy(&output.stdout);
-        println!("output: {}", output_result);
-    }
-
-    fn process_non_fbx_file(source_asset: &Path, result_path: &Path) {
-        fs::rename(source_asset, result_path).unwrap();
-    }
+    unpacker.prepare_environment();
+    unpacker.process_data();
 }
diff --git a/src/unpacker.rs b/src/unpacker.rs
new file mode 100644
index 0000000..a6659a8
--- /dev/null
+++ b/src/unpacker.rs
@@ -0,0 +1,172 @@
+use flate2::read::GzDecoder;
+use hashbrown::HashMap;
+use std::ffi::OsStr;
+use std::fs::File;
+use std::io::BufRead;
+use std::io::BufReader;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use std::sync::Arc;
+use std::{fs, io};
+
+use rayon::prelude::*;
+use std::sync::mpsc::channel;
+use tar::Archive;
+
+#[derive(Clone)]
+pub struct Unpacker {
+    pub args: crate::args::Args,
+}
+
+impl Unpacker {
+    pub fn prepare_environment(&self) {
+        let archive_path = Path::new(&self.args.input);
+        let output_dir = Path::new(&self.args.output);
+        let tmp_path = Path::new("./tmp_dir");
+        if !archive_path.exists() {
+            panic!("Input file does not exist");
+        }
+        if tmp_path.exists() {
+            println!("Temp directory exists, cleaning up first.");
+            fs::remove_dir_all(tmp_path).unwrap();
+        }
+        if output_dir.exists() {
+            println!("Output directory exists, cleaning up first.");
+            fs::remove_dir_all(output_dir).unwrap();
+        }
+    }
+
+    pub fn process_data(&self) {
+        let archive_path = Path::new(&self.args.input);
+        let output_dir = Path::new(&self.args.output);
+        let tmp_path = Path::new("./tmp_dir");
+        if let Err(e) = Unpacker::extract_archive(archive_path, tmp_path) {
+            println!("Failed to extract archive: {}", e);
+        }
+
+        let (sender, receiver) = channel();
+        let ignored_extensions = self.args.clone().ignore_extensions.unwrap_or_default();
+
+        fs::read_dir(tmp_path)
+            .unwrap()
+            .par_bridge()
+            .for_each_with(sender, |s, entry| {
+                let entry = entry.unwrap();
+                let root_file = entry.path();
+                let asset = entry.file_name().into_string().unwrap();
+                if !root_file.is_dir() {
+                    return;
+                }
+                let mut real_path = String::new();
+                let mut extension = None;
+                let mut has_asset = false;
+                for sub_entry in fs::read_dir(root_file.clone()).unwrap() {
+                    let sub_entry = sub_entry.unwrap();
+                    let file_name = sub_entry.file_name().into_string().unwrap();
+                    if file_name == "pathname" {
+                        let path = sub_entry.path();
+                        let file = File::open(path).unwrap();
+                        let buf_reader = BufReader::new(file);
+                        let line = buf_reader.lines().next();
+                        match line {
+                            Some(Ok(path)) => {
+                                real_path = path;
+                                if let Some(e) =
+                                    Path::new(&real_path).extension().and_then(OsStr::to_str)
+                                {
+                                    extension = Some(String::from(e));
+                                }
+                            }
+                            _ => continue,
+                        }
+                    } else if file_name == "asset" {
+                        has_asset = true;
+                    }
+                }
+                if has_asset && !ignored_extensions.contains(&extension.unwrap_or_default()) {
+                    s.send((asset, real_path)).unwrap();
+                }
+            });
+
+        let tmp_dir = Arc::new(tmp_path);
+        fs::create_dir(output_dir).unwrap();
+        let output_dir = Arc::new(output_dir);
+        let mapping: HashMap<String, String> = receiver.iter().collect();
+        let mapping_arc = Arc::new(mapping);
+
+        mapping_arc.par_iter().for_each(|(asset_hash, asset_path)| {
+            let path = Path::new(asset_path);
+            let source_asset = Path::new(&*tmp_dir).join(asset_hash).join("asset");
+            let result_path = output_dir.join(path);
+
+            process_directory(asset_hash, asset_path, &result_path);
+            check_source_asset_exists(&source_asset);
+
+            if self.args.fbx_to_gltf.is_some() {
+                if let Some("fbx") = path.extension().and_then(OsStr::to_str) {
+                    process_fbx_file(
+                        &source_asset,
+                        &result_path,
+                        &self.args.fbx_to_gltf.clone().unwrap(),
+                    );
+                    return;
+                }
+            }
+
+            process_non_fbx_file(&source_asset, &result_path);
+        });
+
+        fs::remove_dir_all(Path::new(&*tmp_dir)).unwrap();
+
+        fn process_directory(asset_hash: &str, asset_path: &str, result_path: &Path) {
+            println!("{}: {:?}", asset_hash, asset_path);
+            let result_dir = result_path.parent().unwrap();
+            if !result_dir.exists() {
+                fs::create_dir_all(result_dir).unwrap();
+            }
+        }
+
+        fn check_source_asset_exists(source_asset: &Path) {
+            if !source_asset.exists() {
+                panic!("SOURCE ASSET DOES NOT EXIST: {}", source_asset.display());
+            }
+        }
+
+        fn process_fbx_file(source_asset: &Path, result_path: &Path, tool: &PathBuf) {
+            let out_path = result_path.with_extension("");
+            println!(
+                "{:?}",
+                &[
+                    "--input",
+                    source_asset.to_str().unwrap(),
+                    "--output",
+                    out_path.to_str().unwrap()
+                ]
+            );
+            let output = Command::new(tool)
+                .args([
+                    "--input",
+                    source_asset.to_str().unwrap(),
+                    "-b",
+                    "--output",
+                    out_path.to_str().unwrap(),
+                ])
+                .output()
+                .unwrap();
+            let output_result = String::from_utf8_lossy(&output.stdout);
+            println!("output: {}", output_result);
+        }
+
+        fn process_non_fbx_file(source_asset: &Path, result_path: &Path) {
+            fs::rename(source_asset, result_path).unwrap();
+        }
+    }
+
+    fn extract_archive(archive_path: &Path, extract_to: &Path) -> io::Result<()> {
+        let tar_gz = File::open(archive_path)?;
+        let tar = GzDecoder::new(tar_gz);
+        let mut archive = Archive::new(tar);
+        archive.unpack(extract_to)?;
+        Ok(())
+    }
+}
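
Because the new Unpacker owns a plain pub args field and Args now derives Clone, the unpacking pipeline can also be driven programmatically (for example from an in-crate integration test) rather than only through clap. A minimal sketch, assuming the Args fields referenced by the module (input, output, fbx_to_gltf, ignore_extensions) are its only fields and that input/output convert from a string literal via Into; their exact types are not visible in the hunks above, and the literal paths are placeholders:

    // Hypothetical in-crate usage; field types and paths are assumptions, not part of the patch.
    let args = crate::args::Args {
        input: "./Sample.unitypackage".into(),
        output: "./extracted".into(),
        fbx_to_gltf: None,
        ignore_extensions: Some(vec!["meta".to_string()]),
    };
    let unpacker = crate::unpacker::Unpacker { args };
    unpacker.prepare_environment(); // panics if the input is missing; clears ./tmp_dir and the output dir
    unpacker.process_data();        // extracts the archive and rebuilds the asset tree under the output dir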