Make map tool usable as a library (#42)

Makes the map tool work for external libs. It works; if you have problems, merge it and fix it later.

Reviewed-on: #42
Reviewed-by: Rhys Lloyd <quaternions@noreply.itzana.me>
Co-authored-by: itzaname <me@sliving.io>
Co-committed-by: itzaname <me@sliving.io>
This commit was merged in pull request #42.
This commit is contained in:
2026-03-01 20:38:05 +00:00
committed by Rhys Lloyd
parent 06d0d70791
commit db7d7b3b36
10 changed files with 448 additions and 362 deletions

8
Cargo.lock generated
View File

@@ -2098,7 +2098,7 @@ dependencies = [
[[package]] [[package]]
name = "map-tool" name = "map-tool"
version = "1.7.2" version = "2.0.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"clap", "clap",
@@ -3909,7 +3909,7 @@ dependencies = [
[[package]] [[package]]
name = "strafesnet_bsp_loader" name = "strafesnet_bsp_loader"
version = "0.3.1" version = "0.4.0"
dependencies = [ dependencies = [
"glam", "glam",
"strafesnet_common", "strafesnet_common",
@@ -3935,7 +3935,7 @@ dependencies = [
[[package]] [[package]]
name = "strafesnet_deferred_loader" name = "strafesnet_deferred_loader"
version = "0.5.1" version = "0.6.0"
dependencies = [ dependencies = [
"strafesnet_common", "strafesnet_common",
] ]
@@ -3964,7 +3964,7 @@ dependencies = [
[[package]] [[package]]
name = "strafesnet_rbx_loader" name = "strafesnet_rbx_loader"
version = "0.7.0" version = "0.8.0"
dependencies = [ dependencies = [
"bytemuck", "bytemuck",
"glam", "glam",

View File

@@ -49,8 +49,8 @@ strafesnet_settings = { path = "engine/settings", registry = "strafesnet" }
fixed_wide = { version = "0.2.2", path = "lib/fixed_wide", registry = "strafesnet" } fixed_wide = { version = "0.2.2", path = "lib/fixed_wide", registry = "strafesnet" }
linear_ops = { version = "0.1.1", path = "lib/linear_ops", registry = "strafesnet" } linear_ops = { version = "0.1.1", path = "lib/linear_ops", registry = "strafesnet" }
ratio_ops = { version = "0.1.0", path = "lib/ratio_ops", registry = "strafesnet" } ratio_ops = { version = "0.1.0", path = "lib/ratio_ops", registry = "strafesnet" }
strafesnet_bsp_loader = { path = "lib/bsp_loader", registry = "strafesnet" } strafesnet_bsp_loader = { version = "0.4.0", path = "lib/bsp_loader", registry = "strafesnet" }
strafesnet_common = { version = "0.8.6", path = "lib/common", registry = "strafesnet" } strafesnet_common = { version = "0.8.6", path = "lib/common", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.1", path = "lib/deferred_loader", registry = "strafesnet" } strafesnet_deferred_loader = { version = "0.6.0", path = "lib/deferred_loader", registry = "strafesnet" }
strafesnet_rbx_loader = { path = "lib/rbx_loader", registry = "strafesnet" } strafesnet_rbx_loader = { version = "0.8.0", path = "lib/rbx_loader", registry = "strafesnet" }
strafesnet_snf = { version = "0.3.2", path = "lib/snf", registry = "strafesnet" } strafesnet_snf = { version = "0.3.2", path = "lib/snf", registry = "strafesnet" }

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "strafesnet_bsp_loader" name = "strafesnet_bsp_loader"
version = "0.3.1" version = "0.4.0"
edition = "2024" edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project" repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "strafesnet_deferred_loader" name = "strafesnet_deferred_loader"
version = "0.5.1" version = "0.6.0"
edition = "2024" edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project" repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "strafesnet_rbx_loader" name = "strafesnet_rbx_loader"
version = "0.7.0" version = "0.8.0"
edition = "2024" edition = "2024"
repository = "https://git.itzana.me/StrafesNET/strafe-project" repository = "https://git.itzana.me/StrafesNET/strafe-project"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"

View File

@@ -1,15 +1,26 @@
[package] [package]
name = "map-tool" name = "map-tool"
version = "1.7.2" version = "2.0.0"
edition = "2024" edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = ["cli"]
cli = ["dep:clap", "tokio/macros", "tokio/rt-multi-thread", "tokio/fs", "dep:futures"]
[lib]
name = "map_tool"
[[bin]]
name = "map-tool"
required-features = ["cli"]
[dependencies] [dependencies]
anyhow = "1.0.75" anyhow = "1.0.75"
clap = { version = "4.4.2", features = ["derive"] } clap = { version = "4.4.2", features = ["derive"], optional = true }
flate2 = "1.0.27" flate2 = "1.0.27"
futures = "0.3.31" futures = { version = "0.3.31", optional = true }
image = "0.25.2" image = "0.25.2"
image_dds = "0.7.1" image_dds = "0.7.1"
rbx_asset = { version = "0.5.0", registry = "strafesnet" } rbx_asset = { version = "0.5.0", registry = "strafesnet" }
@@ -23,7 +34,7 @@ strafesnet_deferred_loader.workspace = true
strafesnet_rbx_loader.workspace = true strafesnet_rbx_loader.workspace = true
strafesnet_snf.workspace = true strafesnet_snf.workspace = true
thiserror = "2.0.11" thiserror = "2.0.11"
tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread", "fs"] } tokio = { version = "1.43.0", features = ["time"] }
vbsp = "0.9.1" vbsp = "0.9.1"
vbsp-entities-css = "0.6.0" vbsp-entities-css = "0.6.0"
vmdl = "0.2.0" vmdl = "0.2.0"

2
map-tool/src/lib.rs Normal file
View File

@@ -0,0 +1,2 @@
pub mod roblox;
pub mod source;

View File

@@ -1,6 +1,3 @@
mod roblox;
mod source;
use clap::{Parser,Subcommand}; use clap::{Parser,Subcommand};
use anyhow::Result as AResult; use anyhow::Result as AResult;
@@ -15,9 +12,9 @@ struct Cli {
#[derive(Subcommand)] #[derive(Subcommand)]
enum Commands{ enum Commands{
#[command(flatten)] #[command(flatten)]
Roblox(roblox::Commands), Roblox(map_tool::roblox::Commands),
#[command(flatten)] #[command(flatten)]
Source(source::Commands), Source(map_tool::source::Commands),
} }
#[tokio::main] #[tokio::main]

View File

@@ -1,80 +1,23 @@
use std::path::{Path,PathBuf};
use std::io::{Cursor,Read,Seek}; use std::io::{Cursor,Read,Seek};
use std::collections::HashSet; use std::collections::HashSet;
use clap::{Args,Subcommand};
use anyhow::Result as AResult;
use rbx_dom_weak::Instance;
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode; use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
use rbxassetid::RobloxAssetId; pub use rbxassetid::RobloxAssetId;
use tokio::io::AsyncReadExt; use rbx_dom_weak::Instance;
// disallow non-static lifetimes // disallow non-static lifetimes
fn static_ustr(s:&'static str)->rbx_dom_weak::Ustr{ fn static_ustr(s:&'static str)->rbx_dom_weak::Ustr{
rbx_dom_weak::ustr(s) rbx_dom_weak::ustr(s)
} }
const DOWNLOAD_LIMIT:usize=16;
#[derive(Subcommand)]
pub enum Commands{
RobloxToSNF(RobloxToSNFSubcommand),
DownloadAssets(DownloadAssetsSubcommand),
}
#[derive(Args)]
pub struct RobloxToSNFSubcommand {
#[arg(long)]
output_folder:PathBuf,
#[arg(required=true)]
input_files:Vec<PathBuf>,
}
#[derive(Args)]
pub struct DownloadAssetsSubcommand{
#[arg(required=true)]
roblox_files:Vec<PathBuf>,
#[arg(long,group="cookie",required=true)]
cookie_literal:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_envvar:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_file:Option<PathBuf>,
}
impl Commands{
pub async fn run(self)->AResult<()>{
match self{
Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder).await,
Commands::DownloadAssets(subcommand)=>download_assets(
subcommand.roblox_files,
cookie_from_args(
subcommand.cookie_literal,
subcommand.cookie_envvar,
subcommand.cookie_file,
).await?,
).await,
}
}
}
async fn cookie_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<rbx_asset::cookie::Cookie>{
let cookie=match (literal,environment,file){
(Some(cookie_literal),None,None)=>cookie_literal,
(None,Some(cookie_environment),None)=>std::env::var(cookie_environment)?,
(None,None,Some(cookie_file))=>tokio::fs::read_to_string(cookie_file).await?,
_=>Err(anyhow::Error::msg("Illegal cookie argument triple"))?,
};
Ok(rbx_asset::cookie::Cookie::new(cookie))
}
#[expect(dead_code)]
#[derive(Debug)] #[derive(Debug)]
enum LoadDomError{ pub enum LoadDomError{
IO(std::io::Error), IO(std::io::Error),
Binary(rbx_binary::DecodeError), Binary(rbx_binary::DecodeError),
Xml(rbx_xml::DecodeError), Xml(rbx_xml::DecodeError),
UnknownFormat, UnknownFormat,
} }
fn load_dom<R:Read+Seek>(mut input:R)->Result<rbx_dom_weak::WeakDom,LoadDomError>{ pub fn load_dom(data:&[u8])->Result<rbx_dom_weak::WeakDom,LoadDomError>{
let mut input=Cursor::new(data);
let mut first_8=[0u8;8]; let mut first_8=[0u8;8];
input.read_exact(&mut first_8).map_err(LoadDomError::IO)?; input.read_exact(&mut first_8).map_err(LoadDomError::IO)?;
input.rewind().map_err(LoadDomError::IO)?; input.rewind().map_err(LoadDomError::IO)?;
@@ -85,46 +28,6 @@ fn load_dom<R:Read+Seek>(mut input:R)->Result<rbx_dom_weak::WeakDom,LoadDomError
} }
} }
/* The ones I'm interested in:
Beam.Texture
Decal.Texture
FileMesh.MeshId
FileMesh.TextureId
MaterialVariant.ColorMap
MaterialVariant.MetalnessMap
MaterialVariant.NormalMap
MaterialVariant.RoughnessMap
MeshPart.MeshId
MeshPart.TextureID
ParticleEmitter.Texture
Sky.MoonTextureId
Sky.SkyboxBk
Sky.SkyboxDn
Sky.SkyboxFt
Sky.SkyboxLf
Sky.SkyboxRt
Sky.SkyboxUp
Sky.SunTextureId
SurfaceAppearance.ColorMap
SurfaceAppearance.MetalnessMap
SurfaceAppearance.NormalMap
SurfaceAppearance.RoughnessMap
SurfaceAppearance.TexturePack
*/
/* These properties now use Content
BaseWrap.CageMeshContent
Decal.TextureContent
ImageButton.ImageContent
ImageLabel.ImageContent
MeshPart.MeshContent
MeshPart.TextureContent
SurfaceAppearance.ColorMapContent
SurfaceAppearance.MetalnessMapContent
SurfaceAppearance.NormalMapContent
SurfaceAppearance.RoughnessMapContent
WrapLayer.ReferenceMeshContent
*/
fn accumulate_content(content_list:&mut HashSet<RobloxAssetId>,object:&Instance,property:&'static str){ fn accumulate_content(content_list:&mut HashSet<RobloxAssetId>,object:&Instance,property:&'static str){
let Some(rbx_dom_weak::types::Variant::Content(content))=object.properties.get(&static_ustr(property))else{ let Some(rbx_dom_weak::types::Variant::Content(content))=object.properties.get(&static_ustr(property))else{
println!("property={} does not exist for class={}",property,object.class.as_str()); println!("property={} does not exist for class={}",property,object.class.as_str());
@@ -151,17 +54,12 @@ fn accumulate_content_id(content_list:&mut HashSet<RobloxAssetId>,object:&Instan
}; };
content_list.insert(asset_id); content_list.insert(asset_id);
} }
async fn read_entire_file(path:impl AsRef<Path>)->Result<Cursor<Vec<u8>>,std::io::Error>{
let mut file=tokio::fs::File::open(path).await?;
let mut data=Vec::new();
file.read_to_end(&mut data).await?;
Ok(Cursor::new(data))
}
#[derive(Default)] #[derive(Default)]
struct UniqueAssets{ pub struct UniqueAssets{
meshes:HashSet<RobloxAssetId>, pub meshes:HashSet<RobloxAssetId>,
unions:HashSet<RobloxAssetId>, pub unions:HashSet<RobloxAssetId>,
textures:HashSet<RobloxAssetId>, pub textures:HashSet<RobloxAssetId>,
} }
impl UniqueAssets{ impl UniqueAssets{
fn collect(&mut self,object:&Instance){ fn collect(&mut self,object:&Instance){
@@ -192,27 +90,224 @@ impl UniqueAssets{
} }
} }
#[expect(dead_code)] pub fn get_unique_assets(dom:rbx_dom_weak::WeakDom)->UniqueAssets{
#[derive(Debug)]
enum UniqueAssetError{
IO(std::io::Error),
LoadDom(LoadDomError),
}
async fn unique_assets(path:&Path)->Result<UniqueAssets,UniqueAssetError>{
// read entire file
let mut assets=UniqueAssets::default(); let mut assets=UniqueAssets::default();
let data=read_entire_file(path).await.map_err(UniqueAssetError::IO)?;
let dom=load_dom(data).map_err(UniqueAssetError::LoadDom)?;
for object in dom.into_raw().1.into_values(){ for object in dom.into_raw().1.into_values(){
assets.collect(&object); assets.collect(&object);
} }
Ok(assets) assets
} }
pub fn get_unique_assets_from_file(data:&[u8])->Result<UniqueAssets,UniqueAssetError>{
let dom=load_dom(data).map_err(UniqueAssetError::LoadDom)?;
Ok(get_unique_assets(dom))
}
#[derive(Debug)]
pub enum UniqueAssetError{
LoadDom(LoadDomError),
}
#[derive(Debug)]
pub enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
RobloxRead(strafesnet_rbx_loader::ReadError),
RobloxLoad(strafesnet_rbx_loader::LoadError),
}
impl std::fmt::Display for ConvertError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for ConvertError{}
pub struct ConvertOutput{
pub snf:Vec<u8>,
pub script_errors:Vec<strafesnet_rbx_loader::RunnerError>,
pub convert_errors:strafesnet_rbx_loader::RecoverableErrors,
}
pub fn convert_to_snf(data:&[u8])->Result<ConvertOutput,ConvertError>{
let model=strafesnet_rbx_loader::read(data).map_err(ConvertError::RobloxRead)?;
let mut place=strafesnet_rbx_loader::Place::from(model);
let script_errors=place.run_scripts().unwrap_or_else(|e|vec![e]);
let (map,convert_errors)=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
let mut snf_buf=Vec::new();
strafesnet_snf::map::write_map(Cursor::new(&mut snf_buf),map).map_err(ConvertError::SNFMap)?;
Ok(ConvertOutput{
snf:snf_buf,
script_errors,
convert_errors,
})
}
#[derive(Debug,thiserror::Error)]
pub enum ConvertTextureError{
#[error("Image error {0:?}")]
Image(#[from]image::ImageError),
#[error("DDS create error {0:?}")]
DDS(#[from]image_dds::CreateDdsError),
#[error("DDS write error {0:?}")]
DDSWrite(#[from]image_dds::ddsfile::Error),
}
pub fn convert_texture_to_dds(data:&[u8])->Result<Vec<u8>,ConvertTextureError>{
let image=image::load_from_memory(data)?.to_rgba8();
let format=if image.width()%4!=0||image.height()%4!=0{
image_dds::ImageFormat::Rgba8UnormSrgb
}else{
image_dds::ImageFormat::BC7RgbaUnormSrgb
};
let dds=image_dds::dds_from_image(
&image,
format,
image_dds::Quality::Slow,
image_dds::Mipmaps::GeneratedAutomatic,
)?;
let mut buf=Vec::new();
dds.write(&mut Cursor::new(&mut buf))?;
Ok(buf)
}
#[derive(Debug)]
pub enum DownloadAssetError{
Get(rbx_asset::cookie::GetError),
IO(std::io::Error),
}
impl std::fmt::Display for DownloadAssetError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for DownloadAssetError{}
pub async fn download_asset(context:&rbx_asset::cookie::Context,asset_id:u64)->Result<Vec<u8>,DownloadAssetError>{
let data=context.get_asset(rbx_asset::cookie::GetAssetRequest{
asset_id,
version:None,
}).await.map_err(DownloadAssetError::Get)?;
Ok(data.to_vec().map_err(DownloadAssetError::IO)?)
}
const BACKOFF_MUL:f32=1.3956124250860895286;//exp(1/3)
const RETRY_LIMIT:u32=12;
pub enum DownloadRetryResult{
Ok(Vec<u8>),
TimedOut,
}
pub async fn download_asset_retry(context:&rbx_asset::cookie::Context,asset_id:u64)->Result<DownloadRetryResult,DownloadAssetError>{
let mut retry=0u32;
let mut backoff=1000f32;
loop{
match context.get_asset(rbx_asset::cookie::GetAssetRequest{
asset_id,
version:None,
}).await{
Ok(data)=>{
let bytes=data.to_vec().map_err(DownloadAssetError::IO)?;
break Ok(DownloadRetryResult::Ok(bytes));
},
Err(rbx_asset::cookie::GetError::Response(rbx_asset::types::ResponseError::Details{status_code,url_and_body}))=>{
if status_code.as_u16()==429{
if retry==RETRY_LIMIT{
break Ok(DownloadRetryResult::TimedOut);
}
tokio::time::sleep(std::time::Duration::from_millis(backoff as u64)).await;
backoff*=BACKOFF_MUL;
retry+=1;
}else{
break Err(DownloadAssetError::Get(rbx_asset::cookie::GetError::Response(rbx_asset::types::ResponseError::Details{status_code,url_and_body})));
}
},
Err(e)=>{
break Err(DownloadAssetError::Get(e));
},
}
}
}
// --- CLI-only code ---
#[cfg(feature="cli")]
use std::path::PathBuf;
#[cfg(feature="cli")]
use anyhow::Result as AResult;
#[cfg(feature="cli")]
const DOWNLOAD_LIMIT:usize=16;
#[cfg(feature="cli")]
#[derive(clap::Subcommand)]
pub enum Commands{
RobloxToSNF(RobloxToSNFSubcommand),
DownloadAssets(DownloadAssetsSubcommand),
}
#[cfg(feature="cli")]
#[derive(clap::Args)]
pub struct RobloxToSNFSubcommand {
#[arg(long)]
output_folder:PathBuf,
#[arg(required=true)]
input_files:Vec<PathBuf>,
}
#[cfg(feature="cli")]
#[derive(clap::Args)]
pub struct DownloadAssetsSubcommand{
#[arg(required=true)]
roblox_files:Vec<PathBuf>,
#[arg(long,group="cookie",required=true)]
cookie_literal:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_envvar:Option<String>,
#[arg(long,group="cookie",required=true)]
cookie_file:Option<PathBuf>,
}
#[cfg(feature="cli")]
impl Commands{
pub async fn run(self)->AResult<()>{
match self{
Commands::RobloxToSNF(subcommand)=>roblox_to_snf(subcommand.input_files,subcommand.output_folder).await,
Commands::DownloadAssets(subcommand)=>download_assets(
subcommand.roblox_files,
cookie_from_args(
subcommand.cookie_literal,
subcommand.cookie_envvar,
subcommand.cookie_file,
).await?,
).await,
}
}
}
#[cfg(feature="cli")]
async fn cookie_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<rbx_asset::cookie::Cookie>{
let cookie=match (literal,environment,file){
(Some(cookie_literal),None,None)=>cookie_literal,
(None,Some(cookie_environment),None)=>std::env::var(cookie_environment)?,
(None,None,Some(cookie_file))=>tokio::fs::read_to_string(cookie_file).await?,
_=>Err(anyhow::Error::msg("Illegal cookie argument triple"))?,
};
Ok(rbx_asset::cookie::Cookie::new(cookie))
}
#[cfg(feature="cli")]
enum DownloadType{ enum DownloadType{
Texture(RobloxAssetId), Texture(RobloxAssetId),
Mesh(RobloxAssetId), Mesh(RobloxAssetId),
Union(RobloxAssetId), Union(RobloxAssetId),
} }
#[cfg(feature="cli")]
impl DownloadType{ impl DownloadType{
fn path(&self)->PathBuf{ fn path(&self)->PathBuf{
match self{ match self{
@@ -229,11 +324,13 @@ impl DownloadType{
} }
} }
} }
#[cfg(feature="cli")]
enum DownloadResult{ enum DownloadResult{
Cached(PathBuf), Cached(PathBuf),
Data(Vec<u8>), Data(Vec<u8>),
Failed, Failed,
} }
#[cfg(feature="cli")]
#[derive(Default,Debug)] #[derive(Default,Debug)]
struct Stats{ struct Stats{
total_assets:u32, total_assets:u32,
@@ -242,6 +339,7 @@ struct Stats{
failed_downloads:u32, failed_downloads:u32,
timed_out_downloads:u32, timed_out_downloads:u32,
} }
#[cfg(feature="cli")]
async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::Context,download_instruction:DownloadType)->Result<DownloadResult,std::io::Error>{ async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::Context,download_instruction:DownloadType)->Result<DownloadResult,std::io::Error>{
stats.total_assets+=1; stats.total_assets+=1;
// check if file exists on disk // check if file exists on disk
@@ -253,7 +351,6 @@ async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::Context,dow
let asset_id=download_instruction.asset_id(); let asset_id=download_instruction.asset_id();
// if not, download file // if not, download file
let mut retry=0; let mut retry=0;
const BACKOFF_MUL:f32=1.3956124250860895286;//exp(1/3)
let mut backoff=1000f32; let mut backoff=1000f32;
loop{ loop{
let asset_result=context.get_asset(rbx_asset::cookie::GetAssetRequest{ let asset_result=context.get_asset(rbx_asset::cookie::GetAssetRequest{
@@ -292,47 +389,30 @@ async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::Context,dow
} }
} }
} }
#[derive(Debug,thiserror::Error)]
enum ConvertTextureError{ #[cfg(feature="cli")]
#[error("Io error {0:?}")] async fn cli_convert_texture(RobloxAssetId(asset_id):RobloxAssetId,download_result:DownloadResult)->Result<(),CliConvertTextureError>{
Io(#[from]std::io::Error),
#[error("Image error {0:?}")]
Image(#[from]image::ImageError),
#[error("DDS create error {0:?}")]
DDS(#[from]image_dds::CreateDdsError),
#[error("DDS write error {0:?}")]
DDSWrite(#[from]image_dds::ddsfile::Error),
}
async fn convert_texture(RobloxAssetId(asset_id):RobloxAssetId,download_result:DownloadResult)->Result<(),ConvertTextureError>{
let data=match download_result{ let data=match download_result{
DownloadResult::Cached(path)=>tokio::fs::read(path).await?, DownloadResult::Cached(path)=>tokio::fs::read(path).await?,
DownloadResult::Data(data)=>data, DownloadResult::Data(data)=>data,
DownloadResult::Failed=>return Ok(()), DownloadResult::Failed=>return Ok(()),
}; };
// image::ImageFormat::Png let dds=convert_texture_to_dds(&data)?;
// image::ImageFormat::Jpeg
let image=image::load_from_memory(&data)?.to_rgba8();
// pick format
let format=if image.width()%4!=0||image.height()%4!=0{
image_dds::ImageFormat::Rgba8UnormSrgb
}else{
image_dds::ImageFormat::BC7RgbaUnormSrgb
};
//this fails if the image dimensions are not a multiple of 4
let dds=image_dds::dds_from_image(
&image,
format,
image_dds::Quality::Slow,
image_dds::Mipmaps::GeneratedAutomatic,
)?;
let file_name=format!("textures/{asset_id}.dds"); let file_name=format!("textures/{asset_id}.dds");
let mut file=std::fs::File::create(file_name)?; tokio::fs::write(file_name,&dds).await?;
dds.write(&mut file)?;
Ok(()) Ok(())
} }
#[cfg(feature="cli")]
#[derive(Debug,thiserror::Error)]
enum CliConvertTextureError{
#[error("Io error {0:?}")]
Io(#[from]std::io::Error),
#[error("ConvertTexture error {0:?}")]
ConvertTexture(#[from]ConvertTextureError),
}
#[cfg(feature="cli")]
async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->AResult<()>{ async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->AResult<()>{
tokio::try_join!( tokio::try_join!(
tokio::fs::create_dir_all("downloaded_textures"), tokio::fs::create_dir_all("downloaded_textures"),
@@ -356,27 +436,19 @@ async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->A
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){ while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let send=send_assets.clone(); let send=send_assets.clone();
tokio::spawn(async move{ tokio::spawn(async move{
let result=unique_assets(path.as_path()).await; let result=match tokio::fs::read(&path).await{
Ok(data)=>get_unique_assets_from_file(&data).map_err(|e|format!("{e:?}")),
Err(e)=>Err(format!("{e:?}")),
};
_=send.send(result).await; _=send.send(result).await;
drop(permit); drop(permit);
}); });
} }
}); });
// download manager // download manager
// insert into global unique assets guy
// add to download queue if the asset is globally unique and does not already exist on disk
let mut stats=Stats::default(); let mut stats=Stats::default();
let context=rbx_asset::cookie::Context::new(cookie); let context=rbx_asset::cookie::Context::new(cookie);
let mut globally_unique_assets=UniqueAssets::default(); let mut globally_unique_assets=UniqueAssets::default();
// pop a job = retry_queue.pop_front() or ingest(recv.recv().await)
// SLOW MODE:
// acquire all permits
// drop all permits
// pop one job
// if it succeeds go into fast mode
// FAST MODE:
// acquire one permit
// pop a job
let download_thread=tokio::spawn(async move{ let download_thread=tokio::spawn(async move{
while let Some(result)=recv_assets.recv().await{ while let Some(result)=recv_assets.recv().await{
let unique_assets=match result{ let unique_assets=match result{
@@ -410,7 +482,7 @@ async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->A
SEM.add_permits(thread_limit); SEM.add_permits(thread_limit);
while let (Ok(permit),Some((asset_id,download_result)))=(SEM.acquire().await,recv_texture.recv().await){ while let (Ok(permit),Some((asset_id,download_result)))=(SEM.acquire().await,recv_texture.recv().await){
tokio::spawn(async move{ tokio::spawn(async move{
let result=convert_texture(asset_id,download_result).await; let result=cli_convert_texture(asset_id,download_result).await;
drop(permit); drop(permit);
result.unwrap(); result.unwrap();
}); });
@@ -420,51 +492,7 @@ async fn download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->A
Ok(()) Ok(())
} }
#[derive(Debug)] #[cfg(feature="cli")]
#[expect(dead_code)]
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
RobloxRead(strafesnet_rbx_loader::ReadError),
RobloxLoad(strafesnet_rbx_loader::LoadError),
}
impl std::fmt::Display for ConvertError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for ConvertError{}
struct Errors{
script_errors:Vec<strafesnet_rbx_loader::RunnerError>,
convert_errors:strafesnet_rbx_loader::RecoverableErrors,
}
fn convert_to_snf(path:&Path,output_folder:PathBuf)->Result<Errors,ConvertError>{
let entire_file=std::fs::read(path).map_err(ConvertError::IO)?;
let model=strafesnet_rbx_loader::read(
entire_file.as_slice()
).map_err(ConvertError::RobloxRead)?;
let mut place=strafesnet_rbx_loader::Place::from(model);
let script_errors=place.run_scripts().unwrap_or_else(|e|vec![e]);
let (map,convert_errors)=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
dest.set_extension("snfm");
let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
Ok(Errors{
script_errors,
convert_errors,
})
}
async fn roblox_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf)->AResult<()>{ async fn roblox_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf)->AResult<()>{
let start=std::time::Instant::now(); let start=std::time::Instant::now();
@@ -477,17 +505,25 @@ async fn roblox_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf)->AResult<()>{
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){ while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let output_folder=output_folder.clone(); let output_folder=output_folder.clone();
tokio::task::spawn_blocking(move||{ tokio::task::spawn_blocking(move||{
let result=convert_to_snf(path.as_path(),output_folder); let result=std::fs::read(&path).and_then(|data|{
convert_to_snf(&data).map_err(|e|std::io::Error::other(e)).and_then(|output|{
let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
dest.set_extension("snfm");
std::fs::write(dest,&output.snf)?;
Ok(output)
})
});
drop(permit); drop(permit);
match result{ match result{
Ok(errors)=>{ Ok(output)=>{
for error in errors.script_errors{ for error in output.script_errors{
println!("Script error: {error}"); println!("Script error: {error}");
} }
let error_count=errors.convert_errors.count(); let error_count=output.convert_errors.count();
if error_count!=0{ if error_count!=0{
println!("Error count: {error_count}"); println!("Error count: {error_count}");
println!("Errors: {}",errors.convert_errors); println!("Errors: {}",output.convert_errors);
} }
}, },
Err(e)=>println!("Convert error: {e:?}"), Err(e)=>println!("Convert error: {e:?}"),

View File

@@ -1,60 +1,15 @@
use std::path::{Path,PathBuf}; use std::path::PathBuf;
use std::borrow::Cow; use std::borrow::Cow;
use clap::{Args,Subcommand}; use std::io::Cursor;
use anyhow::Result as AResult;
use futures::StreamExt;
use strafesnet_bsp_loader::loader::BspFinder; use strafesnet_bsp_loader::loader::BspFinder;
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
#[cfg(feature="cli")]
use strafesnet_deferred_loader::loader::Loader; use strafesnet_deferred_loader::loader::Loader;
use strafesnet_deferred_loader::deferred_loader::{LoadFailureMode,MeshDeferredLoader,RenderConfigDeferredLoader}; #[cfg(feature="cli")]
use strafesnet_deferred_loader::deferred_loader::{MeshDeferredLoader,RenderConfigDeferredLoader};
#[cfg(feature="cli")]
use vbsp_entities_css::Entity; use vbsp_entities_css::Entity;
#[derive(Subcommand)]
pub enum Commands{
SourceToSNF(SourceToSNFSubcommand),
ExtractTextures(ExtractTexturesSubcommand),
VPKContents(VPKContentsSubcommand),
BSPContents(BSPContentsSubcommand),
}
#[derive(Args)]
pub struct SourceToSNFSubcommand {
#[arg(long)]
output_folder:PathBuf,
#[arg(required=true)]
input_files:Vec<PathBuf>,
#[arg(long)]
vpks:Vec<PathBuf>,
}
#[derive(Args)]
pub struct ExtractTexturesSubcommand{
#[arg(required=true)]
bsp_files:Vec<PathBuf>,
#[arg(long)]
vpks:Vec<PathBuf>,
}
#[derive(Args)]
pub struct VPKContentsSubcommand {
#[arg(long)]
input_file:PathBuf,
}
#[derive(Args)]
pub struct BSPContentsSubcommand {
#[arg(long)]
input_file:PathBuf,
}
impl Commands{
pub async fn run(self)->AResult<()>{
match self{
Commands::SourceToSNF(subcommand)=>source_to_snf(subcommand.input_files,subcommand.output_folder,subcommand.vpks).await,
Commands::ExtractTextures(subcommand)=>extract_textures(subcommand.bsp_files,subcommand.vpks).await,
Commands::VPKContents(subcommand)=>vpk_contents(subcommand.input_file),
Commands::BSPContents(subcommand)=>bsp_contents(subcommand.input_file),
}
}
}
enum VMTContent{ enum VMTContent{
VMT(String), VMT(String),
VTF(String), VTF(String),
@@ -97,7 +52,7 @@ fn get_some_texture(material:vmt_parser::material::Material)->VMTContent{
} }
#[derive(Debug,thiserror::Error)] #[derive(Debug,thiserror::Error)]
enum GetVMTError{ pub enum GetVMTError{
#[error("Bsp error {0:?}")] #[error("Bsp error {0:?}")]
Bsp(#[from]vbsp::BspError), Bsp(#[from]vbsp::BspError),
#[error("Utf8 error {0:?}")] #[error("Utf8 error {0:?}")]
@@ -113,12 +68,11 @@ fn get_vmt(finder:BspFinder,search_name:&str)->Result<vmt_parser::material::Mate
//decode vmt and then write //decode vmt and then write
let vmt_str=core::str::from_utf8(&vmt_data)?; let vmt_str=core::str::from_utf8(&vmt_data)?;
let material=vmt_parser::from_str(vmt_str)?; let material=vmt_parser::from_str(vmt_str)?;
//println!("vmt material={:?}",material);
Ok(material) Ok(material)
} }
#[derive(Debug,thiserror::Error)] #[derive(Debug,thiserror::Error)]
enum LoadVMTError{ pub enum LoadVMTError{
#[error("Bsp error {0:?}")] #[error("Bsp error {0:?}")]
Bsp(#[from]vbsp::BspError), Bsp(#[from]vbsp::BspError),
#[error("GetVMT error {0:?}")] #[error("GetVMT error {0:?}")]
@@ -164,7 +118,7 @@ fn recursive_vmt_loader<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,material:vmt_p
VMTContent::Unresolved=>Err(LoadVMTError::Unresolved), VMTContent::Unresolved=>Err(LoadVMTError::Unresolved),
} }
} }
fn load_texture<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,texture_name:&str)->Result<Option<Cow<'a,[u8]>>,LoadVMTError> pub fn load_texture<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,texture_name:&str)->Result<Option<Cow<'a,[u8]>>,LoadVMTError>
where where
'bsp:'a, 'bsp:'a,
'vpk:'a, 'vpk:'a,
@@ -223,6 +177,131 @@ fn load_texture<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,texture_name:&str)->Re
Ok(None) Ok(None)
} }
#[derive(Debug)]
pub enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
BspRead(strafesnet_bsp_loader::ReadError),
BspLoad(strafesnet_bsp_loader::LoadError),
}
impl std::fmt::Display for ConvertError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for ConvertError{}
pub fn convert_to_snf(bsp_data:&[u8],vpk_list:&[strafesnet_bsp_loader::Vpk])->Result<Vec<u8>,ConvertError>{
let bsp=strafesnet_bsp_loader::read(
Cursor::new(bsp_data)
).map_err(ConvertError::BspRead)?;
let map=bsp.to_snf(LoadFailureMode::DefaultToNone,vpk_list).map_err(ConvertError::BspLoad)?;
let mut snf_buf=Vec::new();
strafesnet_snf::map::write_map(Cursor::new(&mut snf_buf),map).map_err(ConvertError::SNFMap)?;
Ok(snf_buf)
}
#[derive(Debug,thiserror::Error)]
pub enum ConvertTextureError{
#[error("Vtf error {0:?}")]
Vtf(#[from]vtf::Error),
#[error("DDS create error {0:?}")]
DDS(#[from]image_dds::CreateDdsError),
#[error("DDS write error {0:?}")]
DDSWrite(#[from]image_dds::ddsfile::Error),
}
pub fn convert_texture_to_dds(vtf_data:&[u8])->Result<Vec<u8>,ConvertTextureError>{
let vtf_vec=vtf_data.to_vec();
let image=vtf::from_bytes(&vtf_vec)?.highres_image.decode(0)?.to_rgba8();
let format=if image.width()%4!=0||image.height()%4!=0{
image_dds::ImageFormat::Rgba8UnormSrgb
}else{
image_dds::ImageFormat::BC7RgbaUnormSrgb
};
let dds=image_dds::dds_from_image(
&image,
format,
image_dds::Quality::Slow,
image_dds::Mipmaps::GeneratedAutomatic,
)?;
let mut buf=Vec::new();
dds.write(&mut Cursor::new(&mut buf))?;
Ok(buf)
}
pub fn read_vpks(vpk_paths:&[PathBuf])->Result<Vec<strafesnet_bsp_loader::Vpk>,vpk::Error>{
vpk_paths.iter().map(|vpk_path|{
Ok(strafesnet_bsp_loader::Vpk::new(vpk::VPK::read(vpk_path)?))
}).collect()
}
// --- CLI-only code ---
#[cfg(feature="cli")]
use anyhow::Result as AResult;
#[cfg(feature="cli")]
use futures::StreamExt;
#[cfg(feature="cli")]
#[derive(clap::Subcommand)]
pub enum Commands{
SourceToSNF(SourceToSNFSubcommand),
ExtractTextures(ExtractTexturesSubcommand),
VPKContents(VPKContentsSubcommand),
BSPContents(BSPContentsSubcommand),
}
#[cfg(feature="cli")]
#[derive(clap::Args)]
pub struct SourceToSNFSubcommand {
#[arg(long)]
output_folder:PathBuf,
#[arg(required=true)]
input_files:Vec<PathBuf>,
#[arg(long)]
vpks:Vec<PathBuf>,
}
#[cfg(feature="cli")]
#[derive(clap::Args)]
pub struct ExtractTexturesSubcommand{
#[arg(required=true)]
bsp_files:Vec<PathBuf>,
#[arg(long)]
vpks:Vec<PathBuf>,
}
#[cfg(feature="cli")]
#[derive(clap::Args)]
pub struct VPKContentsSubcommand {
#[arg(long)]
input_file:PathBuf,
}
#[cfg(feature="cli")]
#[derive(clap::Args)]
pub struct BSPContentsSubcommand {
#[arg(long)]
input_file:PathBuf,
}
#[cfg(feature="cli")]
impl Commands{
pub async fn run(self)->AResult<()>{
match self{
Commands::SourceToSNF(subcommand)=>source_to_snf(subcommand.input_files,subcommand.output_folder,subcommand.vpks).await,
Commands::ExtractTextures(subcommand)=>extract_textures(subcommand.bsp_files,subcommand.vpks).await,
Commands::VPKContents(subcommand)=>vpk_contents(subcommand.input_file),
Commands::BSPContents(subcommand)=>bsp_contents(subcommand.input_file),
}
}
}
#[cfg(feature="cli")]
#[derive(Debug,thiserror::Error)] #[derive(Debug,thiserror::Error)]
enum ExtractTextureError{ enum ExtractTextureError{
#[error("Io error {0:?}")] #[error("Io error {0:?}")]
@@ -234,7 +313,8 @@ enum ExtractTextureError{
#[error("Load VMT error {0:?}")] #[error("Load VMT error {0:?}")]
LoadVMT(#[from]LoadVMTError), LoadVMT(#[from]LoadVMTError),
} }
async fn gimme_them_textures(path:&Path,vpk_list:&[strafesnet_bsp_loader::Vpk],send_texture:tokio::sync::mpsc::Sender<(Vec<u8>,String)>)->Result<(),ExtractTextureError>{ #[cfg(feature="cli")]
async fn gimme_them_textures(path:&std::path::Path,vpk_list:&[strafesnet_bsp_loader::Vpk],send_texture:tokio::sync::mpsc::Sender<(Vec<u8>,String)>)->Result<(),ExtractTextureError>{
let bsp=vbsp::Bsp::read(tokio::fs::read(path).await?.as_ref())?; let bsp=vbsp::Bsp::read(tokio::fs::read(path).await?.as_ref())?;
let loader_bsp=strafesnet_bsp_loader::Bsp::new(bsp); let loader_bsp=strafesnet_bsp_loader::Bsp::new(bsp);
let bsp=loader_bsp.as_ref(); let bsp=loader_bsp.as_ref();
@@ -336,57 +416,38 @@ async fn gimme_them_textures(path:&Path,vpk_list:&[strafesnet_bsp_loader::Vpk],s
Ok(()) Ok(())
} }
#[cfg(feature="cli")]
#[derive(Debug,thiserror::Error)] #[derive(Debug,thiserror::Error)]
enum ConvertTextureError{ enum CliConvertTextureError{
#[error("Bsp error {0:?}")]
Bsp(#[from]vbsp::BspError),
#[error("Vtf error {0:?}")]
Vtf(#[from]vtf::Error),
#[error("DDS create error {0:?}")]
DDS(#[from]image_dds::CreateDdsError),
#[error("DDS write error {0:?}")]
DDSWrite(#[from]image_dds::ddsfile::Error),
#[error("Io error {0:?}")] #[error("Io error {0:?}")]
Io(#[from]std::io::Error), Io(#[from]std::io::Error),
#[error("ConvertTexture error {0:?}")]
ConvertTexture(#[from]ConvertTextureError),
} }
async fn convert_texture(texture:Vec<u8>,write_file_name:impl AsRef<Path>)->Result<(),ConvertTextureError>{ #[cfg(feature="cli")]
let image=vtf::from_bytes(&texture)?.highres_image.decode(0)?.to_rgba8(); async fn cli_convert_texture(texture:Vec<u8>,write_file_name:impl AsRef<std::path::Path>)->Result<(),CliConvertTextureError>{
let dds=convert_texture_to_dds(&texture)?;
let format=if image.width()%4!=0||image.height()%4!=0{
image_dds::ImageFormat::Rgba8UnormSrgb
}else{
image_dds::ImageFormat::BC7RgbaUnormSrgb
};
//this fails if the image dimensions are not a multiple of 4
let dds = image_dds::dds_from_image(
&image,
format,
image_dds::Quality::Slow,
image_dds::Mipmaps::GeneratedAutomatic,
)?;
//write dds
let mut dest=PathBuf::from("textures"); let mut dest=PathBuf::from("textures");
dest.push(write_file_name); dest.push(write_file_name);
dest.set_extension("dds"); dest.set_extension("dds");
std::fs::create_dir_all(dest.parent().unwrap())?; std::fs::create_dir_all(dest.parent().unwrap())?;
let mut writer=std::io::BufWriter::new(std::fs::File::create(dest)?); std::fs::write(&dest,&dds)?;
dds.write(&mut writer)?;
Ok(()) Ok(())
} }
async fn read_vpks(vpk_paths:Vec<PathBuf>,thread_limit:usize)->Vec<strafesnet_bsp_loader::Vpk>{ #[cfg(feature="cli")]
async fn async_read_vpks(vpk_paths:Vec<PathBuf>,thread_limit:usize)->Vec<strafesnet_bsp_loader::Vpk>{
futures::stream::iter(vpk_paths).map(|vpk_path|async{ futures::stream::iter(vpk_paths).map(|vpk_path|async{
// idk why it doesn't want to pass out the errors but this is fatal anyways
tokio::task::spawn_blocking(move||Ok::<_,vpk::Error>(strafesnet_bsp_loader::Vpk::new(vpk::VPK::read(&vpk_path)?))).await.unwrap().unwrap() tokio::task::spawn_blocking(move||Ok::<_,vpk::Error>(strafesnet_bsp_loader::Vpk::new(vpk::VPK::read(&vpk_path)?))).await.unwrap().unwrap()
}) })
.buffer_unordered(thread_limit) .buffer_unordered(thread_limit)
.collect().await .collect().await
} }
#[cfg(feature="cli")]
async fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()>{ async fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()>{
tokio::try_join!( tokio::try_join!(
tokio::fs::create_dir_all("extracted_textures"), tokio::fs::create_dir_all("extracted_textures"),
@@ -396,7 +457,7 @@ async fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()
let thread_limit=std::thread::available_parallelism()?.get(); let thread_limit=std::thread::available_parallelism()?.get();
// load vpk list and leak for static lifetime // load vpk list and leak for static lifetime
let vpk_list:&[strafesnet_bsp_loader::Vpk]=read_vpks(vpk_paths,thread_limit).await.leak(); let vpk_list:&[strafesnet_bsp_loader::Vpk]=async_read_vpks(vpk_paths,thread_limit).await.leak();
let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit); let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit);
let mut it=paths.into_iter(); let mut it=paths.into_iter();
@@ -422,7 +483,7 @@ async fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()
while let (Ok(permit),Some((data,dest)))=(SEM.acquire().await,recv_texture.recv().await){ while let (Ok(permit),Some((data,dest)))=(SEM.acquire().await,recv_texture.recv().await){
// TODO: dedup dest? // TODO: dedup dest?
tokio::spawn(async move{ tokio::spawn(async move{
let result=convert_texture(data,dest).await; let result=cli_convert_texture(data,dest).await;
drop(permit); drop(permit);
match result{ match result{
Ok(())=>(), Ok(())=>(),
@@ -435,6 +496,7 @@ async fn extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()
Ok(()) Ok(())
} }
#[cfg(feature="cli")]
fn vpk_contents(vpk_path:PathBuf)->AResult<()>{ fn vpk_contents(vpk_path:PathBuf)->AResult<()>{
let vpk_index=vpk::VPK::read(&vpk_path)?; let vpk_index=vpk::VPK::read(&vpk_path)?;
for (label,entry) in vpk_index.tree.into_iter(){ for (label,entry) in vpk_index.tree.into_iter(){
@@ -443,6 +505,7 @@ fn vpk_contents(vpk_path:PathBuf)->AResult<()>{
Ok(()) Ok(())
} }
#[cfg(feature="cli")]
fn bsp_contents(path:PathBuf)->AResult<()>{ fn bsp_contents(path:PathBuf)->AResult<()>{
let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?; let bsp=vbsp::Bsp::read(std::fs::read(path)?.as_ref())?;
for file_name in bsp.pack.into_zip().into_inner().unwrap().file_names(){ for file_name in bsp.pack.into_zip().into_inner().unwrap().file_names(){
@@ -451,46 +514,14 @@ fn bsp_contents(path:PathBuf)->AResult<()>{
Ok(()) Ok(())
} }
#[derive(Debug)] #[cfg(feature="cli")]
#[expect(dead_code)]
enum ConvertError{
IO(std::io::Error),
SNFMap(strafesnet_snf::map::Error),
BspRead(strafesnet_bsp_loader::ReadError),
BspLoad(strafesnet_bsp_loader::LoadError),
}
impl std::fmt::Display for ConvertError{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for ConvertError{}
async fn convert_to_snf(path:&Path,vpk_list:&[strafesnet_bsp_loader::Vpk],output_folder:PathBuf)->AResult<()>{
let entire_file=tokio::fs::read(path).await?;
let bsp=strafesnet_bsp_loader::read(
std::io::Cursor::new(entire_file)
).map_err(ConvertError::BspRead)?;
let map=bsp.to_snf(LoadFailureMode::DefaultToNone,vpk_list).map_err(ConvertError::BspLoad)?;
let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
dest.set_extension("snfm");
let file=std::fs::File::create(dest).map_err(ConvertError::IO)?;
strafesnet_snf::map::write_map(file,map).map_err(ConvertError::SNFMap)?;
Ok(())
}
async fn source_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{ async fn source_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
let start=std::time::Instant::now(); let start=std::time::Instant::now();
let thread_limit=std::thread::available_parallelism()?.get(); let thread_limit=std::thread::available_parallelism()?.get();
// load vpk list and leak for static lifetime // load vpk list and leak for static lifetime
let vpk_list:&[strafesnet_bsp_loader::Vpk]=read_vpks(vpk_paths,thread_limit).await.leak(); let vpk_list:&[strafesnet_bsp_loader::Vpk]=async_read_vpks(vpk_paths,thread_limit).await.leak();
let mut it=paths.into_iter(); let mut it=paths.into_iter();
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0); static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
@@ -499,7 +530,16 @@ async fn source_to_snf(paths:Vec<PathBuf>,output_folder:PathBuf,vpk_paths:Vec<Pa
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){ while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let output_folder=output_folder.clone(); let output_folder=output_folder.clone();
tokio::spawn(async move{ tokio::spawn(async move{
let result=convert_to_snf(path.as_path(),vpk_list,output_folder).await; let result=match tokio::fs::read(&path).await{
Ok(data)=>convert_to_snf(&data,vpk_list).map_err(|e|anyhow::anyhow!("{e:?}")).and_then(|snf_buf|{
let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
dest.set_extension("snfm");
std::fs::write(dest,&snf_buf)?;
Ok(())
}),
Err(e)=>Err(e.into()),
};
drop(permit); drop(permit);
match result{ match result{
Ok(())=>(), Ok(())=>(),