93 Commits

Author SHA1 Message Date
5f933407a9 Support library usage 2026-02-28 23:53:58 -05:00
205db9a0db v0.7.0 loader rewrite 2025-02-04 09:09:10 -08:00
ca50bf35c2 loader + tokio rewrite 2025-02-04 09:09:01 -08:00
6522c255cd remove common 2025-02-04 09:06:27 -08:00
a5079f21d7 wip download_assets (pre-tokio) 2025-01-27 08:48:11 -08:00
349cd9c233 v1.6.0 split roblox and source 2025-01-27 07:51:39 -08:00
d455cf4dc9 Add strafesnet registry 2025-01-27 07:51:39 -08:00
3227a6486a update deps 2025-01-27 07:51:39 -08:00
1ce51dd4da split commands into roblox and source 2025-01-27 07:45:38 -08:00
1ad9723905 move writeattributes to mapfixer 2025-01-27 07:04:05 -08:00
41b28fa7d2 v1.5.7 update rbx_loader 2024-10-04 20:04:13 -07:00
a2ab23097b update rbx_loader 2024-10-04 20:03:11 -07:00
602061b44c v1.5.6 update rbx_loader 2024-10-03 20:35:54 -07:00
1989369956 update rbx_loader 2024-10-03 20:33:59 -07:00
a18aea828c v1.5.5 update deps 2024-10-01 17:24:43 -07:00
b7000ee9af update deps 2024-10-01 17:20:43 -07:00
2b77ea5712 v1.5.4 update to asset-tool 0.4.X + improve asset id parsing 2024-10-01 13:15:48 -07:00
cf98f8e7bb use rbxassetid parser from deferred_loader 2024-10-01 13:02:54 -07:00
a56c114d08 Automatically create sub-directories when downloading assets; move dds files into the textures folder
the latter change is because roblox-to-snf expects the files in textures, not textures/dds
2024-09-25 23:48:59 +01:00
b6a5324ae7 Fix asset-tool invocations when downloading assets 2024-09-25 23:43:54 +01:00
6f5a3c5176 v1.5.3 roblox emulator 2024-09-21 13:48:23 -07:00
6bab31f3b3 silence dead code 2024-09-21 13:47:40 -07:00
9cdeed160f update rbx_loader & run scripts 2024-09-21 13:45:32 -07:00
d0c59b51a4 v1.5.2 2024-07-31 11:51:38 -07:00
451f3ccecb source to snf 2024-07-31 11:51:18 -07:00
ed9701981d limit parallel threads by waiting for the first thread to complete 2024-07-30 12:24:23 -07:00
60e0197344 v1.5.1 2024-07-29 16:48:02 -07:00
4d97a490c1 convert snf 2024-07-29 16:48:02 -07:00
52ba44c6be named args 2024-04-19 00:44:05 -07:00
95b6272b18 more texture sources + use asset tool to download meshes & textures 2024-04-19 00:44:05 -07:00
0172675b04 v1.5.0 rewrite clap usage + remove mapfixer stuff 2024-03-08 10:43:36 -08:00
982b4aecac rewrite clap usage 2024-03-08 10:43:36 -08:00
c1ddcdb0c5 remove mapfixer + asset-tool functions 2024-03-08 10:43:36 -08:00
c2d0a4487c misc edits 2024-03-08 10:01:54 -08:00
dc9fd2c442 import PathBuf 2024-03-08 09:55:17 -08:00
4199d41d3f timeless License 2024-01-30 18:38:47 -08:00
7fbcb206ff probably was wrong but idc about testing it 2024-01-30 16:39:57 -08:00
a17901d473 v1.4.0 valve maps 2024-01-12 11:34:09 -08:00
b88c6b899a commands for valve maps 2024-01-12 11:32:12 -08:00
835d4bbecd add valve map deps 2024-01-12 11:32:12 -08:00
b756dc979c move main to top 2024-01-12 11:32:12 -08:00
1e888ebb01 tabs 2024-01-09 01:36:14 -08:00
b9dccb1af5 update deps 2023-11-18 16:13:24 -08:00
c6d293cc6b write "Ref" attribute based on internal roblox part id 2023-11-18 15:53:37 -08:00
a386f90f51 switch to lazy_regex 2023-11-18 15:47:54 -08:00
43115cbac6 mesh downloader 2023-11-09 15:51:23 -08:00
35b5aff9a7 stop being cringe 2023-11-09 15:51:11 -08:00
36419af870 update deps 2023-10-27 18:10:11 -07:00
a7518bef46 print full path 2023-10-27 18:10:05 -07:00
6df1f41599 not a bug 2023-10-19 03:00:30 -07:00
422d0a160d print special message for roblox xml 2023-10-19 03:00:30 -07:00
1727f9213c it's actually jfif 2023-10-19 03:00:30 -07:00
afa9e7447d print fourcc on unrecognized image format 2023-10-19 03:00:30 -07:00
ff85efa54f add unzip-all command 2023-10-19 03:00:30 -07:00
fa69c53cfc JPEG support 2023-10-19 03:00:30 -07:00
a57c228580 styling 2023-10-19 03:00:30 -07:00
5dc69db885 extremely fancy code to transparently handle gzip 2023-10-19 03:00:30 -07:00
e54400a436 alphabetize for no reason 2023-10-19 03:00:30 -07:00
e2a5edf8df Fill in placeholder values
Nobody told me I was chilling with placeholder values in my license
2023-10-02 03:46:17 -04:00
d6dd1b8abd drop Jeftai Error, convert errors to anyhow 2023-10-01 20:08:43 -07:00
a2b793fcd3 add anyhow dep 2023-10-01 20:07:11 -07:00
9cb34f14c8 fixme 2023-09-30 03:38:34 -07:00
bd2e3aa2d3 v1.3.0 srgb textures + non mod 4 images export to uncompressed + rbxmx support 2023-09-30 03:15:57 -07:00
07f6053839 multiplex roblox format 2023-09-30 03:14:06 -07:00
0d5b918ea1 add rbx_xml dep 2023-09-30 03:14:05 -07:00
20a568220a use plain format if image is not the right shape 2023-09-29 18:30:01 -07:00
d670d4129e use srgb 2023-09-29 13:27:23 -07:00
de7b0bd5cc stop failing when maps are corrupted while downloading textures 2023-09-29 03:14:29 -07:00
01524146c7 fixme 2023-09-29 02:58:49 -07:00
45e8e415d0 v1.2.0 convert-textures
gzip no workie yet
some pngs upset the decoder lib
2023-09-29 02:58:49 -07:00
4417bafc5c naive multithreaded convert downloaded textures to dds 2023-09-29 02:58:32 -07:00
8553625738 change texture download path 2023-09-28 22:05:25 -07:00
3a3749eaeb add deps image, gzip (flate2), image_dds 2023-09-28 22:05:00 -07:00
53539f290b print map file 2023-09-23 13:07:52 -07:00
479dd37f53 v1.1.0 download-textures 2023-09-22 14:27:06 -07:00
34b6a869f0 add download textures command 2023-09-22 14:24:28 -07:00
19a455ee5e print wget exit status 2023-09-22 14:23:29 -07:00
9904b7a044 switch extract to pathbuf 2023-09-22 14:22:44 -07:00
6efa811eb6 generalize recursive_collect 2023-09-22 14:21:22 -07:00
81e4a201bd add delete to interactive upload 2023-09-15 17:10:33 -07:00
8fd5618af2 tweak prompt 2023-09-14 17:05:22 -07:00
54c26d6e1e ARE YOU STUPID 2023-09-14 16:23:53 -07:00
110ec94a08 v1.0.0 map tool implements some subset of operations at every step the map verification process from start to finish 2023-09-14 16:23:53 -07:00
980da5a6a7 write approximate code for uploading new asset 2023-09-14 16:22:15 -07:00
1cd77984d4 wait for downloads before exiting 2023-09-14 14:32:25 -07:00
b0fe231388 Merge pull request 'stop macro from returning' (#2) from Jeftaei/map-tool:master into master
Reviewed-on: StrafesNET/map-tool#2
2023-09-14 17:19:01 -04:00
5a4a39ab75 add upload action + wait for status code 2023-09-14 14:17:00 -07:00
4c485e76e4 implement upload 2023-09-14 13:54:08 -07:00
7bbb9ca24f have to write the model ref to make a valid model file 2023-09-14 13:53:52 -07:00
eff55af1b4 models are rbxm 2023-09-14 12:06:02 -07:00
0d05cc9996 comment on laziness 2023-09-13 21:22:28 -07:00
2a55ef90df don't replace std Result 2023-09-13 21:04:33 -07:00
1a6202ae66 Merge pull request 'Nice looking errors' (#1) from Jeftaei/map-tool:master into master
Reviewed-on: StrafesNET/map-tool#1
2023-09-13 16:51:24 -04:00
10 changed files with 4336 additions and 759 deletions

2
.cargo/config.toml Normal file
View File

@@ -0,0 +1,2 @@
[registries.strafesnet]
index = "sparse+https://git.itzana.me/api/packages/strafesnet/cargo/"

3526
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,17 +1,49 @@
[package]
name = "map-tool"
version = "0.1.0"
version = "2.0.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
clap = { version = "4.4.2", features = ["derive"] }
rbx_binary = "0.7.1"
rbx_dom_weak = "2.5.0"
rbx_reflection_database = "0.2.7"
[lib]
name = "map_tool"
path = "src/lib.rs"
[profile.release]
lto = true
strip = true
codegen-units = 1
[[bin]]
name = "map-tool"
path = "src/main.rs"
required-features = ["cli"]
[features]
cli = ["dep:clap"]
[dependencies]
anyhow = "1.0.75"
clap = { version = "4.4.2", features = ["derive"], optional = true }
flate2 = "1.0.27"
futures = "0.3.31"
image = "0.25.2"
image_dds = "0.7.1"
lazy-regex = "3.1.0"
rbx_asset = { version = "0.2.5", registry = "strafesnet" }
rbx_binary = { version = "0.7.4", registry = "strafesnet" }
rbx_dom_weak = { version = "2.7.0", registry = "strafesnet" }
rbx_reflection_database = { version = "0.2.10", registry = "strafesnet" }
rbx_xml = { version = "0.13.3", registry = "strafesnet" }
rbxassetid = { version = "0.1.0", registry = "strafesnet" }
strafesnet_bsp_loader = { version = "0.3.0", registry = "strafesnet" }
strafesnet_deferred_loader = { version = "0.5.0", registry = "strafesnet" }
strafesnet_rbx_loader = { version = "0.6.0", registry = "strafesnet" }
strafesnet_snf = { version = "0.3.0", registry = "strafesnet" }
thiserror = "2.0.11"
tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread", "fs"] }
vbsp = "0.6.0"
vmdl = "0.2.0"
vmt-parser = "0.2.0"
vpk = "0.2.0"
vtf = "0.3.0"
#[profile.release]
#lto = true
#strip = true
#codegen-units = 1

28
LICENSE
View File

@@ -1,9 +1,23 @@
MIT License
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
Copyright (c) <year> <copyright holders>
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@@ -1,21 +0,0 @@
#[derive(Debug)]
pub struct Error {
pub message: String,
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self.message)
}
}
impl std::error::Error for Error {}
impl Error {
// has to be Box<Self> to fit with the result in prelude.rs
pub fn new(message: &str) -> Box<Self> {
Box::new(Self {
message: message.to_string(),
})
}
}

2
src/lib.rs Normal file
View File

@@ -0,0 +1,2 @@
pub mod roblox;
pub mod source;

View File

@@ -1,541 +1,27 @@
use std::unimplemented;
use clap::{Args, Parser, Subcommand};
mod error;
mod prelude;
// this * means we are importing everything from the prelude module and in turn we overwrite the default `Result` with our own
// if you want the original back you can use StdResult<T, E> or just std::result::Result<T, E>
// using the new result also means the error type is implicitly Box<dyn std::error::Error> (works for any errors that implement the std::error::Error trait)
use crate::prelude::*;
use clap::{Parser,Subcommand};
use anyhow::Result as AResult;
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
#[command(propagate_version = true)]
struct Cli {
#[command(subcommand)]
command: Commands,
#[command(subcommand)]
command: Commands,
}
#[derive(Subcommand)]
enum Commands {
Download(MapList),
Upload,
Scan,
Extract(Map),
Replace,
Interactive,
enum Commands{
#[command(flatten)]
Roblox(map_tool::roblox::Commands),
#[command(flatten)]
Source(map_tool::source::Commands),
}
#[derive(Args)]
struct Map {
id:u64,
}
#[derive(Args)]
struct MapList {
maps: Vec<u64>,
}
fn class_is_a(class: &str, superclass: &str) -> bool {
if class==superclass {
return true
}
let class_descriptor=rbx_reflection_database::get().classes.get(class);
if let Some(descriptor) = &class_descriptor {
if let Some(class_super) = &descriptor.superclass {
return class_is_a(&class_super, superclass)
}
}
return false
}
fn recursive_collect_scripts(scripts: &mut std::vec::Vec<rbx_dom_weak::types::Ref>,dom: &rbx_dom_weak::WeakDom, instance: &rbx_dom_weak::Instance){
for &referent in instance.children() {
if let Some(c) = dom.get_by_ref(referent) {
if class_is_a(c.class.as_str(), "LuaSourceContainer") {
scripts.push(c.referent());//copy ref
}
recursive_collect_scripts(scripts,dom,c);
}
}
}
fn get_full_name(dom:&rbx_dom_weak::WeakDom,instance:&rbx_dom_weak::Instance) -> String{
let mut full_name=instance.name.clone();
let mut pref=instance.parent();
while let Some(parent)=dom.get_by_ref(pref){
full_name.insert(0, '.');
full_name.insert_str(0, &parent.name);
pref=parent.parent();
}
full_name
}
//download
//download list of maps to maps/unprocessed
//scan (scripts)
//iter maps/unprocessed
//passing moves to maps/verified
//failing moves to maps/blocked
//replace (edits & deletions)
//iter maps/blocked
//replace scripts and put in maps/unprocessed
//upload
//iter maps/verified
//interactively print DisplayName/Creator and ask for target upload ids
//interactive
//iter maps/unprocessed
//for each unique script, load it into the file current.lua and have it open in sublime text
//I can edit the file and it will edit it in place
//I pass/fail(with comment)/allow each script
fn get_script_refs(dom:&rbx_dom_weak::WeakDom) -> Vec<rbx_dom_weak::types::Ref>{
let mut scripts = std::vec::Vec::new();
recursive_collect_scripts(&mut scripts, dom, dom.root());
scripts
}
fn get_id() -> Result<u32>{
match std::fs::read_to_string("id"){
Ok(id_file)=>Ok(id_file.parse::<u32>()?),
Err(e) => match e.kind() {
std::io::ErrorKind::NotFound => Ok(0),//implicitly take on id=0
_ => Err(e)?,
}
}
}
fn get_set_from_file(file:&str) -> Result<std::collections::HashSet<String>>{
let mut set=std::collections::HashSet::<String>::new();
for entry in std::fs::read_dir(file)? {
set.insert(std::fs::read_to_string(entry?.path())?);
}
Ok(set)
}
fn get_allowed_set() -> Result<std::collections::HashSet<String>>{
get_set_from_file("scripts/allowed")
}
fn get_blocked() -> Result<std::collections::HashSet<String>>{
get_set_from_file("scripts/blocked")
}
fn get_allowed_map() -> Result<std::collections::HashMap::<u32,String>>{
let mut allowed_map = std::collections::HashMap::<u32,String>::new();
for entry in std::fs::read_dir("scripts/allowed")? {
let entry=entry?;
allowed_map.insert(entry.path().file_stem().unwrap().to_str().unwrap().parse::<u32>()?,std::fs::read_to_string(entry.path())?);
}
Ok(allowed_map)
}
fn get_replace_map() -> Result<std::collections::HashMap::<String,u32>>{
let mut replace = std::collections::HashMap::<String,u32>::new();
for entry in std::fs::read_dir("scripts/replace")? {
let entry=entry?;
replace.insert(std::fs::read_to_string(entry.path())?,entry.path().file_stem().unwrap().to_str().unwrap().parse::<u32>()?);
}
Ok(replace)
}
fn check_source_illegal_keywords(source:&String)->bool{
source.find("getfenv").is_some()||source.find("require").is_some()
}
fn download(map_list: Vec<u64>) -> Result<()>{
let header=format!("Cookie: .ROBLOSECURITY={}",std::env::var("RBXCOOKIE")?);
let shared_args=&[
"-q",
"--header",
header.as_str(),
"-O",
];
for map_id in map_list.iter() {
std::process::Command::new("wget")
.args(shared_args)
.arg(format!("maps/unprocessed/{}.rbxl",map_id))
.arg(format!("https://assetdelivery.roblox.com/v1/asset/?ID={}",map_id))
.spawn()?;
}
Ok(())
}
enum Scan{
Passed,
Blocked,
Flagged,
}
fn scan() -> Result<()>{
let mut id = get_id()?;
//Construct allowed scripts
let allowed_set = get_allowed_set()?;
let mut blocked = get_blocked()?;
for entry in std::fs::read_dir("maps/unprocessed")? {
let file_thing=entry?;
let input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
let dom = rbx_binary::from_reader(input)?;
let script_refs = get_script_refs(&dom);
//check scribb
let mut fail_count=0;
let mut fail_type=Scan::Passed;
for &script_ref in script_refs.iter() {
if let Some(script)=dom.get_by_ref(script_ref){
if let Some(rbx_dom_weak::types::Variant::String(s)) = script.properties.get("Source") {
//flag keywords and instantly fail
if check_source_illegal_keywords(s){
println!("{:?} - flagged.",file_thing.file_name());
fail_type=Scan::Flagged;
break;
}
if allowed_set.contains(s) {
continue;
}else{
fail_type=Scan::Blocked;//no need to check for Flagged, it breaks the loop.
fail_count+=1;
if !blocked.contains(s) {
blocked.insert(s.clone());//all fixed! just clone!
std::fs::write(format!("scripts/blocked/{}.lua",id),s)?;
id+=1;
}
}
}else{
panic!("FATAL: failed to get source for {:?}",file_thing.file_name());
}
}else{
panic!("FATAL: failed to get_by_ref {:?}",script_ref);
}
}
let mut dest=match fail_type {
Scan::Passed => std::path::PathBuf::from("maps/processed"),
Scan::Blocked => {
println!("{:?} - {} {} not allowed.",file_thing.file_name(),fail_count,if fail_count==1 {"script"}else{"scripts"});
std::path::PathBuf::from("maps/blocked")
}
Scan::Flagged => std::path::PathBuf::from("maps/flagged")
};
dest.push(file_thing.file_name());
std::fs::rename(file_thing.path(), dest)?;
}
std::fs::write("id",id.to_string())?;
Ok(())
}
fn extract(file_id:u64) -> Result<()>{
let mut id = 0;
//Construct allowed scripts
let mut script_set = std::collections::HashSet::<String>::new();
let file_id_string=file_id.to_string();
for entry in std::fs::read_dir("maps/unprocessed")? {
let file_thing=entry?;
if file_thing.file_name().to_str().unwrap().find(&file_id_string).is_none(){
continue;
}
let input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
let dom = rbx_binary::from_reader(input)?;
let script_refs = get_script_refs(&dom);
//extract scribb
for &script_ref in script_refs.iter() {
if let Some(script)=dom.get_by_ref(script_ref){
if let Some(rbx_dom_weak::types::Variant::String(s)) = script.properties.get("Source") {
if script_set.contains(s) {
continue;
}else{
script_set.insert(s.clone());
std::fs::write(format!("scripts/extracted/{:?}_{}_{}.lua",file_thing.file_name(),id,script.name),s)?;
id+=1;
}
}else{
panic!("FATAL: failed to get source for {:?}",file_thing.file_name());
}
}else{
panic!("FATAL: failed to get_by_ref {:?}",script_ref);
}
}
}
println!("extracted {} {}",id,if id==1 {"script"}else{"scripts"});
Ok(())
}
fn replace() -> Result<()>{
let allowed_map=get_allowed_map()?;
let replace_map=get_replace_map()?;
for entry in std::fs::read_dir("maps/blocked")? {
let file_thing=entry?;
let input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
let mut dom = rbx_binary::from_reader(input)?;
let script_refs = get_script_refs(&dom);
//check scribb
let mut any_failed=false;
for &script_ref in script_refs.iter() {
if let Some(script)=dom.get_by_ref(script_ref){
if let Some(rbx_dom_weak::types::Variant::String(source)) = script.properties.get("Source") {
if let (Some(replace_id),Some(replace_script))=(replace_map.get(source),dom.get_by_ref_mut(script.referent())) {
println!("replace {}",replace_id);
//replace the source
if let Some(replace_source)=allowed_map.get(replace_id){
replace_script.properties.insert("Source".to_string(), rbx_dom_weak::types::Variant::String(replace_source.clone()));
}else{
println!("failed to get replacement source {}",replace_id);
any_failed=true;
}
}else{
println!("failed to failed to get replace_id and replace_script");
any_failed=true;
}
}else{
panic!("FATAL: failed to get source for {:?}",file_thing.file_name());
}
}else{
panic!("FATAL: failed to get_by_ref {:?}",script_ref);
}
}
if any_failed {
println!("One or more scripts failed to replace.");
}else{
let mut dest=std::path::PathBuf::from("maps/unprocessed");
dest.set_file_name(file_thing.file_name());
let output = std::io::BufWriter::new(std::fs::File::open(dest)?);
rbx_binary::to_writer(output, &dom, &[dom.root_ref()])?;
}
}
Ok(())
}
fn upload() -> Result<()>{
//interactive prompt per upload:
//Creator: [auto fill creator]
//DisplayName: [auto fill DisplayName]
//id: ["New" for blank because of my double enter key]
// std::process::Command::new("rbxcompiler")
// .arg("--compile=false")
// .arg("--group=6980477")
// .arg("--asset=5692139100")
// .arg("--input=map.rbxm")
// .spawn()?;
unimplemented!()
}
enum Interactive{
Passed,
Blocked,
Flagged,
}
enum ScriptAction {
Pass,
Replace(u32),
Flag,
Block,
Delete,
}
enum ScriptActionParseResult {
Pass,
Block,
Exit,
Delete,
}
struct ParseScriptActionErr;
impl std::str::FromStr for ScriptActionParseResult {
type Err=ParseScriptActionErr;
fn from_str(s: &str) -> StdResult<Self, Self::Err>{
if s=="pass\n"||s=="1\n"{
Ok(Self::Pass)
}else if s=="block\n"{
Ok(Self::Block)
}else if s=="exit\n"{
Ok(Self::Exit)
}else if s=="delete\n"{
Ok(Self::Delete)
}else{
Err(ParseScriptActionErr)
}
}
}
fn interactive() -> Result<()>{
let mut id=get_id()?;
//Construct allowed scripts
let mut allowed_set=get_allowed_set()?;
let mut allowed_map=get_allowed_map()?;
let mut replace_map=get_replace_map()?;
let mut blocked = get_blocked()?;
'map_loop: for entry in std::fs::read_dir("maps/unprocessed")? {
let file_thing=entry?;
println!("processing map={:?}",file_thing.file_name());
let input = std::io::BufReader::new(std::fs::File::open(file_thing.path())?);
let mut dom = rbx_binary::from_reader(input)?;
let script_refs = get_script_refs(&dom);
//check scribb
let mut script_count=0;
let mut replace_count=0;
let mut block_count=0;
let mut fail_type=Interactive::Passed;
for &script_ref in script_refs.iter() {
if let Some(script)=dom.get_by_ref(script_ref){
if let Some(rbx_dom_weak::types::Variant::String(source)) = script.properties.get("Source") {
script_count+=1;
let source_action=if check_source_illegal_keywords(source) {
ScriptAction::Flag//script triggers flagging -> Flag
} else if blocked.contains(source) {
ScriptAction::Block//script is blocked -> Block
} else if allowed_set.contains(source) {
ScriptAction::Pass//script is allowed -> Pass
}else if let Some(replace_id)=replace_map.get(source) {
ScriptAction::Replace(*replace_id)
}else{
//interactive logic goes here
print!("unresolved source location={}\naction: ",get_full_name(&dom, script));
std::io::Write::flush(&mut std::io::stdout())?;
//load source into current.lua
std::fs::write("current.lua",source)?;
//prompt action in terminal
//wait for input
let script_action;
loop{
let mut action_string = String::new();
std::io::stdin().read_line(&mut action_string)?;
if let Ok(parsed_script_action)=action_string.parse::<ScriptActionParseResult>(){
script_action=parsed_script_action;
break;
}else{
print!("action: ");
std::io::Write::flush(&mut std::io::stdout())?;
}
}
//update allowed/replace/blocked
match script_action{
ScriptActionParseResult::Pass => {
//if current.lua was updated, create an allowed and replace file and set script_action to replace(new_id)
let modified_source=std::fs::read_to_string("current.lua")?;
if &modified_source==source{
//it's always new.
//insert allowed_set
allowed_set.insert(modified_source.clone());
//insert allowed_map
allowed_map.insert(id,modified_source.clone());
//write allowed/id.lua
std::fs::write(format!("scripts/allowed/{}.lua",id),modified_source)?;
id+=1;
ScriptAction::Pass
}else{
//insert allowed_set
allowed_set.insert(modified_source.clone());
//insert allowed_map
allowed_map.insert(id,modified_source.clone());
//insert replace_map
replace_map.insert(source.clone(),id);//this cannot be reached if it already exists
//write allowed/id.lua
std::fs::write(format!("scripts/allowed/{}.lua",id),modified_source)?;
//write replace/id.lua
std::fs::write(format!("scripts/replace/{}.lua",id),source)?;
let ret=ScriptAction::Replace(id);
id+=1;
ret
}
},
ScriptActionParseResult::Block => {
blocked.insert(source.clone());
std::fs::write(format!("scripts/blocked/{}.lua",id),source)?;
id+=1;
ScriptAction::Block
},
ScriptActionParseResult::Exit => break 'map_loop,
ScriptActionParseResult::Delete => ScriptAction::Delete,
}
};
let location=get_full_name(&dom, script);
match source_action{
ScriptAction::Pass => println!("passed source location={}",location),
ScriptAction::Replace(replace_id)=>{
//replace the source
if let (Some(replace_source),Some(replace_script))=(allowed_map.get(&replace_id),dom.get_by_ref_mut(script.referent())){
replace_count+=1;
println!("replaced source id={} location={}",replace_id,location);
replace_script.properties.insert("Source".to_string(), rbx_dom_weak::types::Variant::String(replace_source.clone()));
}else{
panic!("failed to get replacement source id={} location={}",replace_id,location);
}
},
ScriptAction::Delete => {
println!("deleted source location={}",location);
replace_count+=1;
dom.destroy(script.referent());
},
ScriptAction::Flag => {
println!("flagged source location={}",location);
fail_type=Interactive::Flagged;
},
ScriptAction::Block => {
block_count+=1;
println!("blocked source location={}",location);
match fail_type{
Interactive::Passed => fail_type=Interactive::Blocked,
_=>(),
}
},
}
}else{
panic!("FATAL: failed to get source for {:?}",file_thing.file_name());
}
}else{
panic!("FATAL: failed to get_by_ref {:?}",script_ref);
}
}
let mut dest=match fail_type{
Interactive::Passed => {
println!("map={:?} passed with {} {}",file_thing.file_name(),script_count,if script_count==1 {"script"}else{"scripts"});
if replace_count==0{
std::path::PathBuf::from("maps/passed")
}else{
//create new file
println!("{} {} replaced - generating new file...",replace_count,if replace_count==1 {"script was"}else{"scripts were"});
let mut dest=std::path::PathBuf::from("maps/passed");
dest.push(file_thing.file_name());
let output = std::io::BufWriter::new(std::fs::File::create(dest)?);
rbx_binary::to_writer(output, &dom, &[dom.root_ref()])?;
//move original to processed folder
std::path::PathBuf::from("maps/unaltered")
}
},//write map into maps/processed
Interactive::Blocked => {
println!("map={:?} blocked with {}/{} {} blocked",file_thing.file_name(),block_count,script_count,if script_count==1 {"script"}else{"scripts"});
std::path::PathBuf::from("maps/blocked")
},//write map into maps/blocked
Interactive::Flagged => {
println!("map={:?} flagged",file_thing.file_name());
std::path::PathBuf::from("maps/flagged")
},//write map into maps/flagged
};
dest.push(file_thing.file_name());
std::fs::rename(file_thing.path(), dest)?;
}
std::fs::write("id",id.to_string())?;
Ok(())
}
fn main() -> Result<()> {
let cli = Cli::parse();
match cli.command {
Commands::Download(map_list)=>download(map_list.maps),
Commands::Upload=>upload(),
Commands::Scan=>scan(),
Commands::Replace=>replace(),
Commands::Interactive=>interactive(),
Commands::Extract(map)=>extract(map.id),
}
#[tokio::main]
async fn main()->AResult<()>{
let cli=Cli::parse();
match cli.command{
Commands::Roblox(commands)=>commands.run().await,
Commands::Source(commands)=>commands.run().await,
}
}

View File

@@ -1,19 +0,0 @@
pub use crate::error::Error;
pub type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;
pub type StdResult<T, E> = std::result::Result<T, E>;
// i just wanted to mess around with macros a bit
// so heres labelprint as a macro
#[macro_export]
macro_rules! lprint {
($expr:expr) => {{
let ___this_file = std::file!();
let ___line = std::line!();
// let ___column = column!();
println!("[{}:{}] {}", ___this_file, ___line, $expr);
}};
($expr:expr, $($arg:tt)*) => {{
lprint!(format!($expr, $($arg)*));
}};
}

457
src/roblox.rs Normal file
View File

@@ -0,0 +1,457 @@
use std::io::{Cursor,Read,Seek};
use std::collections::HashSet;
use rbx_dom_weak::Instance;
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
use rbxassetid::RobloxAssetId;
// === Public library API ===
/// Unique asset IDs referenced by a Roblox place/model file.
#[derive(Default)]
pub struct UniqueAssets{
pub meshes:HashSet<RobloxAssetId>,
pub unions:HashSet<RobloxAssetId>,
pub textures:HashSet<RobloxAssetId>,
}
#[derive(Debug,thiserror::Error)]
pub enum LoadDomError{
#[error("IO error {0:?}")]
IO(#[from]std::io::Error),
#[error("Binary decode error {0:?}")]
Binary(rbx_binary::DecodeError),
#[error("XML decode error {0:?}")]
Xml(rbx_xml::DecodeError),
#[error("Unknown file format")]
UnknownFormat,
}
/// Parse a Roblox file (binary or XML) from bytes into a WeakDom.
pub fn load_dom(data:&[u8])->Result<rbx_dom_weak::WeakDom,LoadDomError>{
load_dom_reader(Cursor::new(data))
}
fn load_dom_reader<R:Read+Seek>(mut input:R)->Result<rbx_dom_weak::WeakDom,LoadDomError>{
let mut first_8=[0u8;8];
input.read_exact(&mut first_8)?;
input.rewind()?;
match &first_8{
b"<roblox!"=>rbx_binary::from_reader(input).map_err(LoadDomError::Binary),
b"<roblox "=>rbx_xml::from_reader(input,rbx_xml::DecodeOptions::default()).map_err(LoadDomError::Xml),
_=>Err(LoadDomError::UnknownFormat),
}
}
/// Scan a parsed DOM and return all unique asset IDs (meshes, textures, unions).
pub fn get_unique_assets(dom:rbx_dom_weak::WeakDom)->UniqueAssets{
let mut assets=UniqueAssets::default();
for object in dom.into_raw().1.into_values(){
assets.collect(&object);
}
assets
}
/// Scan a Roblox file (bytes) and return all unique asset IDs.
pub fn get_unique_assets_from_file(data:&[u8])->Result<UniqueAssets,LoadDomError>{
let dom=load_dom(data)?;
Ok(get_unique_assets(dom))
}
#[derive(Debug,thiserror::Error)]
pub enum ConvertTextureError{
#[error("Image error {0:?}")]
Image(#[from]image::ImageError),
#[error("DDS create error {0:?}")]
DDS(#[from]image_dds::CreateDdsError),
#[error("DDS write error {0:?}")]
DDSWrite(#[from]image_dds::ddsfile::Error),
}
/// Convert image bytes (PNG, JPEG, etc.) into DDS texture bytes.
pub fn convert_texture_to_dds(image_data:&[u8])->Result<Vec<u8>,ConvertTextureError>{
let image=image::load_from_memory(image_data)?.to_rgba8();
let format=if image.width()%4!=0||image.height()%4!=0{
image_dds::ImageFormat::Rgba8UnormSrgb
}else{
image_dds::ImageFormat::BC7RgbaUnormSrgb
};
let dds=image_dds::dds_from_image(
&image,
format,
image_dds::Quality::Slow,
image_dds::Mipmaps::GeneratedAutomatic,
)?;
let mut buf=Vec::new();
dds.write(&mut Cursor::new(&mut buf))?;
Ok(buf)
}
#[derive(Debug,thiserror::Error)]
pub enum ConvertError{
#[error("IO error {0:?}")]
IO(#[from]std::io::Error),
#[error("SNF map error {0:?}")]
SNFMap(strafesnet_snf::map::Error),
#[error("Roblox read error {0:?}")]
RobloxRead(strafesnet_rbx_loader::ReadError),
#[error("Roblox load error {0:?}")]
RobloxLoad(strafesnet_rbx_loader::LoadError),
}
/// Convert a Roblox place/model file (bytes) to SNF map format (bytes).
pub fn convert_to_snf(data:&[u8])->Result<Vec<u8>,ConvertError>{
let model=strafesnet_rbx_loader::read(
Cursor::new(data)
).map_err(ConvertError::RobloxRead)?;
let mut place=model.into_place();
place.run_scripts();
let map=place.to_snf(LoadFailureMode::DefaultToNone).map_err(ConvertError::RobloxLoad)?;
let mut buf=Vec::new();
strafesnet_snf::map::write_map(Cursor::new(&mut buf),map).map_err(ConvertError::SNFMap)?;
Ok(buf)
}
/// Download a single asset from Roblox by ID. Returns raw asset bytes.
pub async fn download_asset(context:&rbx_asset::cookie::CookieContext,asset_id:u64)->Result<Vec<u8>,rbx_asset::cookie::GetError>{
context.get_asset(rbx_asset::cookie::GetAssetRequest{
asset_id,
version:None,
}).await
}
/// Download a single asset with retry and exponential backoff for rate limiting.
/// Returns None if all retries are exhausted or a non-rate-limit error occurs.
pub async fn download_asset_retry(context:&rbx_asset::cookie::CookieContext,asset_id:u64)->Option<Vec<u8>>{
	// Backoff grows from 1s by this factor each retry (~40s by the last sleep).
	const BACKOFF_MUL:f32=1.3956124250860895286;
	const MAX_ATTEMPTS:u32=12;
	let mut backoff=1000f32;
	for attempt in 0..MAX_ATTEMPTS{
		match download_asset(context,asset_id).await{
			Ok(data)=>return Some(data),
			// HTTP 429: rate limited — back off, then try again.
			Err(rbx_asset::cookie::GetError::Response(rbx_asset::ResponseError::StatusCodeWithUrlAndBody(scwuab)))
				if scwuab.status_code.as_u16()==429=>{
				// Fix: don't sleep after the final attempt — the original
				// slept one extra backoff period before giving up.
				if attempt+1<MAX_ATTEMPTS{
					tokio::time::sleep(std::time::Duration::from_millis(backoff as u64)).await;
					backoff*=BACKOFF_MUL;
				}
			},
			// Any other error (including non-429 status codes) is not retryable.
			Err(_)=>return None,
		}
	}
	None
}
// === Private helpers ===
impl UniqueAssets{
	/// Inspect one DOM instance and record any referenced asset ids into
	/// the appropriate bucket (textures, meshes, unions), keyed by class.
	fn collect(&mut self,object:&Instance){
		let class=object.class.as_str();
		// Texture-bearing properties for each class. Property name
		// capitalization varies per class (e.g. TextureId vs TextureID).
		let texture_properties:&[&str]=match class{
			"Beam"|"Decal"|"Texture"|"ParticleEmitter"=>&["Texture"],
			"FileMesh"=>&["TextureId"],
			"MeshPart"=>&["TextureID"],
			"Sky"=>&["MoonTextureId","SkyboxBk","SkyboxDn","SkyboxFt","SkyboxLf","SkyboxRt","SkyboxUp","SunTextureId"],
			_=>&[],
		};
		for property in texture_properties{
			accumulate_content_id(&mut self.textures,object,property);
		}
		// Mesh-bearing classes.
		if matches!(class,"MeshPart"|"SpecialMesh"){
			accumulate_content_id(&mut self.meshes,object,"MeshId");
		}
		// CSG union geometry.
		if class=="UnionOperation"{
			accumulate_content_id(&mut self.unions,object,"AssetId");
		}
	}
}
/// Parse the given Content property of `object` as a Roblox asset id and
/// insert it into `content_list`. Logs to stdout when the property is
/// missing (or not a Content variant) or fails to parse.
fn accumulate_content_id(content_list:&mut HashSet<RobloxAssetId>,object:&Instance,property:&str){
	let Some(rbx_dom_weak::types::Variant::Content(content))=object.properties.get(property) else{
		println!("property={} does not exist for class={}",property,object.class.as_str());
		return;
	};
	let url:&str=content.as_ref();
	match url.parse(){
		Ok(asset_id)=>{content_list.insert(asset_id);},
		Err(_)=>println!("Content failed to parse into AssetID: {:?}",content),
	}
}
// === CLI ===
#[cfg(feature="cli")]
mod cli{
use super::*;
use std::path::{Path,PathBuf};
use clap::{Args,Subcommand};
use anyhow::Result as AResult;
use tokio::io::AsyncReadExt;
// Upper bound on queued "assets of one place" results between the
// file-reading tasks and the sequential download task.
const DOWNLOAD_LIMIT:usize=16;
// Roblox-related CLI subcommands.
// (Plain // comments here: clap surfaces /// doc comments as --help text.)
#[derive(Subcommand)]
pub enum Commands{
RobloxToSNF(RobloxToSNFSubcommand),
DownloadAssets(DownloadAssetsSubcommand),
}
// Arguments for converting Roblox files: each input becomes
// <output_folder>/<stem>.snfm.
#[derive(Args)]
pub struct RobloxToSNFSubcommand {
#[arg(long)]
output_folder:PathBuf,
#[arg(required=true)]
input_files:Vec<PathBuf>,
}
// Arguments for downloading every asset referenced by the given files.
#[derive(Args)]
pub struct DownloadAssetsSubcommand{
#[arg(required=true)]
roblox_files:Vec<PathBuf>,
}
impl Commands{
	/// Dispatch the selected subcommand.
	pub async fn run(self)->AResult<()>{
		match self{
			Self::RobloxToSNF(cmd)=>cli_roblox_to_snf(cmd.input_files,cmd.output_folder).await,
			// An empty cookie restricts downloads to public assets.
			Self::DownloadAssets(cmd)=>cli_download_assets(
				cmd.roblox_files,
				rbx_asset::cookie::Cookie::new("".to_string()),
			).await,
		}
	}
}
/// Read the entire file at `path` into memory.
///
/// Uses `tokio::fs::read`, which pre-sizes the buffer from file metadata,
/// instead of the manual open + `read_to_end` incremental-growth pattern.
async fn read_entire_file(path:impl AsRef<Path>)->Result<Vec<u8>,std::io::Error>{
	tokio::fs::read(path).await
}
// Kind of asset to download; determines the on-disk cache sub-directory.
enum DownloadType{
Texture(RobloxAssetId),
Mesh(RobloxAssetId),
Union(RobloxAssetId),
}
impl DownloadType{
	/// Local cache path where this asset is stored once downloaded.
	fn path(&self)->PathBuf{
		// `asset_id.0` formats directly via Display; the original's
		// extra `.to_string()` allocated an intermediate String.
		match self{
			DownloadType::Texture(asset_id)=>format!("downloaded_textures/{}",asset_id.0).into(),
			DownloadType::Mesh(asset_id)=>format!("meshes/{}",asset_id.0).into(),
			DownloadType::Union(asset_id)=>format!("unions/{}",asset_id.0).into(),
		}
	}
	/// The numeric Roblox asset id, regardless of asset kind.
	fn asset_id(&self)->u64{
		match self{
			DownloadType::Texture(asset_id)
			|DownloadType::Mesh(asset_id)
			|DownloadType::Union(asset_id)=>asset_id.0,
		}
	}
}
// Outcome of a single download attempt.
enum DownloadResult{
// Asset already present on disk at this path; network was skipped.
Cached(PathBuf),
// Freshly downloaded bytes (also written to the cache path).
Data(Vec<u8>),
// Gave up: rate-limit retries exhausted or a non-retryable error.
Failed,
}
// Counters reported (via dbg!) after all downloads complete.
#[derive(Default,Debug)]
struct Stats{
total_assets:u32,
cached_assets:u32,
downloaded_assets:u32,
failed_downloads:u32,
timed_out_downloads:u32,
}
// Download one asset to its cache path, retrying HTTP 429 with exponential
// backoff (up to 12 retries). Updates `stats` counters as it goes.
// Only local I/O errors (cache check / cache write) propagate as Err;
// download failures are reported as Ok(DownloadResult::Failed).
// NOTE(review): overlaps with the library-level download_asset_retry; this
// variant additionally caches to disk and tracks stats.
async fn download_retry(stats:&mut Stats,context:&rbx_asset::cookie::CookieContext,download_instruction:DownloadType)->Result<DownloadResult,std::io::Error>{
stats.total_assets+=1;
let path=download_instruction.path();
// Cache hit: skip the network entirely.
if tokio::fs::try_exists(path.as_path()).await?{
stats.cached_assets+=1;
return Ok(DownloadResult::Cached(path));
}
let asset_id=download_instruction.asset_id();
let mut retry=0;
// Backoff grows from 1s by this factor on every 429.
const BACKOFF_MUL:f32=1.3956124250860895286;
let mut backoff=1000f32;
loop{
let asset_result=context.get_asset(rbx_asset::cookie::GetAssetRequest{
asset_id,
version:None,
}).await;
match asset_result{
Ok(asset_result)=>{
stats.downloaded_assets+=1;
// Persist so later runs take the Cached fast path.
tokio::fs::write(path,&asset_result).await?;
break Ok(DownloadResult::Data(asset_result));
},
Err(rbx_asset::cookie::GetError::Response(rbx_asset::ResponseError::StatusCodeWithUrlAndBody(scwuab)))=>{
// 429 Too Many Requests: sleep and retry; any other status is fatal.
if scwuab.status_code.as_u16()==429{
if retry==12{
println!("Giving up asset download {asset_id}");
stats.timed_out_downloads+=1;
break Ok(DownloadResult::Failed);
}
println!("Hit roblox rate limit, waiting {:.0}ms...",backoff);
tokio::time::sleep(std::time::Duration::from_millis(backoff as u64)).await;
backoff*=BACKOFF_MUL;
retry+=1;
}else{
stats.failed_downloads+=1;
println!("weird scuwab error: {scwuab:?}");
break Ok(DownloadResult::Failed);
}
},
Err(e)=>{
stats.failed_downloads+=1;
println!("sadly error: {e}");
break Ok(DownloadResult::Failed);
},
}
}
}
/// Convert one downloaded texture to DDS and write it to `textures/<id>.dds`.
/// A failed download is silently skipped.
async fn cli_convert_texture(asset_id:RobloxAssetId,download_result:DownloadResult)->Result<(),CliConvertTextureError>{
	let image_bytes=match download_result{
		// Cache hit: read the previously downloaded bytes back off disk.
		DownloadResult::Cached(path)=>tokio::fs::read(path).await?,
		DownloadResult::Data(data)=>data,
		// Nothing to convert.
		DownloadResult::Failed=>return Ok(()),
	};
	let dds_data=convert_texture_to_dds(&image_bytes)?;
	let dest=format!("textures/{}.dds",asset_id.0);
	tokio::fs::write(dest,dds_data).await?;
	Ok(())
}
// Errors from cli_convert_texture: file I/O or DDS conversion.
#[derive(Debug,thiserror::Error)]
enum CliConvertTextureError{
#[error("IO error {0:?}")]
Io(#[from]std::io::Error),
#[error("Convert texture error {0:?}")]
Convert(#[from]ConvertTextureError),
}
// Scan each Roblox file for referenced assets and download every unique
// texture/mesh/union. Textures are additionally converted to DDS.
// Pipeline: file-reader tasks -> send_assets channel -> sequential download
// task -> send_texture channel -> parallel DDS conversion tasks.
async fn cli_download_assets(paths:Vec<PathBuf>,cookie:rbx_asset::cookie::Cookie)->AResult<()>{
tokio::try_join!(
tokio::fs::create_dir_all("downloaded_textures"),
tokio::fs::create_dir_all("textures"),
tokio::fs::create_dir_all("meshes"),
tokio::fs::create_dir_all("unions"),
)?;
let thread_limit=std::thread::available_parallelism()?.get();
let (send_assets,mut recv_assets)=tokio::sync::mpsc::channel(DOWNLOAD_LIMIT);
let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit);
// Reader side: parse each file's DOM, at most thread_limit files at once.
tokio::spawn(async move{
let mut it=paths.into_iter();
// NOTE(review): SEM is a static, so a second call to this function in the
// same process would add thread_limit permits again — confirm this is
// only ever invoked once per process.
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let send=send_assets.clone();
tokio::spawn(async move{
let data=read_entire_file(path.as_path()).await;
let result=data.map_err(LoadDomError::from).and_then(|d|{
let dom=load_dom(&d)?;
Ok(get_unique_assets(dom))
});
_=send.send(result).await;
// Releasing the permit lets the reader loop spawn the next task.
drop(permit);
});
}
});
let mut stats=Stats::default();
let context=rbx_asset::cookie::CookieContext::new(cookie);
// Dedup across all input files so shared assets are fetched only once.
let mut globally_unique_assets=UniqueAssets::default();
// Download side: strictly sequential requests to respect rate limits.
let download_thread=tokio::spawn(async move{
while let Some(result)=recv_assets.recv().await{
let unique_assets=match result{
Ok(unique_assets)=>unique_assets,
Err(e)=>{
println!("error: {e:?}");
continue;
},
};
for texture_id in unique_assets.textures{
if globally_unique_assets.textures.insert(texture_id){
let data=download_retry(&mut stats,&context,DownloadType::Texture(texture_id)).await?;
// Hand textures off for DDS conversion.
send_texture.send((texture_id,data)).await?;
}
}
for mesh_id in unique_assets.meshes{
if globally_unique_assets.meshes.insert(mesh_id){
download_retry(&mut stats,&context,DownloadType::Mesh(mesh_id)).await?;
}
}
for union_id in unique_assets.unions{
if globally_unique_assets.unions.insert(union_id){
download_retry(&mut stats,&context,DownloadType::Union(union_id)).await?;
}
}
}
dbg!(stats);
Ok::<(),anyhow::Error>(())
});
// Conversion side: at most thread_limit DDS conversions in flight.
// NOTE(review): same static-semaphore caveat as above.
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some((asset_id,download_result)))=(SEM.acquire().await,recv_texture.recv().await){
tokio::spawn(async move{
let result=cli_convert_texture(asset_id,download_result).await;
drop(permit);
// NOTE(review): a conversion failure panics this spawned task and is
// otherwise unobserved — consider logging instead; confirm intent.
result.unwrap();
});
}
download_thread.await??;
// Reacquiring every permit waits for all in-flight conversion tasks.
_=SEM.acquire_many(thread_limit as u32).await.unwrap();
Ok(())
}
// Convert every input file to SNF concurrently, with at most thread_limit
// conversions in flight; prints total elapsed time when done.
async fn cli_roblox_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf)->AResult<()>{
let start=std::time::Instant::now();
let thread_limit=std::thread::available_parallelism()?.get();
let mut it=paths.into_iter();
// NOTE(review): static semaphore — a second call in the same process would
// add thread_limit permits again; confirm single invocation per process.
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let output_folder=output_folder.clone();
tokio::spawn(async move{
let result=cli_convert_to_snf(path.as_path(),output_folder).await;
// Release the permit as soon as the conversion finishes.
drop(permit);
match result{
Ok(())=>(),
Err(e)=>println!("Convert error: {e:?}"),
}
});
}
// Reacquiring every permit waits for all spawned conversions to finish.
_=SEM.acquire_many(thread_limit as u32).await.unwrap();
println!("elapsed={:?}", start.elapsed());
Ok(())
}
async fn cli_convert_to_snf(path:&Path,output_folder:PathBuf)->AResult<()>{
let entire_file=tokio::fs::read(path).await?;
let snf_data=convert_to_snf(&entire_file)?;
let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
dest.set_extension("snfm");
tokio::fs::write(dest,snf_data).await?;
Ok(())
}
}
#[cfg(feature="cli")]
pub use cli::Commands;

442
src/source.rs Normal file
View File

@@ -0,0 +1,442 @@
use std::io::Cursor;
use strafesnet_deferred_loader::deferred_loader::LoadFailureMode;
// === Public library API ===
/// Errors produced by [`convert_texture_to_dds`].
#[derive(Debug,thiserror::Error)]
pub enum ConvertTextureError{
/// Parsing or decoding the VTF input failed.
#[error("Vtf error {0:?}")]
Vtf(#[from]vtf::Error),
/// Encoding the decoded image into DDS failed.
#[error("DDS create error {0:?}")]
DDS(#[from]image_dds::CreateDdsError),
/// Serializing the DDS container failed.
#[error("DDS write error {0:?}")]
DDSWrite(#[from]image_dds::ddsfile::Error),
}
/// Convert VTF texture bytes to DDS texture bytes.
///
/// Textures whose dimensions are multiples of 4 are BC7-compressed;
/// anything else falls back to uncompressed RGBA8, since BC formats
/// operate on 4x4 pixel blocks. Decodes mip level 0 of the
/// highest-resolution image.
pub fn convert_texture_to_dds(vtf_data:&[u8])->Result<Vec<u8>,ConvertTextureError>{
	// NOTE(review): the input is copied before parsing — presumably the vtf
	// API requires an owned/mutable buffer here; confirm before removing.
	let owned_vtf=vtf_data.to_vec();
	let rgba=vtf::from_bytes(&owned_vtf)?.highres_image.decode(0)?.to_rgba8();
	let block_aligned=rgba.width()%4==0&&rgba.height()%4==0;
	let format=match block_aligned{
		true=>image_dds::ImageFormat::BC7RgbaUnormSrgb,
		false=>image_dds::ImageFormat::Rgba8UnormSrgb,
	};
	let dds=image_dds::dds_from_image(
		&rgba,
		format,
		image_dds::Quality::Slow,
		image_dds::Mipmaps::GeneratedAutomatic,
	)?;
	let mut dds_bytes=Vec::new();
	dds.write(&mut Cursor::new(&mut dds_bytes))?;
	Ok(dds_bytes)
}
/// Errors produced by [`convert_to_snf`].
#[derive(Debug,thiserror::Error)]
pub enum ConvertError{
/// Parsing the BSP container failed.
#[error("BSP read error {0:?}")]
BspRead(strafesnet_bsp_loader::ReadError),
/// Lowering the BSP into SNF failed.
#[error("BSP load error {0:?}")]
BspLoad(strafesnet_bsp_loader::LoadError),
/// Serializing the converted map into the SNF format failed.
#[error("SNF map error {0:?}")]
SNFMap(strafesnet_snf::map::Error),
/// Low-level BSP parse failure.
#[error("BSP parse error {0:?}")]
BspParse(#[from]vbsp::BspError),
}
/// Convert a Source BSP file (bytes) to SNF map format (bytes).
pub fn convert_to_snf(bsp_data:&[u8],vpk_list:&[vpk::VPK])->Result<Vec<u8>,ConvertError>{
	let bsp=strafesnet_bsp_loader::read(Cursor::new(bsp_data)).map_err(ConvertError::BspRead)?;
	// Lower the BSP into SNF, resolving external resources from the VPKs
	// and tolerating missing assets.
	let map=bsp.to_snf(LoadFailureMode::DefaultToNone,vpk_list).map_err(ConvertError::BspLoad)?;
	let mut snf_bytes=Vec::new();
	strafesnet_snf::map::write_map(Cursor::new(&mut snf_bytes),map).map_err(ConvertError::SNFMap)?;
	Ok(snf_bytes)
}
/// Read VPK archives from paths. Useful for loading VPKs needed by `convert_to_snf`.
///
/// Parses up to `thread_limit` archives concurrently on blocking threads.
/// NOTE(review): the two unwraps panic if a worker task fails or a VPK
/// fails to parse — confirm callers accept a panic on bad input.
pub async fn read_vpks(vpk_paths:Vec<std::path::PathBuf>,thread_limit:usize)->Vec<vpk::VPK>{
use futures::StreamExt;
futures::stream::iter(vpk_paths).map(|vpk_path|async{
// VPK::read is synchronous file I/O, so run it off the async runtime.
tokio::task::spawn_blocking(move||vpk::VPK::read(&vpk_path)).await.unwrap().unwrap()
})
.buffer_unordered(thread_limit)
.collect().await
}
// === CLI ===
#[cfg(feature="cli")]
mod cli{
use super::*;
use std::path::{Path,PathBuf};
use std::borrow::Cow;
use clap::{Args,Subcommand};
use anyhow::Result as AResult;
use strafesnet_bsp_loader::loader::BspFinder;
use strafesnet_deferred_loader::loader::Loader;
use strafesnet_deferred_loader::deferred_loader::{MeshDeferredLoader,RenderConfigDeferredLoader};
// Result of inspecting a VMT material for its primary texture reference.
enum VMTContent{
// The material redirects to another material to resolve.
VMT(String),
// The material's texture path (without the .vtf extension).
VTF(String),
// A patch material that must be resolved against its include file.
Patch(vmt_parser::material::PatchMaterial),
// This material type carries no usable texture.
Unsupported,
// This material type can carry a texture, but none was set.
Unresolved,
}
impl VMTContent{
// Wrap an optional texture path: Some -> VTF, None -> Unresolved.
fn vtf(opt:Option<String>)->Self{
match opt{
Some(s)=>Self::VTF(s),
None=>Self::Unresolved,
}
}
}
// Extract the most representative texture reference from a parsed material,
// choosing a base/decal/cornea texture depending on the material type.
// NOTE(review): the catch-all arm panics via unreachable!() — if vmt_parser
// adds material variants this becomes reachable; confirm this is acceptable.
fn get_some_texture(material:vmt_parser::material::Material)->VMTContent{
match material{
vmt_parser::material::Material::LightMappedGeneric(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::VertexLitGeneric(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),
vmt_parser::material::Material::VertexLitGenericDx6(mat)=>VMTContent::vtf(mat.base_texture.or(mat.decal_texture)),
vmt_parser::material::Material::UnlitGeneric(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::UnlitTwoTexture(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::Water(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::WorldVertexTransition(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::EyeRefract(mat)=>VMTContent::vtf(Some(mat.cornea_texture)),
vmt_parser::material::Material::SubRect(mat)=>VMTContent::VMT(mat.material),
vmt_parser::material::Material::Sprite(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::SpriteCard(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::Cable(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::Refract(mat)=>VMTContent::vtf(mat.base_texture),
vmt_parser::material::Material::Modulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::DecalModulate(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::Sky(mat)=>VMTContent::vtf(Some(mat.base_texture)),
vmt_parser::material::Material::Replacements(_mat)=>VMTContent::Unsupported,
vmt_parser::material::Material::Patch(mat)=>VMTContent::Patch(mat),
_=>unreachable!(),
}
}
// Errors from locating and parsing a .vmt material file.
#[derive(Debug,thiserror::Error)]
enum GetVMTError{
#[error("Bsp error {0:?}")]
Bsp(#[from]vbsp::BspError),
#[error("Utf8 error {0:?}")]
Utf8(#[from]std::str::Utf8Error),
#[error("Vdf error {0:?}")]
Vdf(#[from]vmt_parser::VdfError),
// The search name matched nothing in the BSP pakfile or the VPKs.
#[error("Vmt not found")]
NotFound,
}
/// Locate `search_name` via the finder and parse the bytes as a VMT material.
fn get_vmt(finder:BspFinder,search_name:&str)->Result<vmt_parser::material::Material,GetVMTError>{
	let vmt_bytes=finder.find(search_name)?.ok_or(GetVMTError::NotFound)?;
	let vmt_text=core::str::from_utf8(&vmt_bytes)?;
	Ok(vmt_parser::from_str(vmt_text)?)
}
// Errors from resolving a material chain down to actual texture bytes.
#[derive(Debug,thiserror::Error)]
enum LoadVMTError{
#[error("Bsp error {0:?}")]
Bsp(#[from]vbsp::BspError),
#[error("GetVMT error {0:?}")]
GetVMT(#[from]GetVMTError),
#[error("FromUtf8 error {0:?}")]
FromUtf8(#[from]std::string::FromUtf8Error),
#[error("Vdf error {0:?}")]
Vdf(#[from]vmt_parser::VdfError),
// Material type carries no texture (see VMTContent::Unsupported).
#[error("Vmt unsupported")]
Unsupported,
// Material could carry a texture but none was set.
#[error("Vmt unresolved")]
Unresolved,
#[error("Vmt not found")]
NotFound,
}
// Follow a material to its texture bytes, recursing through VMT redirects
// and patch materials. The returned bytes may borrow from either the BSP
// ('bsp) or a VPK ('vpk), hence both must outlive the result lifetime 'a.
fn recursive_vmt_loader<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,material:vmt_parser::material::Material)->Result<Option<Cow<'a,[u8]>>,LoadVMTError>
where
'bsp:'a,
'vpk:'a,
{
match get_some_texture(material){
// Indirect: load the referenced material and recurse into it.
VMTContent::VMT(s)=>recursive_vmt_loader(finder,get_vmt(finder,s.as_str())?),
VMTContent::VTF(s)=>{
// Direct hit: materials/<path>.vtf
let mut texture_file_name=PathBuf::from("materials");
texture_file_name.push(s);
texture_file_name.set_extension("vtf");
Ok(finder.find(texture_file_name.to_str().unwrap())?)
},
// Patch material: resolve its include file, then recurse into the result.
VMTContent::Patch(mat)=>recursive_vmt_loader(finder,
mat.resolve(|search_name|
match finder.find(search_name)?{
Some(bytes)=>Ok(String::from_utf8(bytes.into_owned())?),
None=>Err(LoadVMTError::NotFound),
}
)?
),
VMTContent::Unsupported=>Err(LoadVMTError::Unsupported),
VMTContent::Unresolved=>Err(LoadVMTError::Unresolved),
}
}
/// Look up a texture by name: first try the raw (extension-less) path under
/// `materials/`, then fall back to resolving the material's `.vmt` file.
fn load_texture<'bsp,'vpk,'a>(finder:BspFinder<'bsp,'vpk>,texture_name:&str)->Result<Option<Cow<'a,[u8]>>,LoadVMTError>
where
	'bsp:'a,
	'vpk:'a,
{
	// Build "materials/<lowercased name>" and strip any extension.
	let mut texture_file_name=PathBuf::from("materials");
	texture_file_name.push(texture_name.to_lowercase());
	// NOTE(review): file_stem() panics for names like ".." — assumed not to
	// occur for texture names coming out of a BSP; confirm.
	let stem=PathBuf::from(texture_file_name.file_stem().unwrap());
	texture_file_name.pop();
	texture_file_name.push(stem);
	// Some textures are stored without an extension; try that first.
	if let Some(stuff)=finder.find(texture_file_name.to_str().unwrap())?{
		return Ok(Some(stuff))
	}
	// Otherwise resolve the .vmt material and follow it to its texture.
	// (The original also built a ".vtf" sibling path that was never read;
	// that dead statement and a needless clone have been removed.)
	texture_file_name.set_extension("vmt");
	recursive_vmt_loader(finder,get_vmt(finder,texture_file_name.to_str().unwrap())?)
}
// Source-engine-related CLI subcommands.
// (Plain // comments here: clap surfaces /// doc comments as --help text.)
#[derive(Subcommand)]
pub enum Commands{
SourceToSNF(SourceToSNFSubcommand),
ExtractTextures(ExtractTexturesSubcommand),
VPKContents(VPKContentsSubcommand),
BSPContents(BSPContentsSubcommand),
}
// Convert BSP maps to SNF, resolving assets from the given VPKs.
#[derive(Args)]
pub struct SourceToSNFSubcommand {
#[arg(long)]
output_folder:PathBuf,
#[arg(required=true)]
input_files:Vec<PathBuf>,
#[arg(long)]
vpks:Vec<PathBuf>,
}
// Extract and convert every texture referenced by the given BSPs.
#[derive(Args)]
pub struct ExtractTexturesSubcommand{
#[arg(required=true)]
bsp_files:Vec<PathBuf>,
#[arg(long)]
vpks:Vec<PathBuf>,
}
// List the directory tree of a VPK archive.
#[derive(Args)]
pub struct VPKContentsSubcommand {
#[arg(long)]
input_file:PathBuf,
}
// List the files embedded in a BSP's pakfile lump.
#[derive(Args)]
pub struct BSPContentsSubcommand {
#[arg(long)]
input_file:PathBuf,
}
impl Commands{
	/// Dispatch the selected subcommand.
	pub async fn run(self)->AResult<()>{
		match self{
			Self::SourceToSNF(cmd)=>cli_source_to_snf(cmd.input_files,cmd.output_folder,cmd.vpks).await,
			Self::ExtractTextures(cmd)=>cli_extract_textures(cmd.bsp_files,cmd.vpks).await,
			Self::VPKContents(cmd)=>vpk_contents(cmd.input_file),
			Self::BSPContents(cmd)=>bsp_contents(cmd.input_file),
		}
	}
}
// Errors from extracting the textures of a single BSP.
#[derive(Debug,thiserror::Error)]
enum ExtractTextureError{
#[error("Io error {0:?}")]
Io(#[from]std::io::Error),
#[error("Bsp error {0:?}")]
Bsp(#[from]vbsp::BspError),
#[error("MeshLoad error {0:?}")]
MeshLoad(#[from]strafesnet_bsp_loader::loader::MeshError),
#[error("Load VMT error {0:?}")]
LoadVMT(#[from]LoadVMTError),
}
// Collect every texture referenced by one BSP (world textures plus the
// textures of all static prop models) and send the raw bytes with their
// path over `send_texture` for conversion.
async fn gimme_them_textures(path:&Path,vpk_list:&[vpk::VPK],send_texture:tokio::sync::mpsc::Sender<(Vec<u8>,String)>)->Result<(),ExtractTextureError>{
let bsp=vbsp::Bsp::read(tokio::fs::read(path).await?.as_ref())?;
let loader_bsp=strafesnet_bsp_loader::Bsp::new(bsp);
let bsp=loader_bsp.as_ref();
// Dedup world texture names.
let mut texture_deferred_loader=RenderConfigDeferredLoader::new();
for texture in bsp.textures(){
texture_deferred_loader.acquire_render_config_id(Some(Cow::Borrowed(texture.name())));
}
// Dedup static prop model paths.
let mut mesh_deferred_loader=MeshDeferredLoader::new();
for prop in bsp.static_props(){
mesh_deferred_loader.acquire_mesh_id(prop.model());
}
// The finder searches the BSP pakfile and the provided VPKs.
let finder=BspFinder{
bsp:&loader_bsp,
vpks:vpk_list
};
let mut mesh_loader=strafesnet_bsp_loader::loader::ModelLoader::new(finder);
// Load each model and queue its textures under every search path, since
// the actual location is unknown until lookup time.
for model_path in mesh_deferred_loader.into_indices(){
let model:vmdl::Model=match mesh_loader.load(model_path){
Ok(model)=>model,
Err(e)=>{
println!("Model={model_path} Load model error: {e}");
continue;
},
};
for texture in model.textures(){
for search_path in &texture.search_paths{
let mut path=PathBuf::from(search_path.as_str());
path.push(texture.name.as_str());
let path=path.to_str().unwrap().to_owned();
texture_deferred_loader.acquire_render_config_id(Some(Cow::Owned(path)));
}
}
}
// Resolve every queued texture; Ok(None) (not found) entries are skipped.
for texture_path in texture_deferred_loader.into_indices(){
match load_texture(finder,&texture_path){
Ok(Some(texture))=>send_texture.send(
(texture.into_owned(),texture_path.into_owned())
).await.unwrap(),
Ok(None)=>(),
Err(e)=>println!("Texture={texture_path} Load error: {e}"),
}
}
Ok(())
}
// Errors from cli_convert_texture: file I/O or DDS conversion.
#[derive(Debug,thiserror::Error)]
enum CliConvertTextureError{
#[error("IO error {0:?}")]
Io(#[from]std::io::Error),
#[error("Convert texture error {0:?}")]
Convert(#[from]ConvertTextureError),
}
async fn cli_convert_texture(texture:Vec<u8>,write_file_name:impl AsRef<Path>)->Result<(),CliConvertTextureError>{
let dds_data=convert_texture_to_dds(&texture)?;
let mut dest=PathBuf::from("textures");
dest.push(write_file_name);
dest.set_extension("dds");
std::fs::create_dir_all(dest.parent().unwrap())?;
let mut writer=std::io::BufWriter::new(std::fs::File::create(dest)?);
std::io::Write::write_all(&mut writer,&dds_data)?;
Ok(())
}
// Extract textures from each BSP concurrently and convert them to DDS.
// Pipeline: extraction tasks -> send_texture channel -> conversion tasks.
async fn cli_extract_textures(paths:Vec<PathBuf>,vpk_paths:Vec<PathBuf>)->AResult<()>{
tokio::try_join!(
tokio::fs::create_dir_all("extracted_textures"),
tokio::fs::create_dir_all("textures"),
tokio::fs::create_dir_all("meshes"),
)?;
let thread_limit=std::thread::available_parallelism()?.get();
let vpk_list=read_vpks(vpk_paths,thread_limit).await;
// Leak the VPKs to obtain 'static borrows for the spawned tasks
// (a process-lifetime CLI, so the leak is bounded and deliberate).
let vpk_list:&[vpk::VPK]=vpk_list.leak();
let (send_texture,mut recv_texture)=tokio::sync::mpsc::channel(thread_limit);
let mut it=paths.into_iter();
let extract_thread=tokio::spawn(async move{
// NOTE(review): static semaphore — a second call to this function in the
// same process would add thread_limit permits again; confirm this is
// only invoked once per process.
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let send=send_texture.clone();
tokio::spawn(async move{
let result=gimme_them_textures(&path,vpk_list,send).await;
drop(permit);
match result{
Ok(())=>(),
Err(e)=>println!("Map={path:?} Decode error: {e:?}"),
}
});
}
});
// Conversion side: at most thread_limit DDS conversions in flight.
// NOTE(review): same static-semaphore caveat as above.
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some((data,dest)))=(SEM.acquire().await,recv_texture.recv().await){
tokio::spawn(async move{
let result=cli_convert_texture(data,dest).await;
drop(permit);
match result{
Ok(())=>(),
Err(e)=>println!("Convert error: {e:?}"),
}
});
}
extract_thread.await?;
// Reacquiring every permit waits for remaining conversion tasks.
_=SEM.acquire_many(thread_limit as u32).await?;
Ok(())
}
/// Print every entry in the given VPK archive's directory tree.
fn vpk_contents(vpk_path:PathBuf)->AResult<()>{
	let vpk_index=vpk::VPK::read(&vpk_path)?;
	for (label,entry) in vpk_index.tree.into_iter(){
		println!("vpk label={} entry={:?}",label,entry);
	}
	Ok(())
}
/// Print the names of all files embedded in the BSP's pakfile lump.
fn bsp_contents(path:PathBuf)->AResult<()>{
	let bsp_bytes=std::fs::read(path)?;
	let bsp=vbsp::Bsp::read(bsp_bytes.as_ref())?;
	let zip=bsp.pack.into_zip().into_inner().unwrap();
	for file_name in zip.file_names(){
		println!("file_name={:?}",file_name);
	}
	Ok(())
}
async fn cli_convert_to_snf(path:&Path,vpk_list:&[vpk::VPK],output_folder:PathBuf)->AResult<()>{
let entire_file=tokio::fs::read(path).await?;
let snf_data=convert_to_snf(&entire_file,vpk_list)?;
let mut dest=output_folder;
dest.push(path.file_stem().unwrap());
dest.set_extension("snfm");
tokio::fs::write(dest,snf_data).await?;
Ok(())
}
// Convert every input BSP to SNF concurrently, with at most thread_limit
// conversions in flight; prints total elapsed time when done.
async fn cli_source_to_snf(paths:Vec<std::path::PathBuf>,output_folder:PathBuf,vpk_paths:Vec<PathBuf>)->AResult<()>{
let start=std::time::Instant::now();
let thread_limit=std::thread::available_parallelism()?.get();
let vpk_list=read_vpks(vpk_paths,thread_limit).await;
// Leak the VPKs to obtain 'static borrows for the spawned tasks
// (a process-lifetime CLI, so the leak is bounded and deliberate).
let vpk_list:&[vpk::VPK]=vpk_list.leak();
let mut it=paths.into_iter();
// NOTE(review): static semaphore — a second call in the same process would
// add thread_limit permits again; confirm single invocation per process.
static SEM:tokio::sync::Semaphore=tokio::sync::Semaphore::const_new(0);
SEM.add_permits(thread_limit);
while let (Ok(permit),Some(path))=(SEM.acquire().await,it.next()){
let output_folder=output_folder.clone();
tokio::spawn(async move{
let result=cli_convert_to_snf(path.as_path(),vpk_list,output_folder).await;
drop(permit);
match result{
Ok(())=>(),
Err(e)=>println!("Convert error: {e:?}"),
}
});
}
// Reacquiring every permit waits for all spawned conversions to finish.
_=SEM.acquire_many(thread_limit as u32).await.unwrap();
println!("elapsed={:?}", start.elapsed());
Ok(())
}
}
#[cfg(feature="cli")]
pub use cli::Commands;