// forked from StrafesNET/asset-tool
// Repository-based logic is automatically made relative to the top-level path
// the repo was initialised with, so an output folder is supported with no
// issues here.
use std::{io::Read,path::PathBuf};
|
|
use clap::{Args,Parser,Subcommand};
|
|
use anyhow::{anyhow,Result as AResult};
|
|
use futures::StreamExt;
|
|
use rbx_asset::cloud::{ApiKey,CloudContext};
|
|
use rbx_asset::cookie::{Cookie,CookieContext,AssetVersion,InventoryItem};
|
|
|
|
/// Roblox asset ids are unsigned 64-bit integers.
type AssetID=u64;
/// Pairs each asset id with the destination path its data is written to.
type AssetIDFileMap=Vec<(AssetID,PathBuf)>;
// Bound on concurrent decode work (presumably used by decompile paths later
// in the file — not referenced in this chunk).
const CONCURRENT_DECODE:usize=8;
// Bound on in-flight network requests for the buffered streams below.
const CONCURRENT_REQUESTS:usize=32;
|
|
|
|
// Top-level CLI: a single required subcommand, version propagated to all
// subcommands. (Plain `//` comments used throughout the clap structs so the
// generated --help text is unchanged.)
#[derive(Parser)]
#[command(author,version,about,long_about=None)]
#[command(propagate_version = true)]
struct Cli{
	#[command(subcommand)]
	command:Commands,
}
|
|
|
|
// Every supported subcommand, each carrying its own argument struct.
// NOTE(review): the `UploadAsset`/`UploadAssetMedia` variants wrap structs
// named `Update*Subcommand` — naming is inconsistent but harmless.
#[derive(Subcommand)]
enum Commands{
	DownloadHistory(DownloadHistorySubcommand),
	Download(DownloadSubcommand),
	DownloadDecompile(DownloadDecompileSubcommand),
	DownloadGroupInventoryJson(DownloadGroupInventoryJsonSubcommand),
	CreateAsset(CreateAssetSubcommand),
	CreateAssetMedia(CreateAssetMediaSubcommand),
	CreateAssetMedias(CreateAssetMediasSubcommand),
	UploadAsset(UpdateAssetSubcommand),
	UploadAssetMedia(UpdateAssetMediaSubcommand),
	UploadPlace(UpdatePlaceSubcommand),
	Compile(CompileSubcommand),
	CompileUploadAsset(CompileUploadAssetSubcommand),
	CompileUploadPlace(CompileUploadPlaceSubcommand),
	Decompile(DecompileSubcommand),
	DecompileHistoryIntoGit(DecompileHistoryIntoGitSubcommand),
	DownloadAndDecompileHistoryIntoGit(DownloadAndDecompileHistoryIntoGitSubcommand),
}
|
|
|
|
/// Download a range of assets from the asset version history. Download summary is saved to `output_folder/versions.json`, and can be optionally used to download only new versions the next time.
#[derive(Args)]
struct DownloadHistorySubcommand{
	#[arg(long)]
	asset_id:AssetID,
	// Exactly one of the three cookie sources must be supplied: clap treats
	// required members of one group as "one of".
	#[arg(long,group="cookie",required=true)]
	cookie_literal:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_envvar:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_file:Option<PathBuf>,
	// Defaults to the current directory (applied in main).
	#[arg(long)]
	output_folder:Option<PathBuf>,
	#[arg(long)]
	continue_from_versions:Option<bool>,
	#[arg(long)]
	start_version:Option<u64>,
	#[arg(long)]
	end_version:Option<u64>,
}
|
|
/// Download a single asset by id.
#[derive(Args)]
struct DownloadSubcommand{
	// One-of-three cookie sources (clap group).
	#[arg(long,group="cookie",required=true)]
	cookie_literal:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_envvar:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_file:Option<PathBuf>,
	// Defaults to the current directory (applied in main).
	#[arg(long)]
	output_folder:Option<PathBuf>,
	// Positional: one or more asset ids to download.
	#[arg(required=true)]
	asset_ids:Vec<AssetID>,
}
|
|
/// Download the list of asset ids (not the assets themselves) in a group inventory. The output is written to `output_folder/versions.json`
#[derive(Args)]
struct DownloadGroupInventoryJsonSubcommand{
	// One-of-three cookie sources (clap group).
	#[arg(long,group="cookie",required=true)]
	cookie_literal:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_envvar:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_file:Option<PathBuf>,
	#[arg(long)]
	output_folder:Option<PathBuf>,
	// Group id whose inventory is listed.
	#[arg(long)]
	group:u64,
}
|
|
/// Upload a (.rbxm, .rbxmx) model file, creating a new asset. Can be any type of model, including modulescripts.
#[derive(Args)]
struct CreateAssetSubcommand{
	// One-of-three cookie sources (clap group).
	#[arg(long,group="cookie",required=true)]
	cookie_literal:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_envvar:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_file:Option<PathBuf>,
	// When set, the asset is created under this group instead of the user.
	#[arg(long)]
	group_id:Option<u64>,
	#[arg(long)]
	input_file:PathBuf,
	#[arg(long)]
	model_name:String,
	#[arg(long)]
	description:Option<String>,
	#[arg(long)]
	free_model:Option<bool>,
	#[arg(long)]
	allow_comments:Option<bool>,
}
|
|
/// Upload a media file (.jpg, .png) to a new asset and print the asset id
#[derive(Args)]
struct CreateAssetMediaSubcommand{
	// One-of-three api key sources (clap group).
	#[arg(long,group="api_key",required=true)]
	api_key_literal:Option<String>,
	#[arg(long,group="api_key",required=true)]
	api_key_envvar:Option<String>,
	#[arg(long,group="api_key",required=true)]
	api_key_file:Option<PathBuf>,
	#[arg(long)]
	model_name:String,
	#[arg(long)]
	description:Option<String>,
	#[arg(long)]
	input_file:PathBuf,
	#[arg(long)]
	asset_type:AssetType,
	// Exactly one creator (user or group) must be supplied.
	#[arg(long,group="creator",required=true)]
	creator_user_id:Option<u64>,
	#[arg(long,group="creator",required=true)]
	creator_group_id:Option<u64>,
	/// Expected price limits how much robux can be spent to create the asset (defaults to 0)
	#[arg(long)]
	expected_price:Option<u64>,
}
|
|
/// Upload multiple media files (.jpg, .png) Automatically detect the media type from file extension and generate asset name and description. If you want support for more file types (.fbx, .mp3, .ogg) it should be fairly straightforward, just ask.
#[derive(Args)]
struct CreateAssetMediasSubcommand{
	// Needs BOTH an api key (cloud create) and a cookie (legacy download of
	// the created decal) — see create_asset_medias below.
	#[arg(long,group="api_key",required=true)]
	api_key_literal:Option<String>,
	#[arg(long,group="api_key",required=true)]
	api_key_envvar:Option<String>,
	#[arg(long,group="api_key",required=true)]
	api_key_file:Option<PathBuf>,
	#[arg(long,group="cookie",required=true)]
	cookie_literal:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_envvar:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_file:Option<PathBuf>,
	#[arg(long)]
	description:Option<String>,
	// Exactly one creator (user or group) must be supplied.
	#[arg(long,group="creator",required=true)]
	creator_user_id:Option<u64>,
	#[arg(long,group="creator",required=true)]
	creator_group_id:Option<u64>,
	/// Expected price limits how much robux can be spent to create the asset (defaults to 0)
	#[arg(long)]
	expected_price:Option<u64>,
	// Positional: the media files to upload.
	input_files:Vec<PathBuf>,
}
|
|
/// Upload a (.rbxm, .rbxmx) model file to an existing asset. Can be any type of model, including modulescripts.
#[derive(Args)]
struct UpdateAssetSubcommand{
	#[arg(long)]
	asset_id:AssetID,
	// One-of-three cookie sources (clap group).
	#[arg(long,group="cookie",required=true)]
	cookie_literal:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_envvar:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_file:Option<PathBuf>,
	#[arg(long)]
	group_id:Option<u64>,
	#[arg(long)]
	input_file:PathBuf,
	// `change_*` args are optional property edits applied with the upload.
	#[arg(long)]
	change_name:Option<String>,
	#[arg(long)]
	change_description:Option<String>,
	#[arg(long)]
	change_free_model:Option<bool>,
	#[arg(long)]
	change_allow_comments:Option<bool>,
}
|
|
/// Upload a media file (.jpg, .png) to an existing asset.
#[derive(Args)]
struct UpdateAssetMediaSubcommand{
	#[arg(long)]
	asset_id:AssetID,
	// One-of-three api key sources (clap group).
	#[arg(long,group="api_key",required=true)]
	api_key_literal:Option<String>,
	#[arg(long,group="api_key",required=true)]
	api_key_envvar:Option<String>,
	#[arg(long,group="api_key",required=true)]
	api_key_file:Option<PathBuf>,
	#[arg(long)]
	input_file:PathBuf,
}
|
|
/// Upload a place file (.rbxl, .rbxlx) to an existing place.
#[derive(Args)]
struct UpdatePlaceSubcommand{
	#[arg(long)]
	place_id:u64,
	// The universe that owns the place; required by the cloud API.
	#[arg(long)]
	universe_id:u64,
	// One-of-three api key sources (clap group).
	#[arg(long,group="api_key",required=true)]
	api_key_literal:Option<String>,
	#[arg(long,group="api_key",required=true)]
	api_key_envvar:Option<String>,
	#[arg(long,group="api_key",required=true)]
	api_key_file:Option<PathBuf>,
	#[arg(long)]
	input_file:PathBuf,
}
|
|
/// Take an input folder containing scripts and models and turn it into a roblox file. The two types of files (.rbxl: place, .rbxm: model) are actually the same file format, only the contents differ.
#[derive(Args)]
struct CompileSubcommand{
	// Defaults to the current directory (applied in main).
	#[arg(long)]
	input_folder:Option<PathBuf>,
	#[arg(long)]
	output_file:PathBuf,
	#[arg(long)]
	style:Option<Style>,
	#[arg(long)]
	template:Option<PathBuf>,
}
|
|
/// Take an input folder containing scripts and models and turn it into a roblox file, then upload it to the specified asset id. Does not work for places.
#[derive(Args)]
struct CompileUploadAssetSubcommand{
	#[arg(long)]
	asset_id:AssetID,
	// One-of-three cookie sources (clap group).
	#[arg(long,group="cookie",required=true)]
	cookie_literal:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_envvar:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_file:Option<PathBuf>,
	#[arg(long)]
	group_id:Option<u64>,
	// Defaults to the current directory (applied in main).
	#[arg(long)]
	input_folder:Option<PathBuf>,
	#[arg(long)]
	style:Option<Style>,
	#[arg(long)]
	template:Option<PathBuf>,
}
|
|
/// Take an input folder containing scripts and models and turn it into a roblox file, then upload it to the specified place id. Does not work for model asset ids.
#[derive(Args)]
struct CompileUploadPlaceSubcommand{
	#[arg(long)]
	place_id:u64,
	#[arg(long)]
	universe_id:u64,
	// One-of-three api key sources (clap group).
	#[arg(long,group="api_key",required=true)]
	api_key_literal:Option<String>,
	#[arg(long,group="api_key",required=true)]
	api_key_envvar:Option<String>,
	#[arg(long,group="api_key",required=true)]
	api_key_file:Option<PathBuf>,
	// Defaults to the current directory (applied in main).
	#[arg(long)]
	input_folder:Option<PathBuf>,
	#[arg(long)]
	style:Option<Style>,
	#[arg(long)]
	template:Option<PathBuf>,
}
|
|
/// Take a roblox file (.rbxm, .rbxl) and turn it into a folder containing scripts and models. Rox style means property overrides are written to the top of scripts, Rojo style means property overrides are written to the script file extension (Script.server.lua).
#[derive(Args)]
struct DecompileSubcommand{
	#[arg(long)]
	input_file:PathBuf,
	// Defaults to the current directory (applied in main).
	#[arg(long)]
	output_folder:Option<PathBuf>,
	#[arg(long)]
	style:Style,
	// Defaults applied in main: template=false, models=false, scripts=true.
	#[arg(long)]
	write_template:Option<bool>,
	#[arg(long)]
	write_models:Option<bool>,
	#[arg(long)]
	write_scripts:Option<bool>,
}
|
|
/// Download a model from the specified asset id, and decompile it into a folder in one swift motion. The model file is not saved to disk. This also works for places.
#[derive(Args)]
struct DownloadDecompileSubcommand{
	// One-of-three cookie sources (clap group).
	#[arg(long,group="cookie",required=true)]
	cookie_literal:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_envvar:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_file:Option<PathBuf>,
	// Defaults to the current directory (applied in main).
	#[arg(long)]
	output_folder:Option<PathBuf>,
	#[arg(long)]
	asset_id:AssetID,
	#[arg(long)]
	style:Style,
	// Defaults applied in main: template=false, models=false, scripts=true.
	#[arg(long)]
	write_template:Option<bool>,
	#[arg(long)]
	write_models:Option<bool>,
	#[arg(long)]
	write_scripts:Option<bool>,
}
|
|
/// Take a folder of asset history (containing `versions.json`) and decompile each version into its own git commit. This must be run with the desired output folder as the current directory due to git2 limitations.
#[derive(Args)]
struct DecompileHistoryIntoGitSubcommand{
	#[arg(long)]
	input_folder:PathBuf,
	// Defaults to the current directory (applied in main).
	#[arg(long)]
	output_folder:Option<PathBuf>,
	#[arg(long)]
	style:Style,
	// Identity recorded on each generated git commit.
	#[arg(long)]
	git_committer_name:String,
	#[arg(long)]
	git_committer_email:String,
	// Defaults applied in main: template=false, models=false, scripts=true.
	#[arg(long)]
	write_template:Option<bool>,
	#[arg(long)]
	write_models:Option<bool>,
	#[arg(long)]
	write_scripts:Option<bool>,
}
|
|
/// Download asset history, download asset versions, decompile into folder, create a git commit for each version. This is a combination of two commands (download-history, decompile-history-into-git) except without intermediate files.
#[derive(Args)]
struct DownloadAndDecompileHistoryIntoGitSubcommand{
	#[arg(long)]
	asset_id:AssetID,
	// One-of-three cookie sources (clap group).
	#[arg(long,group="cookie",required=true)]
	cookie_literal:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_envvar:Option<String>,
	#[arg(long,group="cookie",required=true)]
	cookie_file:Option<PathBuf>,
	// Defaults to the current directory (applied in main).
	#[arg(long)]
	output_folder:Option<PathBuf>,
	#[arg(long)]
	style:Style,
	// Identity recorded on each generated git commit.
	#[arg(long)]
	git_committer_name:String,
	#[arg(long)]
	git_committer_email:String,
	// Defaults applied in main: template=false, models=false, scripts=true.
	#[arg(long)]
	write_template:Option<bool>,
	#[arg(long)]
	write_models:Option<bool>,
	#[arg(long)]
	write_scripts:Option<bool>,
}
|
|
|
|
// CLI-facing decompile/compile style selector; mirrors rox_compiler::Style
// (see Style::rox below for the mapping).
#[derive(Clone,Copy,Debug,clap::ValueEnum)]
enum Style{
	Rox,
	Rojo,
	RoxRojo,
}
|
|
impl Style{
|
|
fn rox(&self)->rox_compiler::Style{
|
|
match self{
|
|
Style::Rox=>rox_compiler::Style::Rox,
|
|
Style::Rojo=>rox_compiler::Style::Rojo,
|
|
Style::RoxRojo=>rox_compiler::Style::RoxRojo,
|
|
}
|
|
}
|
|
}
|
|
// CLI-facing asset type selector; mirrors rbx_asset::cloud::AssetType
// (see AssetType::cloud below for the mapping).
#[derive(Clone,Copy,Debug,clap::ValueEnum)]
enum AssetType{
	Audio,
	Decal,
	Model,
}
|
|
impl AssetType{
|
|
fn cloud(&self)->rbx_asset::cloud::AssetType{
|
|
match self{
|
|
AssetType::Audio=>rbx_asset::cloud::AssetType::Audio,
|
|
AssetType::Decal=>rbx_asset::cloud::AssetType::Decal,
|
|
AssetType::Model=>rbx_asset::cloud::AssetType::Model,
|
|
}
|
|
}
|
|
}
|
|
|
|
#[tokio::main]
/// Parse the CLI and dispatch to the matching operation. This function owns
/// all defaulting policy: missing folders fall back to the current directory,
/// `write_scripts` defaults to true while `write_template`/`write_models`
/// default to false, and missing descriptions become the empty string.
async fn main()->AResult<()>{
	let cli=Cli::parse();
	match cli.command{
		Commands::DownloadHistory(subcommand)=>download_history(DownloadHistoryConfig{
			continue_from_versions:subcommand.continue_from_versions.unwrap_or(false),
			end_version:subcommand.end_version,
			start_version:subcommand.start_version.unwrap_or(0),
			output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
			cookie:cookie_from_args(
				subcommand.cookie_literal,
				subcommand.cookie_envvar,
				subcommand.cookie_file,
			).await?,
			asset_id:subcommand.asset_id,
		}).await,
		Commands::Download(subcommand)=>{
			let output_folder=subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap());
			download_list(
				cookie_from_args(
					subcommand.cookie_literal,
					subcommand.cookie_envvar,
					subcommand.cookie_file,
				).await?,
				//each asset is saved to `output_folder/<asset_id>`
				subcommand.asset_ids.into_iter().map(|asset_id|{
					let mut path=output_folder.clone();
					path.push(asset_id.to_string());
					(asset_id,path)
				}).collect()
			).await
		},
		Commands::DownloadDecompile(subcommand)=>{
			download_decompile(DownloadDecompileConfig{
				cookie:cookie_from_args(
					subcommand.cookie_literal,
					subcommand.cookie_envvar,
					subcommand.cookie_file,
				).await?,
				asset_id:subcommand.asset_id,
				output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
				style:subcommand.style.rox(),
				write_template:subcommand.write_template.unwrap_or(false),
				write_models:subcommand.write_models.unwrap_or(false),
				write_scripts:subcommand.write_scripts.unwrap_or(true),
			}).await
		},
		Commands::DownloadGroupInventoryJson(subcommand)=>download_group_inventory_json(
			cookie_from_args(
				subcommand.cookie_literal,
				subcommand.cookie_envvar,
				subcommand.cookie_file,
			).await?,
			subcommand.group,
			subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
		).await,
		Commands::CreateAsset(subcommand)=>create_asset(CreateAssetConfig{
			cookie:cookie_from_args(
				subcommand.cookie_literal,
				subcommand.cookie_envvar,
				subcommand.cookie_file,
			).await?,
			group:subcommand.group_id,
			input_file:subcommand.input_file,
			model_name:subcommand.model_name,
			description:subcommand.description.unwrap_or_else(||String::with_capacity(0)),
			free_model:subcommand.free_model.unwrap_or(false),
			allow_comments:subcommand.allow_comments.unwrap_or(false),
		}).await,
		Commands::CreateAssetMedia(subcommand)=>create_asset_media(CreateAssetMediaConfig{
			api_key:api_key_from_args(
				subcommand.api_key_literal,
				subcommand.api_key_envvar,
				subcommand.api_key_file,
			).await?,
			//clap's group constraint should make the fallthrough unreachable,
			//but it is still checked explicitly here
			creator:match (subcommand.creator_user_id,subcommand.creator_group_id){
				(Some(user_id),None)=>rbx_asset::cloud::Creator::userId(user_id.to_string()),
				(None,Some(group_id))=>rbx_asset::cloud::Creator::groupId(group_id.to_string()),
				other=>Err(anyhow!("Invalid creator {other:?}"))?,
			},
			input_file:subcommand.input_file,
			asset_type:subcommand.asset_type.cloud(),
			model_name:subcommand.model_name,
			description:subcommand.description.unwrap_or_else(||String::with_capacity(0)),
			expected_price:subcommand.expected_price,
		}).await,
		Commands::CreateAssetMedias(subcommand)=>create_asset_medias(CreateAssetMediasConfig{
			api_key:api_key_from_args(
				subcommand.api_key_literal,
				subcommand.api_key_envvar,
				subcommand.api_key_file,
			).await?,
			cookie:cookie_from_args(
				subcommand.cookie_literal,
				subcommand.cookie_envvar,
				subcommand.cookie_file,
			).await?,
			creator:match (subcommand.creator_user_id,subcommand.creator_group_id){
				(Some(user_id),None)=>rbx_asset::cloud::Creator::userId(user_id.to_string()),
				(None,Some(group_id))=>rbx_asset::cloud::Creator::groupId(group_id.to_string()),
				other=>Err(anyhow!("Invalid creator {other:?}"))?,
			},
			description:subcommand.description.unwrap_or_else(||String::with_capacity(0)),
			input_files:subcommand.input_files,
			expected_price:subcommand.expected_price,
		}).await,
		Commands::UploadAsset(subcommand)=>upload_asset(UploadAssetConfig{
			cookie:cookie_from_args(
				subcommand.cookie_literal,
				subcommand.cookie_envvar,
				subcommand.cookie_file,
			).await?,
			asset_id:subcommand.asset_id,
			group_id:subcommand.group_id,
			input_file:subcommand.input_file,
			change_name:subcommand.change_name,
			change_description:subcommand.change_description,
			change_free_model:subcommand.change_free_model,
			change_allow_comments:subcommand.change_allow_comments,
		}).await,
		Commands::UploadAssetMedia(subcommand)=>upload_asset_media(UploadAssetMediaConfig{
			api_key:api_key_from_args(
				subcommand.api_key_literal,
				subcommand.api_key_envvar,
				subcommand.api_key_file,
			).await?,
			asset_id:subcommand.asset_id,
			input_file:subcommand.input_file,
		}).await,
		Commands::UploadPlace(subcommand)=>upload_place(UploadPlaceConfig{
			api_key:api_key_from_args(
				subcommand.api_key_literal,
				subcommand.api_key_envvar,
				subcommand.api_key_file,
			).await?,
			place_id:subcommand.place_id,
			universe_id:subcommand.universe_id,
			input_file:subcommand.input_file,
		}).await,
		Commands::Compile(subcommand)=>compile(CompileConfig{
			input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
			output_file:subcommand.output_file,
			template:subcommand.template,
			style:subcommand.style.map(|s|s.rox()),
		}).await,
		Commands::CompileUploadAsset(subcommand)=>compile_upload_asset(CompileUploadAssetConfig{
			input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
			template:subcommand.template,
			style:subcommand.style.map(|s|s.rox()),
			cookie:cookie_from_args(
				subcommand.cookie_literal,
				subcommand.cookie_envvar,
				subcommand.cookie_file,
			).await?,
			asset_id:subcommand.asset_id,
			group_id:subcommand.group_id,
		}).await,
		Commands::CompileUploadPlace(subcommand)=>compile_upload_place(CompileUploadPlaceConfig{
			input_folder:subcommand.input_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
			template:subcommand.template,
			style:subcommand.style.map(|s|s.rox()),
			api_key:api_key_from_args(
				subcommand.api_key_literal,
				subcommand.api_key_envvar,
				subcommand.api_key_file,
			).await?,
			place_id:subcommand.place_id,
			universe_id:subcommand.universe_id,
		}).await,
		Commands::Decompile(subcommand)=>decompile(DecompileConfig{
			style:subcommand.style.rox(),
			input_file:subcommand.input_file,
			output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
			write_template:subcommand.write_template.unwrap_or(false),
			write_models:subcommand.write_models.unwrap_or(false),
			write_scripts:subcommand.write_scripts.unwrap_or(true),
		}).await,
		Commands::DecompileHistoryIntoGit(subcommand)=>decompile_history_into_git(DecompileHistoryConfig{
			git_committer_name:subcommand.git_committer_name,
			git_committer_email:subcommand.git_committer_email,
			input_folder:subcommand.input_folder,
			output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
			style:subcommand.style.rox(),
			write_template:subcommand.write_template.unwrap_or(false),
			write_models:subcommand.write_models.unwrap_or(false),
			write_scripts:subcommand.write_scripts.unwrap_or(true),
		}).await,
		Commands::DownloadAndDecompileHistoryIntoGit(subcommand)=>download_and_decompile_history_into_git(DownloadAndDecompileHistoryConfig{
			git_committer_name:subcommand.git_committer_name,
			git_committer_email:subcommand.git_committer_email,
			cookie:cookie_from_args(
				subcommand.cookie_literal,
				subcommand.cookie_envvar,
				subcommand.cookie_file,
			).await?,
			asset_id:subcommand.asset_id,
			output_folder:subcommand.output_folder.unwrap_or_else(||std::env::current_dir().unwrap()),
			style:subcommand.style.rox(),
			write_template:subcommand.write_template.unwrap_or(false),
			write_models:subcommand.write_models.unwrap_or(false),
			write_scripts:subcommand.write_scripts.unwrap_or(true),
		}).await,
	}
}
|
|
|
|
async fn cookie_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<Cookie>{
|
|
let cookie=match (literal,environment,file){
|
|
(Some(cookie_literal),None,None)=>cookie_literal,
|
|
(None,Some(cookie_environment),None)=>std::env::var(cookie_environment)?,
|
|
(None,None,Some(cookie_file))=>tokio::fs::read_to_string(cookie_file).await?,
|
|
_=>Err(anyhow::Error::msg("Illegal cookie argument triple"))?,
|
|
};
|
|
Ok(Cookie::new(format!(".ROBLOSECURITY={cookie}")))
|
|
}
|
|
async fn api_key_from_args(literal:Option<String>,environment:Option<String>,file:Option<PathBuf>)->AResult<ApiKey>{
|
|
let api_key=match (literal,environment,file){
|
|
(Some(api_key_literal),None,None)=>api_key_literal,
|
|
(None,Some(api_key_environment),None)=>std::env::var(api_key_environment)?,
|
|
(None,None,Some(api_key_file))=>tokio::fs::read_to_string(api_key_file).await?,
|
|
_=>Err(anyhow::Error::msg("Illegal api key argument triple"))?,
|
|
};
|
|
Ok(ApiKey::new(api_key))
|
|
}
|
|
|
|
/// Parameters for creating a new model asset via the legacy
/// cookie-authenticated endpoint (see `create_asset`).
struct CreateAssetConfig{
	cookie:Cookie,
	model_name:String,
	description:String,
	input_file:PathBuf,
	// When `Some`, the asset is created under this group id.
	group:Option<u64>,
	free_model:bool,
	allow_comments:bool,
}
|
|
|
|
async fn create_asset(config:CreateAssetConfig)->AResult<()>{
|
|
let resp=CookieContext::new(config.cookie)
|
|
.create(rbx_asset::cookie::CreateRequest{
|
|
name:config.model_name,
|
|
description:config.description,
|
|
ispublic:config.free_model,
|
|
allowComments:config.allow_comments,
|
|
groupId:config.group,
|
|
},tokio::fs::read(config.input_file).await?).await?;
|
|
println!("UploadResponse={:?}",resp);
|
|
Ok(())
|
|
}
|
|
|
|
/// Parameters for creating a single media asset via the Open Cloud API
/// (see `create_asset_media`).
struct CreateAssetMediaConfig{
	api_key:ApiKey,
	asset_type:rbx_asset::cloud::AssetType,
	model_name:String,
	description:String,
	input_file:PathBuf,
	creator:rbx_asset::cloud::Creator,
	// Spending cap for the create operation; treated as 0 when `None`.
	expected_price:Option<u64>,
}
|
|
|
|
async fn get_asset_exp_backoff(
|
|
context:&CloudContext,
|
|
asset_operation:&rbx_asset::cloud::AssetOperation
|
|
)->Result<rbx_asset::cloud::AssetResponse,rbx_asset::cloud::AssetOperationError>{
|
|
const BACKOFF_MUL:f32=1.3956124250860895286;//exp(1/3)
|
|
let mut backoff=1000f32;
|
|
loop{
|
|
match asset_operation.try_get_asset(&context).await{
|
|
//try again when the operation is not done
|
|
Err(rbx_asset::cloud::AssetOperationError::Operation(rbx_asset::cloud::OperationError::NotDone))=>(),
|
|
//return all other results
|
|
other_result=>return other_result,
|
|
}
|
|
println!("Operation not complete; waiting {:.0}ms...",backoff);
|
|
tokio::time::sleep(std::time::Duration::from_millis(backoff as u64)).await;
|
|
backoff*=BACKOFF_MUL;
|
|
}
|
|
}
|
|
|
|
async fn create_asset_media(config:CreateAssetMediaConfig)->AResult<()>{
|
|
let context=CloudContext::new(config.api_key);
|
|
let asset_response=context
|
|
.create_asset(rbx_asset::cloud::CreateAssetRequest{
|
|
assetType:config.asset_type,
|
|
displayName:config.model_name,
|
|
description:config.description,
|
|
creationContext:rbx_asset::cloud::CreationContext{
|
|
creator:config.creator,
|
|
expectedPrice:Some(config.expected_price.unwrap_or(0)),
|
|
}
|
|
},tokio::fs::read(config.input_file).await?).await?;
|
|
//hardcode a 2 second sleep because roblox be slow
|
|
println!("Asset submitted, waiting 2s...");
|
|
tokio::time::sleep(std::time::Duration::from_secs(2)).await;
|
|
let asset=get_asset_exp_backoff(&context,&asset_response).await?;
|
|
println!("CreateResponse={:?}",asset);
|
|
Ok(())
|
|
}
|
|
|
|
// complex operation requires both api key and cookie! how horrible! roblox please fix!
/// Parameters for the batch media upload pipeline (see `create_asset_medias`).
struct CreateAssetMediasConfig{
	// Open Cloud key: used to create the assets.
	api_key:ApiKey,
	// Legacy cookie: used to download the created decal and read its texture.
	cookie:Cookie,
	// Shared description applied to every created asset.
	description:String,
	input_files:Vec<PathBuf>,
	creator:rbx_asset::cloud::Creator,
	// Spending cap per asset; treated as 0 when `None`.
	expected_price:Option<u64>,
}
|
|
|
|
/// Errors from step 1 of the batch media pipeline: reading a local file and
/// submitting the create request.
#[derive(Debug)]
#[allow(dead_code)]
enum CreateAssetMediasError{
	// The input path has no file stem to use as the asset name.
	NoFileStem(PathBuf),
	IO(std::io::Error),
	// The file's first four bytes matched no supported media format.
	UnknownFourCC(Option<[u8;4]>),
	Create(rbx_asset::cloud::CreateError),
}
impl std::fmt::Display for CreateAssetMediasError{
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>)->std::fmt::Result{
		write!(f,"{self:?}")
	}
}
impl std::error::Error for CreateAssetMediasError{}
|
|
|
|
/// Errors from step 2 of the batch media pipeline: polling the asset
/// operation until completion (wraps step-1 failures so they propagate).
#[derive(Debug)]
#[allow(dead_code)]
enum PollOperationError{
	CreateAssetMedias(CreateAssetMediasError),
	AssetOperation(rbx_asset::cloud::AssetOperationError),
}
impl std::fmt::Display for PollOperationError{
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>)->std::fmt::Result{
		write!(f,"{self:?}")
	}
}
impl std::error::Error for PollOperationError{}
|
|
|
|
/// Errors from step 3 of the batch media pipeline: downloading the created
/// decal and extracting its texture content url (wraps step-2 failures).
#[derive(Debug)]
#[allow(dead_code)]
enum DownloadDecalError{
	PollOperation(PollOperationError),
	// The operation's asset id string failed to parse as a number.
	ParseInt(std::num::ParseIntError),
	Get(rbx_asset::cookie::GetError),
	LoadDom(LoadDomError),
	// The downloaded file's DOM had no child under the root.
	NoFirstInstance,
	// The first instance had no "Texture" property.
	NoTextureProperty,
	// The "Texture" property was not a Content value.
	TexturePropertyInvalid,
}
impl std::fmt::Display for DownloadDecalError{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		write!(f,"{self:?}")
	}
}
impl std::error::Error for DownloadDecalError{}
|
|
|
|
/// Batch-create media assets from local files, then print each file's
/// resulting texture content url as `name=url` on stdout. Per-file failures
/// are reported to stderr without aborting the rest of the batch. Each stage
/// carries `(path, Result)` pairs so the originating file is known at the end.
async fn create_asset_medias(config:CreateAssetMediasConfig)->AResult<()>{
	let context=CloudContext::new(config.api_key);
	let cookie_context=CookieContext::new(config.cookie);
	let expected_price=Some(config.expected_price.unwrap_or(0));
	futures::stream::iter(config.input_files.into_iter()
	//step 1: read file, make create request
	.map(|path|{
		//borrow shared config so each async block can `move` the references
		let description=&config.description;
		let creator=&config.creator;
		let context=&context;
		async move{(path.clone(),
		async move{
			//asset name is derived from the file stem
			let model_name=path.file_stem()
			.and_then(std::ffi::OsStr::to_str)
			.ok_or_else(||CreateAssetMediasError::NoFileStem(path.clone()))?
			.to_owned();
			let file=tokio::fs::read(path).await.map_err(CreateAssetMediasError::IO)?;
			//detect media type from the file's magic bytes
			let asset_type=match file.get(0..4){
				//png
				Some(b"\x89PNG")=>rbx_asset::cloud::AssetType::Decal,
				//jpeg
				Some(b"\xFF\xD8\xFF\xE0")=>rbx_asset::cloud::AssetType::Decal,
				//Some("fbx")=>rbx_asset::cloud::AssetType::Model,
				//Some("ogg")=>rbx_asset::cloud::AssetType::Audio,
				fourcc=>Err(CreateAssetMediasError::UnknownFourCC(fourcc.map(|s|s.try_into().unwrap())))?,
			};
			context.create_asset(rbx_asset::cloud::CreateAssetRequest{
				assetType:asset_type,
				displayName:model_name,
				description:description.clone(),
				creationContext:rbx_asset::cloud::CreationContext{
					creator:creator.clone(),
					expectedPrice:expected_price,
				}
			},file).await.map_err(CreateAssetMediasError::Create)
		}
		.await)}
	}))
	//parallel requests
	.buffer_unordered(CONCURRENT_REQUESTS)
	//step 2: poll operation until it completes
	.then(|(path,create_result)|{
		let context=&context;
		async{(path,
		async{
			let asset_operation=create_result.map_err(PollOperationError::CreateAssetMedias)?;
			get_asset_exp_backoff(context,&asset_operation).await.map_err(PollOperationError::AssetOperation)
		}
		.await)}
	})
	//step 3: read decal id from operation and download it, decode it as a roblox file and extract the texture content url
	.then(|(path,asset_response_result)|{
		let cookie_context=&cookie_context;
		async move{(path,
		async move{
			let asset_response=asset_response_result.map_err(DownloadDecalError::PollOperation)?;
			let file=cookie_context.get_asset(rbx_asset::cookie::GetAssetRequest{
				asset_id:asset_response.assetId.parse().map_err(DownloadDecalError::ParseInt)?,
				version:None,
			}).await.map_err(DownloadDecalError::Get)?;
			let dom=load_dom(std::io::Cursor::new(file)).map_err(DownloadDecalError::LoadDom)?;
			//the decal instance is expected to be the root's first child
			let instance=dom.get_by_ref(
				*dom.root().children().first().ok_or(DownloadDecalError::NoFirstInstance)?
			).ok_or(DownloadDecalError::NoFirstInstance)?;
			let texture=instance.properties.get("Texture").ok_or(DownloadDecalError::NoTextureProperty)?;
			let asset_url=match texture{
				rbx_dom_weak::types::Variant::Content(url)=>url.clone().into_string(),
				_=>Err(DownloadDecalError::TexturePropertyInvalid)?,
			};
			Ok::<_,DownloadDecalError>((asset_response.displayName,asset_url))
		}
		.await)}
	})
	//report one line per input file
	.for_each(|(path,download_decal_result)|async move{
		match download_decal_result{
			Ok((file_name,asset_url))=>println!("{}={}",file_name,asset_url),
			Err(e)=>eprintln!("ERROR file={:?} error={e}",path),
		}
	}).await;
	Ok(())
}
|
|
|
|
/// Parameters for updating an existing asset via the legacy cookie endpoint
/// (see `upload_asset`).
struct UploadAssetConfig{
	cookie:Cookie,
	asset_id:AssetID,
	// `change_*` fields are optional edits; `None` presumably leaves the
	// property untouched — TODO confirm against rbx_asset's UploadRequest.
	change_name:Option<String>,
	change_description:Option<String>,
	change_free_model:Option<bool>,
	change_allow_comments:Option<bool>,
	group_id:Option<u64>,
	input_file:PathBuf,
}
|
|
async fn upload_asset(config:UploadAssetConfig)->AResult<()>{
|
|
let context=CookieContext::new(config.cookie);
|
|
let resp=context.upload(rbx_asset::cookie::UploadRequest{
|
|
assetid:config.asset_id,
|
|
name:config.change_name,
|
|
description:config.change_description,
|
|
ispublic:config.change_free_model,
|
|
allowComments:config.change_allow_comments,
|
|
groupId:config.group_id,
|
|
},tokio::fs::read(config.input_file).await?).await?;
|
|
println!("UploadResponse={:?}",resp);
|
|
Ok(())
|
|
}
|
|
|
|
/// Parameters for updating an existing asset's content via the Open Cloud
/// API (see `upload_asset_media`).
struct UploadAssetMediaConfig{
	api_key:ApiKey,
	asset_id:u64,
	input_file:PathBuf,
}
|
|
async fn upload_asset_media(config:UploadAssetMediaConfig)->AResult<()>{
|
|
let context=CloudContext::new(config.api_key);
|
|
let resp=context.update_asset(rbx_asset::cloud::UpdateAssetRequest{
|
|
assetId:config.asset_id,
|
|
displayName:None,
|
|
description:None,
|
|
},tokio::fs::read(config.input_file).await?).await?;
|
|
println!("UploadResponse={:?}",resp);
|
|
Ok(())
|
|
}
|
|
|
|
/// Parameters for publishing a place file via the Open Cloud API
/// (see `upload_place`).
struct UploadPlaceConfig{
	api_key:ApiKey,
	place_id:u64,
	universe_id:u64,
	input_file:PathBuf,
}
|
|
async fn upload_place(config:UploadPlaceConfig)->AResult<()>{
|
|
let context=CloudContext::new(config.api_key);
|
|
context.update_place(rbx_asset::cloud::UpdatePlaceRequest{
|
|
placeId:config.place_id,
|
|
universeId:config.universe_id,
|
|
},tokio::fs::read(config.input_file).await?).await?;
|
|
Ok(())
|
|
}
|
|
|
|
async fn download_list(cookie:Cookie,asset_id_file_map:AssetIDFileMap)->AResult<()>{
|
|
let context=CookieContext::new(cookie);
|
|
futures::stream::iter(asset_id_file_map.into_iter()
|
|
.map(|(asset_id,file)|{
|
|
let context=&context;
|
|
async move{
|
|
Ok((file,context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version:None}).await?))
|
|
}
|
|
}))
|
|
.buffer_unordered(CONCURRENT_REQUESTS)
|
|
.for_each(|b:AResult<_>|async{
|
|
match b{
|
|
Ok((dest,data))=>if let Err(e)=tokio::fs::write(dest,data).await{
|
|
eprintln!("fs error: {}",e);
|
|
},
|
|
Err(e)=>eprintln!("dl error: {}",e),
|
|
}
|
|
}).await;
|
|
Ok(())
|
|
}
|
|
|
|
async fn get_inventory_pages(context:&CookieContext,group:u64)->AResult<Vec<InventoryItem>>{
|
|
let mut cursor:Option<String>=None;
|
|
let mut asset_list=Vec::new();
|
|
loop{
|
|
let mut page=context.get_inventory_page(rbx_asset::cookie::InventoryPageRequest{group,cursor}).await?;
|
|
asset_list.append(&mut page.data);
|
|
if page.nextPageCursor.is_none(){
|
|
break;
|
|
}
|
|
cursor=page.nextPageCursor;
|
|
}
|
|
Ok(asset_list)
|
|
}
|
|
|
|
async fn download_group_inventory_json(cookie:Cookie,group:u64,output_folder:PathBuf)->AResult<()>{
|
|
let context=CookieContext::new(cookie);
|
|
let item_list=get_inventory_pages(&context,group).await?;
|
|
|
|
let mut path=output_folder.clone();
|
|
path.set_file_name("versions.json");
|
|
tokio::fs::write(path,serde_json::to_string(&item_list)?).await?;
|
|
|
|
Ok(())
|
|
}
|
|
|
|
async fn get_version_history(context:&CookieContext,asset_id:AssetID)->AResult<Vec<AssetVersion>>{
|
|
let mut cursor:Option<String>=None;
|
|
let mut asset_list=Vec::new();
|
|
loop{
|
|
let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id,cursor}).await?;
|
|
asset_list.append(&mut page.data);
|
|
if page.nextPageCursor.is_none(){
|
|
break;
|
|
}
|
|
cursor=page.nextPageCursor;
|
|
}
|
|
asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
|
|
Ok(asset_list)
|
|
}
|
|
|
|
/// Parameters for `download_history`.
struct DownloadHistoryConfig{
	// Resume from an existing `output_folder/versions.json`; only honoured
	// when `end_version` is not explicitly set.
	continue_from_versions:bool,
	// Highest version to download (inclusive); `None` means no upper bound.
	end_version:Option<u64>,
	// Lowest version to download; encountering older versions cancels paging.
	start_version:u64,
	// Folder receiving the `{asset_id}_v{N}.rbxl` files and the summary.
	output_folder:PathBuf,
	// Cookie credential for the legacy web API.
	cookie:Cookie,
	// Asset whose version history is downloaded.
	asset_id:AssetID,
}
|
|
|
|
async fn download_history(mut config:DownloadHistoryConfig)->AResult<()>{
|
|
let mut asset_list_contents=std::collections::HashSet::new();
|
|
let mut asset_list:Vec<AssetVersion>=Vec::new();
|
|
if config.end_version.is_none()&&config.continue_from_versions{
|
|
//load prexisting versions list
|
|
let mut versions_path=config.output_folder.clone();
|
|
versions_path.push("versions.json");
|
|
match std::fs::File::open(versions_path){
|
|
Ok(versions_file)=>asset_list.append(&mut serde_json::from_reader(versions_file)?),
|
|
Err(e)=>match e.kind(){
|
|
std::io::ErrorKind::NotFound=>Err(anyhow::Error::msg("Cannot continue from versions.json - file does not exist"))?,
|
|
_=>Err(e)?,
|
|
}
|
|
}
|
|
//write down which versions are contained
|
|
for asset_version in &asset_list{
|
|
asset_list_contents.insert(asset_version.assetVersionNumber);
|
|
}
|
|
//find the highest number
|
|
match asset_list.iter().map(|asset_version|asset_version.assetVersionNumber).max(){
|
|
Some(max)=>{
|
|
//count down contiguously until a number is missing
|
|
for i in (1..=max).rev(){
|
|
if !asset_list_contents.contains(&i){
|
|
//that is end_version
|
|
config.end_version=Some(i);
|
|
break;
|
|
}
|
|
}
|
|
//if all versions are contained, set start_version to the max + 1
|
|
if config.end_version.is_none(){
|
|
config.start_version=max+1;
|
|
}
|
|
},
|
|
None=>Err(anyhow::Error::msg("Cannot continue from versions.json - there are no previous versions"))?,
|
|
}
|
|
}
|
|
let context=CookieContext::new(config.cookie);
|
|
|
|
//limit concurrent downloads
|
|
let mut join_set=tokio::task::JoinSet::new();
|
|
|
|
//poll paged list of all asset versions
|
|
let mut cursor:Option<String>=None;
|
|
loop{
|
|
let mut page=context.get_asset_versions_page(rbx_asset::cookie::AssetVersionsPageRequest{asset_id:config.asset_id,cursor}).await?;
|
|
let context=&context;
|
|
let output_folder=config.output_folder.clone();
|
|
let data=&page.data;
|
|
let asset_list_contents=&asset_list_contents;
|
|
let join_set=&mut join_set;
|
|
let error_catcher=||async move{
|
|
let mut cancel_paging=false;
|
|
for asset_version in data{
|
|
let version_number=asset_version.assetVersionNumber;
|
|
//skip assets beyond specified end_version
|
|
if config.end_version.is_some_and(|v|v<version_number){
|
|
continue;
|
|
}
|
|
//skip assets lower than start_version and cancel paging asset versions
|
|
if version_number<config.start_version{
|
|
cancel_paging=true;
|
|
continue;//don't trust roblox returned order
|
|
}
|
|
//skip previously downloaded assets
|
|
if asset_list_contents.contains(&version_number){
|
|
continue;
|
|
}
|
|
while CONCURRENT_REQUESTS<=join_set.len(){
|
|
join_set.join_next().await.unwrap()??;
|
|
}
|
|
let context=context.clone();
|
|
let mut path=output_folder.clone();
|
|
path.push(format!("{}_v{}.rbxl",config.asset_id,version_number));
|
|
join_set.spawn(async move{
|
|
let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:Some(version_number)}).await?;
|
|
|
|
tokio::fs::write(path,file).await?;
|
|
|
|
Ok::<_,anyhow::Error>(())
|
|
});
|
|
}
|
|
Ok::<_,anyhow::Error>(cancel_paging)
|
|
};
|
|
let cancel_paging=match error_catcher().await{
|
|
Ok(cancel)=>cancel,
|
|
Err(e)=>{
|
|
println!("download error: {}",e);
|
|
//cancel download and write versions
|
|
true
|
|
},
|
|
};
|
|
if page.nextPageCursor.is_none()||cancel_paging{
|
|
for asset_version in page.data.into_iter(){
|
|
if !(asset_list_contents.contains(&asset_version.assetVersionNumber)
|
|
||config.end_version.is_some_and(|v|v<asset_version.assetVersionNumber)
|
|
||asset_version.assetVersionNumber<config.start_version){
|
|
asset_list.push(asset_version);
|
|
}
|
|
}
|
|
break;
|
|
}else{
|
|
asset_list.append(&mut page.data);
|
|
}
|
|
cursor=page.nextPageCursor;
|
|
}
|
|
|
|
asset_list.sort_by(|a,b|a.assetVersionNumber.cmp(&b.assetVersionNumber));
|
|
|
|
let mut path=config.output_folder.clone();
|
|
path.set_file_name("versions.json");
|
|
tokio::fs::write(path,serde_json::to_string(&asset_list)?).await?;
|
|
|
|
while let Some(result)=join_set.join_next().await{
|
|
result??;
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// Errors produced by `load_dom` when sniffing and parsing a Roblox file.
#[derive(Debug)]
#[allow(dead_code)]
enum LoadDomError{
	// The underlying reader failed.
	IO(std::io::Error),
	// Header had the binary magic (`<roblox!`) but the file failed to parse.
	RbxBinary(rbx_binary::DecodeError),
	// Header had the XML magic (`<roblox `) but the file failed to parse.
	RbxXml(rbx_xml::DecodeError),
	// Header began with `<rob` but bytes 4..8 matched neither known format.
	UnknownRobloxFile([u8;4]),
	// Header did not look like a Roblox file at all.
	UnsupportedFile,
}
impl std::fmt::Display for LoadDomError{
	fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
		// Reuse the Debug representation; these are internal diagnostics.
		write!(f,"{self:?}")
	}
}
impl std::error::Error for LoadDomError{}
|
|
|
|
fn load_dom<R:Read>(input:R)->Result<rbx_dom_weak::WeakDom,LoadDomError>{
|
|
let mut buf=std::io::BufReader::new(input);
|
|
let peek=std::io::BufRead::fill_buf(&mut buf).map_err(LoadDomError::IO)?;
|
|
match &peek[0..4]{
|
|
b"<rob"=>{
|
|
match &peek[4..8]{
|
|
b"lox!"=>rbx_binary::from_reader(buf).map_err(LoadDomError::RbxBinary),
|
|
b"lox "=>rbx_xml::from_reader_default(buf).map_err(LoadDomError::RbxXml),
|
|
other=>Err(LoadDomError::UnknownRobloxFile(other.try_into().unwrap())),
|
|
}
|
|
},
|
|
_=>Err(LoadDomError::UnsupportedFile),
|
|
}
|
|
}
|
|
|
|
|
|
/// Parameters for `decompile`.
struct DecompileConfig{
	// Style forwarded to rox_compiler's write step.
	style:rox_compiler::Style,
	// Roblox file (binary or xml) to decompile.
	input_file:PathBuf,
	// Destination folder for the generated source tree.
	output_folder:PathBuf,
	// Which artifact kinds to emit.
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}
|
|
|
|
async fn decompile(config:DecompileConfig)->AResult<()>{
|
|
//rules:
|
|
//Class Script|LocalScript|ModuleScript->$Name.lua
|
|
//Class Model->$Name.rbxmx
|
|
//overrides.json per-folder [Override{name,class}]
|
|
//Everything else goes into template.rbxlx
|
|
|
|
//read file
|
|
let dom=load_dom(std::io::BufReader::new(std::fs::File::open(config.input_file)?))?;
|
|
let context=rox_compiler::DecompiledContext::from_dom(dom);
|
|
|
|
//generate folders, models, and scripts
|
|
//delete models and scripts from dom
|
|
context.write_files(rox_compiler::WriteConfig{
|
|
style:config.style,
|
|
output_folder:config.output_folder,
|
|
write_template:config.write_template,
|
|
write_models:config.write_models,
|
|
write_scripts:config.write_scripts,
|
|
}).await?;
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// Parameters for `download_decompile`.
struct DownloadDecompileConfig{
	// Cookie credential for the legacy web API.
	cookie:Cookie,
	// Asset to download (latest version).
	asset_id:AssetID,
	// Style forwarded to rox_compiler's write step.
	style:rox_compiler::Style,
	// Destination folder for the generated source tree.
	output_folder:PathBuf,
	// Which artifact kinds to emit.
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}
|
|
|
|
async fn download_decompile(config:DownloadDecompileConfig)->AResult<()>{
|
|
let context=CookieContext::new(config.cookie);
|
|
let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id:config.asset_id,version:None}).await?;
|
|
|
|
let dom=load_dom(std::io::Cursor::new(file))?;
|
|
let context=rox_compiler::DecompiledContext::from_dom(dom);
|
|
|
|
context.write_files(rox_compiler::WriteConfig{
|
|
style:config.style,
|
|
output_folder:config.output_folder,
|
|
write_template:config.write_template,
|
|
write_models:config.write_models,
|
|
write_scripts:config.write_scripts,
|
|
}).await?;
|
|
|
|
Ok(())
|
|
}
|
|
|
|
/// Parameters for `write_commit`: where and how to write one decompiled version.
struct WriteCommitConfig{
	// Identity recorded as author and committer on the generated commit.
	git_committer_name:String,
	git_committer_email:String,
	// Working tree of the target git repository.
	output_folder:PathBuf,
	// Style forwarded to rox_compiler's write step.
	style:rox_compiler::Style,
	// Which artifact kinds to (re)write before committing.
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}
|
|
|
|
/// Write one decompiled asset version into the repo's working tree and commit it.
///
/// `b` is the (possibly failed) result of the decode task: the outer
/// `JoinError` covers a panicked task, the inner `AResult` the decode itself;
/// both propagate via `??`. The commit is authored with the configured
/// name/email and the asset version's creation timestamp, and is skipped
/// entirely when the resulting tree is identical to HEAD's.
async fn write_commit(config:WriteCommitConfig,b:Result<AResult<(AssetVersion,rox_compiler::DecompiledContext)>,tokio::task::JoinError>,repo:&git2::Repository)->AResult<()>{
	let (asset_version,context)=b??;
	println!("writing files for version {}",asset_version.assetVersionNumber);

	//clean output dir
	if config.write_models||config.write_scripts{
		let mut src=config.output_folder.clone();
		src.push("src");
		// Removal failure (e.g. first run, folder absent) is non-fatal.
		match std::fs::remove_dir_all(src){
			Ok(())=>(),
			Err(e)=>println!("remove_dir_all src failed {}",e),
		}
	}
	if config.write_template{
		let mut template=config.output_folder.clone();
		template.push("template.rbxlx");
		match std::fs::remove_file(template){
			Ok(())=>(),
			Err(e)=>println!("remove_file template.rbxlx failed {}",e),
		}
	}

	//write files
	context.write_files(rox_compiler::WriteConfig{
		style:config.style,
		output_folder:config.output_folder.clone(),
		write_template:config.write_template,
		write_models:config.write_models,
		write_scripts:config.write_scripts,
	}).await?;

	let date=asset_version.created;
	//let sig=repo.signature()?; //this pulls default name and email
	// Use the version's upload timestamp (UTC offset 0) so git history dates
	// mirror the Roblox version history.
	let sig=git2::Signature::new(config.git_committer_name.as_str(),config.git_committer_email.as_str(),&git2::Time::new(date.timestamp(),0)).unwrap();
	let tree_id={
		let mut tree_index = repo.index()?;
		// Stage everything: add_all picks up new files, update_all records
		// modifications and deletions of already-tracked files.
		match tree_index.add_all(std::iter::once("*"),git2::IndexAddOption::DEFAULT,None){
			Ok(_)=>(),
			Err(e)=>println!("tree_index.add_all error: {}",e),
		}
		match tree_index.update_all(std::iter::once("*"),None){
			Ok(_)=>(),
			Err(e)=>println!("tree_index.update_all error: {}",e),
		}
		tree_index.write()?;
		tree_index.write_tree()?
	};
	let tree=repo.find_tree(tree_id)?;

	let mut parents=Vec::new();

	match repo.head(){
		Ok(reference)=>{
			let commit=reference.peel_to_commit()?;

			//test tree against commit tree to see if there is any changes
			let commit_tree=commit.tree()?;
			let diff=repo.diff_tree_to_tree(Some(&commit_tree),Some(&tree),None)?;
			if diff.get_delta(0).is_none(){
				// Identical tree: do not create an empty commit for this version.
				println!("no changes");
				return Ok(());
			}

			parents.push(commit);
		},
		// No HEAD yet (fresh repo): the first commit simply has no parents.
		Err(e)=>println!("repo head error {:?}",e),
	};

	repo.commit(
		Some("HEAD"),//update_ref
		&sig,//author
		&sig,//commiter
		&format!("v{}", asset_version.assetVersionNumber),//message
		&tree,//tree (basically files)
		parents.iter().collect::<Vec<&git2::Commit<'_>>>().as_slice(),//parents
	)?;

	//commit
	Ok(())
}
|
|
|
|
/// Parameters for `decompile_history_into_git`.
struct DecompileHistoryConfig{
	// Identity recorded on each generated commit.
	git_committer_name:String,
	git_committer_email:String,
	// Folder containing versions.json and the downloaded .rbxl files.
	input_folder:PathBuf,
	// Style forwarded to rox_compiler's write step.
	style:rox_compiler::Style,
	// Git repository working tree to write and commit into.
	output_folder:PathBuf,
	// Which artifact kinds to write for each version.
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}
|
|
|
|
/// Decompile previously downloaded `{assetId}_v{N}.rbxl` files (listed in the
/// input folder's versions.json) into a git repo, one commit per changed version.
async fn decompile_history_into_git(config:DecompileHistoryConfig)->AResult<()>{
	//use prexisting versions list
	let mut versions_path=config.input_folder.clone();
	versions_path.push("versions.json");
	let asset_list:Vec<AssetVersion>=serde_json::from_reader(std::fs::File::open(versions_path)?)?;

	// NOTE(review): init appears to reuse an existing repo at output_folder — confirm git2 semantics.
	let repo=git2::Repository::init(config.output_folder.as_path())?;

	//decompile all versions
	futures::stream::iter(asset_list.into_iter()
	.map(|asset_version|{
		let mut file_path=config.input_folder.clone();
		// Dom parsing is CPU-bound, so run it on the blocking thread pool.
		tokio::task::spawn_blocking(move||{
			file_path.push(format!("{}_v{}.rbxl",asset_version.assetId,asset_version.assetVersionNumber));
			let file=std::fs::File::open(file_path)?;
			let dom=load_dom(file)?;
			let contents=rox_compiler::DecompiledContext::from_dom(dom);
			Ok::<_,anyhow::Error>((asset_version,contents))
		})
	}))
	// `buffered` (not buffer_unordered) preserves list order so commits are
	// created in version order.
	.buffered(CONCURRENT_DECODE)
	.for_each(|join_handle_result|async{
		match write_commit(WriteCommitConfig{
			git_committer_name:config.git_committer_name.clone(),
			git_committer_email:config.git_committer_email.clone(),
			style:config.style,
			output_folder:config.output_folder.clone(),
			write_template:config.write_template,
			write_models:config.write_models,
			write_scripts:config.write_scripts,
		},join_handle_result,&repo).await{
			Ok(())=>(),
			// One failed version shouldn't abort the rest of the history.
			Err(e)=>println!("decompile/write/commit error: {}",e),
		}
	}).await;
	Ok(())
}
|
|
|
|
/// Parameters for `download_and_decompile_history_into_git`.
struct DownloadAndDecompileHistoryConfig{
	// Cookie credential for the legacy web API.
	cookie:Cookie,
	// Asset whose version history is downloaded.
	asset_id:AssetID,
	// Identity recorded on each generated commit.
	git_committer_name:String,
	git_committer_email:String,
	// Style forwarded to rox_compiler's write step.
	style:rox_compiler::Style,
	// Git repository working tree to write and commit into.
	output_folder:PathBuf,
	// Which artifact kinds to write for each version.
	write_template:bool,
	write_models:bool,
	write_scripts:bool,
}
|
|
|
|
/// Download an asset's full version history and decompile each version into a
/// git repo at `output_folder`, one commit per changed version.
async fn download_and_decompile_history_into_git(config:DownloadAndDecompileHistoryConfig)->AResult<()>{
	let context=CookieContext::new(config.cookie);

	//poll paged list of all asset versions
	let asset_list=get_version_history(&context,config.asset_id).await?;

	// NOTE(review): init appears to reuse an existing repo at output_folder — confirm git2 semantics.
	let repo=git2::Repository::init(config.output_folder.clone())?;

	//download all versions
	let asset_id=config.asset_id;
	futures::stream::iter(asset_list.into_iter()
	.map(|asset_version|{
		let context=context.clone();
		// Each version is downloaded and parsed on its own spawned task.
		tokio::task::spawn(async move{
			let file=context.get_asset(rbx_asset::cookie::GetAssetRequest{asset_id,version:Some(asset_version.assetVersionNumber)}).await?;
			let dom=load_dom(std::io::Cursor::new(file))?;
			Ok::<_,anyhow::Error>((asset_version,rox_compiler::DecompiledContext::from_dom(dom)))
		})
	}))
	// `buffered` (not buffer_unordered) preserves list order so commits are
	// created in version order.
	.buffered(CONCURRENT_DECODE)
	.for_each(|join_handle_result|async{
		match write_commit(WriteCommitConfig{
			style:config.style,
			git_committer_name:config.git_committer_name.clone(),
			git_committer_email:config.git_committer_email.clone(),
			output_folder:config.output_folder.clone(),
			write_template:config.write_template,
			write_models:config.write_models,
			write_scripts:config.write_scripts,
		},join_handle_result,&repo).await{
			Ok(())=>(),
			// One failed version shouldn't abort the rest of the history.
			Err(e)=>println!("download/unzip/decompile/write/commit error: {}",e),
		}
	}).await;
	Ok(())
}
|
|
|
|
/// Parameters for `compile`.
struct CompileConfig{
	// Source folder to compile.
	input_folder:PathBuf,
	// Output path; if it is an existing extensionless path, `place.rbxl` is appended.
	output_file:PathBuf,
	// Optional place file used as the base dom; defaults to an empty DataModel.
	template:Option<PathBuf>,
	// Optional style hint forwarded to rox_compiler.
	style:Option<rox_compiler::Style>,
}
|
|
|
|
async fn compile(config:CompileConfig)->AResult<()>{
|
|
//basically decompile in reverse order
|
|
//load template dom
|
|
let mut dom=match config.template{
|
|
//mr dom doesn't like tokio files
|
|
Some(template_path)=>load_dom(std::io::BufReader::new(std::fs::File::open(template_path)?))?,
|
|
None=>rbx_dom_weak::WeakDom::new(rbx_dom_weak::InstanceBuilder::new("DataModel")),
|
|
};
|
|
|
|
rox_compiler::compile(rox_compiler::CompileConfig{
|
|
input_folder:config.input_folder,
|
|
style:config.style,
|
|
},&mut dom).await?;
|
|
|
|
let mut output_place=config.output_file.clone();
|
|
if output_place.extension().is_none()&&tokio::fs::try_exists(output_place.as_path()).await?{
|
|
output_place.push("place.rbxl");
|
|
}
|
|
let output=std::io::BufWriter::new(std::fs::File::create(output_place)?);
|
|
//write inner objects
|
|
rbx_binary::to_writer(output,&dom,dom.root().children())?;
|
|
Ok(())
|
|
}
|
|
|
|
/// Parameters for `compile_upload_asset`.
struct CompileUploadAssetConfig{
	// Source folder to compile.
	input_folder:PathBuf,
	// Optional place file used as the base dom; defaults to an empty DataModel.
	template:Option<PathBuf>,
	// Optional style hint forwarded to rox_compiler.
	style:Option<rox_compiler::Style>,
	// Cookie credential for the legacy upload API.
	cookie:Cookie,
	// Group to attribute the upload to, forwarded as `groupId`.
	group_id:Option<u64>,
	// Asset to upload the compiled result to.
	asset_id:AssetID,
}
|
|
async fn compile_upload_asset(config:CompileUploadAssetConfig)->AResult<()>{
|
|
let mut dom=match config.template{
|
|
//mr dom doesn't like tokio files
|
|
Some(template_path)=>load_dom(std::io::BufReader::new(std::fs::File::open(template_path)?))?,
|
|
None=>rbx_dom_weak::WeakDom::new(rbx_dom_weak::InstanceBuilder::new("DataModel")),
|
|
};
|
|
|
|
rox_compiler::compile(rox_compiler::CompileConfig{
|
|
input_folder:config.input_folder,
|
|
style:config.style,
|
|
},&mut dom).await?;
|
|
|
|
//make a binary file in a buffer in memory
|
|
let mut data=Vec::new();
|
|
rbx_binary::to_writer(std::io::Cursor::new(&mut data),&dom,dom.root().children())?;
|
|
|
|
//upload it
|
|
let context=CookieContext::new(config.cookie);
|
|
let resp=context.upload(rbx_asset::cookie::UploadRequest{
|
|
groupId:config.group_id,
|
|
assetid:config.asset_id,
|
|
name:None,
|
|
description:None,
|
|
ispublic:None,
|
|
allowComments:None,
|
|
},data).await?;
|
|
println!("UploadResponse={:?}",resp);
|
|
Ok(())
|
|
}
|
|
|
|
/// Parameters for `compile_upload_place`.
struct CompileUploadPlaceConfig{
	// Source folder to compile.
	input_folder:PathBuf,
	// Optional place file used as the base dom; defaults to an empty DataModel.
	template:Option<PathBuf>,
	// Optional style hint forwarded to rox_compiler.
	style:Option<rox_compiler::Style>,
	// Open Cloud API key used to authenticate.
	api_key:ApiKey,
	// Target place within the universe.
	place_id:u64,
	// Universe (experience) that owns the place.
	universe_id:u64,
}
|
|
async fn compile_upload_place(config:CompileUploadPlaceConfig)->AResult<()>{
|
|
let mut dom=match config.template{
|
|
//mr dom doesn't like tokio files
|
|
Some(template_path)=>load_dom(std::io::BufReader::new(std::fs::File::open(template_path)?))?,
|
|
None=>rbx_dom_weak::WeakDom::new(rbx_dom_weak::InstanceBuilder::new("DataModel")),
|
|
};
|
|
|
|
rox_compiler::compile(rox_compiler::CompileConfig{
|
|
input_folder:config.input_folder,
|
|
style:config.style,
|
|
},&mut dom).await?;
|
|
|
|
//make a binary file in a buffer in memory
|
|
let mut data=Vec::new();
|
|
rbx_binary::to_writer(std::io::Cursor::new(&mut data),&dom,dom.root().children())?;
|
|
|
|
//upload it
|
|
let context=CloudContext::new(config.api_key);
|
|
let resp=context.update_place(rbx_asset::cloud::UpdatePlaceRequest{
|
|
universeId:config.universe_id,
|
|
placeId:config.place_id,
|
|
},data).await?;
|
|
println!("UploadResponse={:?}",resp);
|
|
Ok(())
|
|
}
|