// forked from StrafesNET/roblox-bot-player
use std::num::NonZeroU32;
use std::path::PathBuf;

use strafesnet_common::session::Time as SessionTime;
/// Top-level CLI subcommands.
#[derive(clap::Subcommand)]
pub enum Commands{
	/// Render a bot replay on a map into an mp4 video file.
	Encode(EncodeSubcommand),
}
impl Commands{
|
|
pub fn run(self){
|
|
match self{
|
|
Commands::Encode(command)=>command.run().unwrap(),
|
|
}
|
|
}
|
|
}
|
|
|
|
#[derive(clap::Args)]
|
|
pub struct EncodeSubcommand{
|
|
#[arg(long,short)]
|
|
map:PathBuf,
|
|
#[arg(long,short)]
|
|
bot:PathBuf,
|
|
#[arg(long,short)]
|
|
output_file:Option<PathBuf>,
|
|
#[arg(long,short)]
|
|
width:Option<NonZeroU32>,
|
|
#[arg(long,short)]
|
|
height:Option<NonZeroU32>,
|
|
#[arg(long)]
|
|
fps:Option<u32>,
|
|
#[arg(long)]
|
|
target_bitrate:Option<u64>,
|
|
#[arg(long)]
|
|
max_bitrate:Option<u64>,
|
|
#[arg(long)]
|
|
device:Option<String>,
|
|
}
|
|
impl EncodeSubcommand{
|
|
fn run(self)->Result<(),EncodeError>{
|
|
encode(EncodeParams{
|
|
width:self.width.unwrap_or(NonZeroU32::new(1920).unwrap()),
|
|
height:self.width.unwrap_or(NonZeroU32::new(1080).unwrap()),
|
|
target_framerate:self.fps.unwrap_or(60),
|
|
average_bitrate:self.target_bitrate.unwrap_or(6_000_000),
|
|
max_bitrate:self.max_bitrate.unwrap_or(6_000_000),
|
|
device:self.device,
|
|
output_file:self.output_file.unwrap_or_else(||{
|
|
let mut output_file:PathBuf=self.bot.file_stem().unwrap().into();
|
|
output_file.set_extension("mp4");
|
|
output_file
|
|
}),
|
|
map:self.map,
|
|
bot:self.bot,
|
|
})
|
|
}
|
|
}
|
|
|
|
/// Errors from the encode pipeline, one variant per failure stage.
///
/// Payloads are only ever surfaced via `Debug` (through `unwrap` in
/// `Commands::run`), hence the `dead_code` expectation.
#[expect(dead_code)]
#[derive(Debug)]
enum EncodeError{
	/// Failed to read the map file from disk.
	ReadMap(std::io::Error),
	/// Failed to read the bot file from disk.
	ReadBot(std::io::Error),
	/// The map file is not a valid SNF container.
	DecodeSNF(strafesnet_snf::Error),
	/// The SNF container did not decode into a complete map.
	DecodeMap(strafesnet_snf::map::Error),
	/// The bot file did not decode as a v0 roblox bot file.
	DecodeBot(strafesnet_roblox_bot_file::v0::Error),
	/// Vulkan instance creation failed.
	CreateInstance(vk_video::VulkanInitError),
	/// Enumerating or creating a Vulkan adapter failed.
	CreateAdapter(vk_video::VulkanInitError),
	/// No adapter name matched the requested --device filter.
	NoAdapter,
	/// Vulkan logical device creation failed.
	CreateDevice(vk_video::VulkanInitError),
	/// The encoder rejected the requested encode parameters.
	VideoEncodeParams(vk_video::VulkanEncoderError),
	/// Creating the wgpu-textures encoder failed.
	VideoCreateTextures(vk_video::VulkanEncoderError),
	/// Encoding a single frame failed.
	VideoEncodeFrame(vk_video::VulkanEncoderError),
	/// Could not create the output file.
	OutputCreateFile(std::io::Error),
	/// Writing the mp4 header failed.
	OutputMp4Start(mp4::Error),
	/// Adding the video track to the mp4 failed.
	OutputMp4AddTrack(mp4::Error),
	/// Writing an mp4 sample failed.
	OutputMp4WriteSample(mp4::Error),
	/// Finalizing the mp4 failed.
	OutputMp4End(mp4::Error),
}
/// Fully-resolved settings for one encode run (all CLI defaults applied).
struct EncodeParams{
	/// Output video width in pixels.
	width:NonZeroU32,
	/// Output video height in pixels.
	height:NonZeroU32,
	/// Frames per second; also used as the mp4 timescale.
	target_framerate:u32,
	/// Average bitrate in bits per second for VBR rate control.
	average_bitrate:u64,
	/// Maximum bitrate in bits per second for VBR rate control.
	max_bitrate:u64,
	/// Optional substring filter to select a Vulkan adapter by name.
	device:Option<String>,
	/// Path to the map file.
	map:PathBuf,
	/// Path to the bot replay file.
	bot:PathBuf,
	/// Path of the mp4 file to create.
	output_file:PathBuf,
}
/// Run the whole encode pipeline: load the map and bot, set up Vulkan and the
/// renderer, then render and hardware-encode one frame per tick into an mp4.
fn encode(params:EncodeParams)->Result<(),EncodeError>{
	let size = glam::uvec2(params.width.get(),params.height.get());
	let target_framerate = params.target_framerate;
	let average_bitrate = params.average_bitrate;
	let max_bitrate = params.max_bitrate;

	let map_file=std::fs::read(params.map).map_err(EncodeError::ReadMap)?;
	let bot_file=std::fs::read(params.bot).map_err(EncodeError::ReadBot)?;

	// read files
	let map=strafesnet_snf::read_map(std::io::Cursor::new(map_file))
		.map_err(EncodeError::DecodeSNF)?
		.into_complete_map()
		.map_err(EncodeError::DecodeMap)?;
	let timelines=strafesnet_roblox_bot_file::v0::read_all_to_block(std::io::Cursor::new(bot_file))
		.map_err(EncodeError::DecodeBot)?;

	// vulkan init
	let vulkan_instance = vk_video::VulkanInstance::new().map_err(EncodeError::CreateInstance)?;
	// When --device is given, pick the first adapter whose name contains the
	// filter substring; otherwise let vk_video choose a default adapter.
	let vulkan_adapter = if let Some(filter)=params.device.as_deref(){
		vulkan_instance.iter_adapters(None)
			.map_err(EncodeError::CreateAdapter)?
			.find(|adapter|adapter.info().name.contains(filter))
			.ok_or(EncodeError::NoAdapter)?
	}else{
		vulkan_instance.create_adapter(None).map_err(EncodeError::CreateAdapter)?
	};
	let vulkan_device = vulkan_adapter
		.create_device(
			wgpu::Features::TEXTURE_COMPRESSION_BC,
			wgpu::ExperimentalFeatures::disabled(),
			wgpu::Limits::defaults(),
		)
		.map_err(EncodeError::CreateDevice)?;

	// playback
	let bot=strafesnet_roblox_bot_player::bot::CompleteBot::new(timelines);
	let mut playback_head=strafesnet_roblox_bot_player::head::PlaybackHead::new(&bot,SessionTime::ZERO);

	let mut wgpu_state = WgpuState::new(
		vulkan_device.wgpu_device(),
		vulkan_device.wgpu_queue(),
		size,
	);

	wgpu_state.change_map(&map);

	// Hardware encoder fed directly from wgpu textures, VBR rate control with
	// a 2-second virtual buffer.
	let mut encoder = vulkan_device
		.create_wgpu_textures_encoder(
			vulkan_device
				.encoder_parameters_high_quality(
					vk_video::parameters::VideoParameters {
						width:params.width,
						height:params.height,
						target_framerate:target_framerate.into(),
					},
					vk_video::parameters::RateControl::VariableBitrate {
						average_bitrate,
						max_bitrate,
						virtual_buffer_size: std::time::Duration::from_secs(2),
					},
				)
				.map_err(EncodeError::VideoEncodeParams)?,
		)
		.map_err(EncodeError::VideoCreateTextures)?;

	let output_file=std::fs::File::create(params.output_file)
		.map_err(EncodeError::OutputCreateFile)?;

	// Standard isom/avc1 brand set; timescale = fps so each frame is 1 tick.
	let mp4_config=mp4::Mp4Config{
		major_brand: str::parse("isom").unwrap(),
		minor_version: 512,
		compatible_brands: vec![
			str::parse("isom").unwrap(),
			str::parse("iso2").unwrap(),
			str::parse("avc1").unwrap(),
			str::parse("mp41").unwrap(),
		],
		timescale:target_framerate,
	};
	let mut mp4=mp4::Mp4Writer::write_start(output_file,&mp4_config)
		.map_err(EncodeError::OutputMp4Start)?;

	let avc_config=mp4::AvcConfig{
		width:params.width.get() as u16,
		height:params.height.get() as u16,
		// make up some data to prevent this underdeveloped library from crashing
		// NOTE(review): dummy SPS/PPS — players that rely on the avcC box may
		// not decode this; presumably the real parameter sets are in-band in
		// the encoded bitstream. TODO confirm.
		seq_param_set:vec![0,0,0,0],
		pic_param_set:vec![],
	};
	let track_config=mp4::TrackConfig{
		track_type:mp4::TrackType::Video,
		timescale:target_framerate,
		language:"eng".to_owned(),
		media_conf:mp4::MediaConfig::AvcConfig(avc_config),
	};

	// First (and only) track added to the writer; assumed to get id 1.
	const TRACK_ID:u32=1;
	mp4.add_track(&track_config)
		.map_err(EncodeError::OutputMp4AddTrack)?;

	// One iteration per output frame: frame count = duration * fps / 1s.
	let duration = bot.duration();
	for i in 0..duration.get()*target_framerate as i64/SessionTime::ONE_SECOND.get() {
		// Session time of frame i (inverse of the frame-count mapping above).
		let time=SessionTime::raw(i*SessionTime::ONE_SECOND.get()/target_framerate as i64);
		playback_head.advance_time(&bot,time);
		let (pos,angles)=playback_head.get_position_angles(&bot,time);
		wgpu_state.render(pos,angles);

		let frame=vk_video::Frame{
			data:wgpu_state.video_texture.clone(),
			pts:None,
		};
		// `false` = never force a keyframe; the encoder decides IDR placement.
		let res=unsafe{encoder.encode(frame,false)}
			.map_err(EncodeError::VideoEncodeFrame)?;

		let mp4_sample=mp4::Mp4Sample{
			start_time:i as u64,
			// 1 tick of the fps-based timescale = exactly one frame.
			duration:1,
			rendering_offset:0,
			// NOTE(review): every sample is marked non-sync, including IDR
			// frames — seeking in the resulting mp4 may be degraded. TODO confirm.
			is_sync:false,
			bytes:res.data.into(),
		};
		mp4.write_sample(TRACK_ID,&mp4_sample)
			.map_err(EncodeError::OutputMp4WriteSample)?;
	}

	mp4.write_end()
		.map_err(EncodeError::OutputMp4End)?;

	Ok(())
}
/// All GPU state for rendering frames and converting them to the NV12 video
/// texture consumed by the encoder.
struct WgpuState {
	device: wgpu::Device,
	queue: wgpu::Queue,
	// graphics output
	graphics:strafesnet_roblox_bot_player::graphics::Graphics,
	// not sure if this needs to stay bound to keep the TextureView valid
	#[expect(unused)]
	graphics_texture: wgpu::Texture,
	// View of the RGB texture the scene renders into; sampled by the YUV pass.
	graphics_texture_view: wgpu::TextureView,
	// video output
	// NV12 texture written by the plane renderers and read by the encoder.
	video_texture: wgpu::Texture,
	// RGB->YUV conversion passes, one per NV12 plane (luma / chroma).
	y_renderer: PlaneRenderer,
	uv_renderer: PlaneRenderer,
}
impl WgpuState {
	/// Build the full render state: the scene renderer targeting an sRGB RGBA
	/// texture, plus an RGB->YUV pass targeting the two planes of an NV12
	/// texture that the video encoder reads.
	fn new(
		device: wgpu::Device,
		queue: wgpu::Queue,
		size: glam::UVec2,
	) -> WgpuState {
		const FORMAT: wgpu::TextureFormat = wgpu::TextureFormat::Rgba8UnormSrgb;
		let graphics = strafesnet_roblox_bot_player::graphics::Graphics::new(&device,&queue,size,FORMAT);

		// Fragment shader with separate entry points for the Y and UV planes.
		let shader = wgpu::include_wgsl!("../shaders/rgb_to_yuv.wgsl");
		let shader = device.create_shader_module(shader);

		// Layout for sampling the RGB scene texture in the conversion shader:
		// binding 0 = 2D float texture, binding 1 = filtering sampler.
		let graphics_texture_bind_group_layout=device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor{
			label:Some("RGB Bind Group Layout"),
			entries:&[
				wgpu::BindGroupLayoutEntry{
					binding:0,
					visibility:wgpu::ShaderStages::FRAGMENT,
					ty:wgpu::BindingType::Texture{
						sample_type:wgpu::TextureSampleType::Float{filterable:true},
						multisampled:false,
						view_dimension:wgpu::TextureViewDimension::D2,
					},
					count:None,
				},
				wgpu::BindGroupLayoutEntry{
					binding:1,
					visibility:wgpu::ShaderStages::FRAGMENT,
					ty:wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
					count:None,
				},
			],
		});

		// Intermediate RGB texture: render target for the scene, then sampled
		// by the YUV conversion pass.
		let graphics_texture=device.create_texture(&wgpu::TextureDescriptor{
			label:Some("RGB texture"),
			format:FORMAT,
			size:wgpu::Extent3d{
				width:size.x,
				height:size.y,
				depth_or_array_layers:1,
			},
			mip_level_count:1,
			sample_count:1,
			dimension:wgpu::TextureDimension::D2,
			usage:wgpu::TextureUsages::RENDER_ATTACHMENT|wgpu::TextureUsages::TEXTURE_BINDING,
			view_formats:&[],
		});
		let graphics_texture_view = graphics_texture.create_view(&wgpu::TextureViewDescriptor {
			label: Some("RGB texture view"),
			aspect: wgpu::TextureAspect::All,
			usage: Some(wgpu::TextureUsages::RENDER_ATTACHMENT|wgpu::TextureUsages::TEXTURE_BINDING),
			..Default::default()
		});
		// Clamp-to-edge + linear filtering for sampling the scene texture.
		let clamp_sampler=device.create_sampler(&wgpu::SamplerDescriptor{
			label:Some("Clamp Sampler"),
			address_mode_u:wgpu::AddressMode::ClampToEdge,
			address_mode_v:wgpu::AddressMode::ClampToEdge,
			address_mode_w:wgpu::AddressMode::ClampToEdge,
			mag_filter:wgpu::FilterMode::Linear,
			min_filter:wgpu::FilterMode::Linear,
			mipmap_filter:wgpu::MipmapFilterMode::Linear,
			..Default::default()
		});
		let graphics_texture_bind_group=device.create_bind_group(&wgpu::BindGroupDescriptor{
			layout:&graphics_texture_bind_group_layout,
			entries:&[
				wgpu::BindGroupEntry{
					binding:0,
					resource:wgpu::BindingResource::TextureView(&graphics_texture_view),
				},
				wgpu::BindGroupEntry{
					binding:1,
					resource:wgpu::BindingResource::Sampler(&clamp_sampler),
				},
			],
			label:Some("Graphics Texture"),
		});

		let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
			label: Some("wgpu pipeline layout"),
			bind_group_layouts: &[
				&graphics_texture_bind_group_layout
			],
			immediate_size: 0,
		});

		// NV12 output texture; COPY_SRC so the encoder can read it back.
		let video_texture = device.create_texture(&wgpu::TextureDescriptor {
			label: Some("wgpu render target"),
			format: wgpu::TextureFormat::NV12,
			usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_SRC,
			dimension: wgpu::TextureDimension::D2,
			sample_count: 1,
			view_formats: &[],
			mip_level_count: 1,
			size: wgpu::Extent3d {
				width: size.x,
				height: size.y,
				depth_or_array_layers: 1,
			},
		});

		// One conversion pass per NV12 plane: Plane0 (Y) and Plane1 (UV),
		// each with its own fragment entry point in the shared shader.
		let y_renderer = PlaneRenderer::new(
			&device,
			&pipeline_layout,
			&shader,
			"fs_main_y",
			&video_texture,
			wgpu::TextureAspect::Plane0,
			graphics_texture_bind_group.clone(),
		);
		let uv_renderer = PlaneRenderer::new(
			&device,
			&pipeline_layout,
			&shader,
			"fs_main_uv",
			&video_texture,
			wgpu::TextureAspect::Plane1,
			graphics_texture_bind_group,
		);

		WgpuState {
			device,
			queue,
			graphics,
			graphics_texture,
			graphics_texture_view,
			video_texture,
			y_renderer,
			uv_renderer,
		}
	}

	/// Upload a new map into the scene renderer.
	fn change_map(&mut self,map:&strafesnet_common::map::CompleteMap){
		self.graphics.change_map(&self.device,&self.queue,map);
	}

	/// Render one frame: draw the scene at (pos, angles) into the RGB texture,
	/// convert it into both NV12 planes, transition the video texture to
	/// COPY_SRC for the encoder, and submit the command buffer.
	fn render(&mut self,pos:glam::Vec3,angles:glam::Vec2) {
		let mut encoder = self
			.device
			.create_command_encoder(&wgpu::CommandEncoderDescriptor {
				label: Some("wgpu encoder"),
			});

		self.graphics.encode_commands(&mut encoder,&self.graphics_texture_view,pos,angles);

		self.y_renderer.render(&mut encoder);
		self.uv_renderer.render(&mut encoder);

		// Leave the NV12 texture in COPY_SRC state so the video encoder can
		// read it after submission. No buffer transitions are needed.
		encoder.transition_resources(
			[].into_iter(),
			[wgpu::TextureTransition {
				texture: &self.video_texture,
				state: wgpu::TextureUses::COPY_SRC,
				selector: None,
			}]
			.into_iter(),
		);

		let buffer = encoder.finish();

		self.queue.submit([buffer]);
	}
}
/// A fullscreen pass that writes one plane (Y or UV) of the NV12 video texture
/// by sampling the RGB scene texture.
struct PlaneRenderer {
	// Bind group exposing the RGB scene texture + sampler to the shader.
	graphics_texture_bind_group: wgpu::BindGroup,
	pipeline: wgpu::RenderPipeline,
	// Which NV12 plane this renderer targets (Plane0 = Y, Plane1 = UV).
	plane: wgpu::TextureAspect,
	// View of that single plane, used as the render target.
	plane_view: wgpu::TextureView,
}
impl PlaneRenderer {
	/// Build the pipeline and plane view for one NV12 plane.
	///
	/// `fragment_entry_point` selects the Y or UV conversion function in the
	/// shared shader; `plane` selects which aspect of `texture` is rendered to.
	fn new(
		device: &wgpu::Device,
		pipeline_layout: &wgpu::PipelineLayout,
		shader: &wgpu::ShaderModule,
		fragment_entry_point: &str,
		texture: &wgpu::Texture,
		plane: wgpu::TextureAspect,
		graphics_texture_bind_group: wgpu::BindGroup,
	) -> Self {
		// Render-target format of the selected plane (per-plane view format of
		// NV12); unwrap: the caller only passes plane aspects of this texture.
		let format = texture.format().aspect_specific_format(plane).unwrap();
		let pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
			label: Some("wgpu pipeline"),
			layout: Some(pipeline_layout),
			cache: None,
			// No vertex buffers: the vertex shader generates a fullscreen
			// triangle from the vertex index (drawn as 0..3 in `render`).
			vertex: wgpu::VertexState {
				module: shader,
				buffers: &[],
				entry_point: None,
				compilation_options: Default::default(),
			},
			fragment: Some(wgpu::FragmentState {
				module: shader,
				entry_point: Some(fragment_entry_point),
				compilation_options: Default::default(),
				targets: &[Some(wgpu::ColorTargetState {
					blend: None,
					format,
					write_mask: wgpu::ColorWrites::ALL,
				})],
			}),
			primitive: wgpu::PrimitiveState {
				topology: wgpu::PrimitiveTopology::TriangleList,
				cull_mode: Some(wgpu::Face::Back),
				polygon_mode: wgpu::PolygonMode::Fill,
				front_face: wgpu::FrontFace::Cw,
				conservative: false,
				unclipped_depth: false,
				strip_index_format: None,
			},
			multiview_mask: None,
			multisample: wgpu::MultisampleState {
				count: 1,
				mask: !0,
				alpha_to_coverage_enabled: false,
			},
			depth_stencil: None,
		});

		// View restricted to the single plane this pass renders into.
		let plane_view = texture.create_view(&wgpu::TextureViewDescriptor {
			label: Some("wgpu render target plane view"),
			aspect: plane,
			usage: Some(wgpu::TextureUsages::RENDER_ATTACHMENT),
			..Default::default()
		});

		Self {
			graphics_texture_bind_group,
			pipeline,
			plane,
			plane_view,
		}
	}

	/// Record the conversion pass: clear the plane, then draw one fullscreen
	/// triangle that samples the RGB texture and writes this plane's values.
	fn render(&self, encoder: &mut wgpu::CommandEncoder) {
		// Clear values in YUV space: Y plane clears to black (0), UV plane
		// clears to neutral chroma (0.5, 0.5). Only plane aspects are possible
		// here, so any other aspect is a construction bug.
		let clear_color = match self.plane {
			wgpu::TextureAspect::Plane0 => wgpu::Color::BLACK,
			wgpu::TextureAspect::Plane1 => wgpu::Color {
				r: 0.5,
				g: 0.5,
				b: 0.0,
				a: 1.0,
			},
			_ => unreachable!(),
		};

		let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
			label: Some("wgpu render pass"),
			timestamp_writes: None,
			occlusion_query_set: None,
			depth_stencil_attachment: None,
			color_attachments: &[Some(wgpu::RenderPassColorAttachment {
				view: &self.plane_view,
				ops: wgpu::Operations {
					load: wgpu::LoadOp::Clear(clear_color),
					store: wgpu::StoreOp::Store,
				},
				resolve_target: None,
				depth_slice: None,
			})],
			multiview_mask: None,
		});

		render_pass.set_bind_group(0,&self.graphics_texture_bind_group,&[]);
		render_pass.set_pipeline(&self.pipeline);
		// Fullscreen triangle: 3 vertices, 1 instance, no vertex buffer.
		render_pass.draw(0..3, 0..1);
	}
}