add video encoder

This commit is contained in:
2026-03-10 09:10:04 -07:00
parent f82d860822
commit 8d1ec94ac2
6 changed files with 571 additions and 2 deletions

14
video-encoder/Cargo.toml Normal file
View File

@@ -0,0 +1,14 @@
[package]
name = "video-encoder"
version = "0.1.0"
edition = "2024"
[dependencies]
glam.workspace = true
wgpu = "28.0.0"
strafesnet_roblox_bot_player.workspace = true
strafesnet_common.workspace = true
strafesnet_graphics.workspace = true
strafesnet_roblox_bot_file.workspace = true
strafesnet_snf.workspace = true
vk-video = "0.2.0"

4
video-encoder/README.md Normal file
View File

@@ -0,0 +1,4 @@
### How it works
- Render RGB to graphics_texture
- Convert RGB to YUV on video_texture
- Encode video frame

51
video-encoder/shaders/rgb_to_yuv.wgsl Normal file
View File

@@ -0,0 +1,51 @@
struct VertexOutput {
@builtin(position) position: vec4<f32>,
@location(1) uv: vec2<f32>,
}
@vertex
fn vs_main(@builtin(vertex_index) vertex_index: u32) -> VertexOutput {
// hacky way to draw a large triangle
let tmp1 = i32(vertex_index) / 2;
let tmp2 = i32(vertex_index) & 1;
var result:VertexOutput;
result.position=vec4<f32>(
f32(tmp1) * 4.0 - 1.0,
f32(tmp2) * 4.0 - 1.0,
1.0,
1.0
);
result.uv=vec2<f32>(
f32(tmp1) * 2.0,
1.0 - f32(tmp2) * 2.0
);
return result;
}
@group(0)
@binding(0)
var texture: texture_2d<f32>;
@group(0)
@binding(1)
var texture_sampler: sampler;
@fragment
fn fs_main_y(input: VertexOutput) -> @location(0) f32 {
let conversion_weights = vec3<f32>(0.2126, 0.7152, 0.0722);
let color = textureSample(texture, texture_sampler, input.uv).rgb;
return clamp(dot(color, conversion_weights), 0.0, 1.0);
}
@fragment
fn fs_main_uv(input: VertexOutput) -> @location(0) vec2<f32> {
let conversion_weights = mat3x2<f32>(
-0.1146, 0.5,
-0.3854, -0.4542,
0.5, -0.0458,
);
let conversion_bias = vec2<f32>(0.5, 0.5);
let color = textureSample(texture, texture_sampler, input.uv).rgb;
return clamp(conversion_weights * color + conversion_bias, vec2(0.0, 0.0), vec2(1.0, 1.0));
}

367
video-encoder/src/main.rs Normal file
View File

@@ -0,0 +1,367 @@
use std::io::Write;
use strafesnet_common::session::Time as SessionTime;
pub fn main(){
let vulkan_instance = vk_video::VulkanInstance::new().unwrap();
let vulkan_adapter = vulkan_instance.create_adapter(None).unwrap();
let vulkan_device = vulkan_adapter
.create_device(
wgpu::Features::TEXTURE_COMPRESSION_BC,
wgpu::ExperimentalFeatures::disabled(),
wgpu::Limits::defaults(),
)
.unwrap();
let size = glam::uvec2(1920,1080);
let target_framerate = 60;
let average_bitrate = 10_000_000;
let max_bitrate = 20_000_000;
let bot_file=include_bytes!("../../web-demo/bhop_marble_7cf33a64-7120-4514-b9fa-4fe29d9523d.qbot");
let map_file=include_bytes!("../../web-demo/bhop_marble_5692093612.snfm");
// decode
let timelines=strafesnet_roblox_bot_file::v0::read_all_to_block(std::io::Cursor::new(bot_file)).unwrap();
let map=strafesnet_snf::read_map(std::io::Cursor::new(map_file)).unwrap().into_complete_map().unwrap();
// playback
let bot=strafesnet_roblox_bot_player::bot::CompleteBot::new(timelines);
let mut playback_head=strafesnet_roblox_bot_player::head::PlaybackHead::new(&bot,SessionTime::ZERO);
let mut wgpu_state = WgpuState::new(
vulkan_device.wgpu_device(),
vulkan_device.wgpu_queue(),
size,
);
wgpu_state.change_map(&map);
let mut encoder = vulkan_device
.create_wgpu_textures_encoder(
vulkan_device
.encoder_parameters_high_quality(
vk_video::parameters::VideoParameters {
width:size.x.try_into().unwrap(),
height:size.y.try_into().unwrap(),
target_framerate:target_framerate.into(),
},
vk_video::parameters::RateControl::VariableBitrate {
average_bitrate,
max_bitrate,
virtual_buffer_size: std::time::Duration::from_secs(2),
},
)
.unwrap(),
)
.unwrap();
let mut output_file = std::fs::File::create("output.h264").unwrap();
let duration = bot.duration();
for i in 0..duration.get()*target_framerate as i64/SessionTime::ONE_SECOND.get() {
let time=SessionTime::raw(i*SessionTime::ONE_SECOND.get()/target_framerate as i64);
playback_head.advance_time(&bot,time);
let (pos,angles)=playback_head.get_position_angles(&bot,time);
wgpu_state.render(pos,angles);
let res = unsafe {
encoder
.encode(
vk_video::Frame {
data: wgpu_state.video_texture.clone(),
pts: None,
},
false,
)
.unwrap()
};
output_file.write_all(&res.data).unwrap();
}
}
struct WgpuState {
device: wgpu::Device,
queue: wgpu::Queue,
// graphics output
graphics:strafesnet_roblox_bot_player::graphics::Graphics,
// not sure if this needs to stay bound to keep the TextureView valid
#[expect(unused)]
graphics_texture: wgpu::Texture,
graphics_texture_view: wgpu::TextureView,
// video output
video_texture: wgpu::Texture,
y_renderer: PlaneRenderer,
uv_renderer: PlaneRenderer,
}
impl WgpuState {
fn new(
device: wgpu::Device,
queue: wgpu::Queue,
size: glam::UVec2,
) -> WgpuState {
const FORMAT: wgpu::TextureFormat = wgpu::TextureFormat::Bgra8UnormSrgb;
let graphics = strafesnet_roblox_bot_player::graphics::Graphics::new(&device,&queue,size,FORMAT);
let shader = wgpu::include_wgsl!("../shaders/rgb_to_yuv.wgsl");
let shader = device.create_shader_module(shader);
let graphics_texture_bind_group_layout=device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor{
label:Some("RGB Bind Group Layout"),
entries:&[
wgpu::BindGroupLayoutEntry{
binding:0,
visibility:wgpu::ShaderStages::FRAGMENT,
ty:wgpu::BindingType::Texture{
sample_type:wgpu::TextureSampleType::Float{filterable:true},
multisampled:false,
view_dimension:wgpu::TextureViewDimension::D2,
},
count:None,
},
wgpu::BindGroupLayoutEntry{
binding:1,
visibility:wgpu::ShaderStages::FRAGMENT,
ty:wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
count:None,
},
],
});
let graphics_texture=device.create_texture(&wgpu::TextureDescriptor{
label:Some("RGB texture"),
format:FORMAT,
size:wgpu::Extent3d{
width:size.x,
height:size.y,
depth_or_array_layers:1,
},
mip_level_count:1,
sample_count:1,
dimension:wgpu::TextureDimension::D2,
usage:wgpu::TextureUsages::RENDER_ATTACHMENT|wgpu::TextureUsages::TEXTURE_BINDING,
view_formats:&[],
});
let graphics_texture_view = graphics_texture.create_view(&wgpu::TextureViewDescriptor {
label: Some("RGB texture view"),
aspect: wgpu::TextureAspect::All,
usage: Some(wgpu::TextureUsages::RENDER_ATTACHMENT|wgpu::TextureUsages::TEXTURE_BINDING),
..Default::default()
});
let clamp_sampler=device.create_sampler(&wgpu::SamplerDescriptor{
label:Some("Clamp Sampler"),
address_mode_u:wgpu::AddressMode::ClampToEdge,
address_mode_v:wgpu::AddressMode::ClampToEdge,
address_mode_w:wgpu::AddressMode::ClampToEdge,
mag_filter:wgpu::FilterMode::Linear,
min_filter:wgpu::FilterMode::Linear,
mipmap_filter:wgpu::MipmapFilterMode::Linear,
..Default::default()
});
let graphics_texture_bind_group=device.create_bind_group(&wgpu::BindGroupDescriptor{
layout:&graphics_texture_bind_group_layout,
entries:&[
wgpu::BindGroupEntry{
binding:0,
resource:wgpu::BindingResource::TextureView(&graphics_texture_view),
},
wgpu::BindGroupEntry{
binding:1,
resource:wgpu::BindingResource::Sampler(&clamp_sampler),
},
],
label:Some("Graphics Texture"),
});
let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some("wgpu pipeline layout"),
bind_group_layouts: &[
&graphics_texture_bind_group_layout
],
immediate_size: 0,
});
let video_texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some("wgpu render target"),
format: wgpu::TextureFormat::NV12,
usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_SRC,
dimension: wgpu::TextureDimension::D2,
sample_count: 1,
view_formats: &[],
mip_level_count: 1,
size: wgpu::Extent3d {
width: size.x,
height: size.y,
depth_or_array_layers: 1,
},
});
let y_renderer = PlaneRenderer::new(
&device,
&pipeline_layout,
&shader,
"fs_main_y",
&video_texture,
wgpu::TextureAspect::Plane0,
graphics_texture_bind_group.clone(),
);
let uv_renderer = PlaneRenderer::new(
&device,
&pipeline_layout,
&shader,
"fs_main_uv",
&video_texture,
wgpu::TextureAspect::Plane1,
graphics_texture_bind_group,
);
WgpuState {
device,
queue,
graphics,
graphics_texture,
graphics_texture_view,
video_texture,
y_renderer,
uv_renderer,
}
}
fn change_map(&mut self,map:&strafesnet_common::map::CompleteMap){
self.graphics.change_map(&self.device,&self.queue,map);
}
fn render(&mut self,pos:glam::Vec3,angles:glam::Vec2) {
let mut encoder = self
.device
.create_command_encoder(&wgpu::CommandEncoderDescriptor {
label: Some("wgpu encoder"),
});
self.graphics.encode_commands(&mut encoder,&self.graphics_texture_view,pos,angles);
self.y_renderer.render(&mut encoder);
self.uv_renderer.render(&mut encoder);
encoder.transition_resources(
[].into_iter(),
[wgpu::TextureTransition {
texture: &self.video_texture,
state: wgpu::TextureUses::COPY_SRC,
selector: None,
}]
.into_iter(),
);
let buffer = encoder.finish();
self.queue.submit([buffer]);
}
}
struct PlaneRenderer {
graphics_texture_bind_group: wgpu::BindGroup,
pipeline: wgpu::RenderPipeline,
plane: wgpu::TextureAspect,
plane_view: wgpu::TextureView,
}
impl PlaneRenderer {
fn new(
device: &wgpu::Device,
pipeline_layout: &wgpu::PipelineLayout,
shader: &wgpu::ShaderModule,
fragment_entry_point: &str,
texture: &wgpu::Texture,
plane: wgpu::TextureAspect,
graphics_texture_bind_group: wgpu::BindGroup,
) -> Self {
let format = texture.format().aspect_specific_format(plane).unwrap();
let pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some("wgpu pipeline"),
layout: Some(pipeline_layout),
cache: None,
vertex: wgpu::VertexState {
module: shader,
buffers: &[],
entry_point: None,
compilation_options: Default::default(),
},
fragment: Some(wgpu::FragmentState {
module: shader,
entry_point: Some(fragment_entry_point),
compilation_options: Default::default(),
targets: &[Some(wgpu::ColorTargetState {
blend: None,
format,
write_mask: wgpu::ColorWrites::ALL,
})],
}),
primitive: wgpu::PrimitiveState {
topology: wgpu::PrimitiveTopology::TriangleList,
cull_mode: Some(wgpu::Face::Back),
polygon_mode: wgpu::PolygonMode::Fill,
front_face: wgpu::FrontFace::Cw,
conservative: false,
unclipped_depth: false,
strip_index_format: None,
},
multiview_mask: None,
multisample: wgpu::MultisampleState {
count: 1,
mask: !0,
alpha_to_coverage_enabled: false,
},
depth_stencil: None,
});
let plane_view = texture.create_view(&wgpu::TextureViewDescriptor {
label: Some("wgpu render target plane view"),
aspect: plane,
usage: Some(wgpu::TextureUsages::RENDER_ATTACHMENT),
..Default::default()
});
Self {
graphics_texture_bind_group,
pipeline,
plane,
plane_view,
}
}
fn render(&self, encoder: &mut wgpu::CommandEncoder) {
let clear_color = match self.plane {
wgpu::TextureAspect::Plane0 => wgpu::Color::BLACK,
wgpu::TextureAspect::Plane1 => wgpu::Color {
r: 0.5,
g: 0.5,
b: 0.0,
a: 1.0,
},
_ => unreachable!(),
};
let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("wgpu render pass"),
timestamp_writes: None,
occlusion_query_set: None,
depth_stencil_attachment: None,
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
view: &self.plane_view,
ops: wgpu::Operations {
load: wgpu::LoadOp::Clear(clear_color),
store: wgpu::StoreOp::Store,
},
resolve_target: None,
depth_slice: None,
})],
multiview_mask: None,
});
render_pass.set_bind_group(0,&self.graphics_texture_bind_group,&[]);
render_pass.set_pipeline(&self.pipeline);
render_pass.draw(0..3, 0..1);
}
}