24 Commits
peq ... master

Author SHA1 Message Date
c807fd98e6 v0.9.4 Head::partition_point 2026-03-18 09:00:31 -07:00
d108697552 new Head constructor 2026-03-18 08:58:20 -07:00
0cd02fbb75 v0.9.3 fix Head::after_time 2026-02-27 08:33:57 -08:00
3674bb38b3 slightly wrong 2026-02-27 08:33:40 -08:00
3323b0109a v0.9.2 fix Head::after_Time 2026-02-27 07:31:18 -08:00
8c776f29b0 plain wrong 2026-02-27 07:30:51 -08:00
87ba477f63 v0.9.1 Head fns 2026-02-27 06:42:10 -08:00
508a0db66f add functions to Head 2026-02-27 06:39:19 -08:00
e88d62c3ea v0.9.0 event enum un-unification + exposed Head 2026-02-20 07:06:32 -08:00
1b6b02ef5a expose Head mechanism for bot player 2026-02-20 07:04:14 -08:00
dc87100e91 impl Copy for Vector types 2026-02-20 07:03:44 -08:00
224a873d89 un-unify setting event to name fields 2026-02-13 10:06:47 -08:00
3cf39032cc const trey float convert 2026-02-13 10:05:25 -08:00
0c896f3ee1 doc RunPrepareEvent 2026-02-13 07:21:32 -08:00
17edf88e05 un-unify camera event to name fields 2026-02-13 07:17:40 -08:00
42e91e3627 v0.8.1 serializer 2025-12-15 15:09:51 -08:00
f9c5ef7b44 Implement Serializer (#3)
Add an algorithm to generate a bot file from a set of timelines.

Reviewed-on: #3
Co-authored-by: Rhys Lloyd <krakow20@gmail.com>
Co-committed-by: Rhys Lloyd <krakow20@gmail.com>
2025-12-15 23:09:21 +00:00
b9aaf9d30f v0.8.0 drop itertools 2025-12-15 15:07:49 -08:00
8b365a0579 explicitly note that timelines must be sorted 2025-12-15 14:52:37 -08:00
71685468b6 reexport binrw::Error and make it appear in fn signatures 2025-12-15 14:48:43 -08:00
9ace707bbb refactor tests 2025-12-15 14:41:41 -08:00
7cb66719b0 add InvalidBlockId error 2025-12-15 14:41:24 -08:00
27aa3fb5d1 drop itertools dep (temporarily) 2025-12-15 14:30:07 -08:00
a6c10eab21 use correct trait bound 2025-12-14 11:42:44 -08:00
5 changed files with 488 additions and 90 deletions

2
Cargo.lock generated
View File

@@ -85,7 +85,7 @@ dependencies = [
[[package]]
name = "strafesnet_roblox_bot_file"
version = "0.7.0"
version = "0.9.4"
dependencies = [
"binrw",
"bitflags",

View File

@@ -1,7 +1,7 @@
[package]
name = "strafesnet_roblox_bot_file"
version = "0.7.0"
edition = "2021"
version = "0.9.4"
edition = "2024"
[dependencies]
binrw = "0.15.0"

View File

@@ -1,3 +1,5 @@
pub use binrw::Error as BinrwError;
pub mod v0;
#[cfg(test)]

View File

@@ -1,40 +1,41 @@
use crate::v0::{Block,BlockTimelines,FileHeader,Timed};
use crate::v0;
use crate::v0::{Block,BlockTimelines,FileHeader};
#[test]
fn _1(){
fn deserialize_manual()->Result<(),binrw::Error>{
let file=std::fs::read("files/bhop_marble_7cf33a64-7120-4514-b9fa-4fe29d9523d").unwrap();
let mut input=std::io::Cursor::new(file);
let header=FileHeader::from_reader(&mut input).unwrap();
let timelines=BlockTimelines::from_reader(&header,&mut input).unwrap();
println!("header={:?}",header);
for &Timed{time,event:block_id} in timelines.offline_blocks(){
println!("offline time={} block_id={:?}",time,block_id);
let take_seek=timelines.block_info(block_id).unwrap().take_seek(&mut input).unwrap();
let _block=Block::from_reader(take_seek).unwrap();
// offline blocks include the following event types:
// World, Gravity, Run, Camera, Setting
let header=FileHeader::from_reader(&mut input)?;
let timelines=BlockTimelines::from_reader(&header,&mut input)?;
for block in timelines.offline_blocks(){
let block_info=timelines.block_info(block.event).unwrap();
let block_reader=block_info.take_seek(&mut input)?;
let _block=Block::from_reader(block_reader)?;
}
for &Timed{time,event:block_id} in timelines.realtime_blocks(){
println!("realtime time={} block_id={:?}",time,block_id);
let take_seek=timelines.block_info(block_id).unwrap().take_seek(&mut input).unwrap();
let _block=Block::from_reader(take_seek).unwrap();
// realtime blocks include the following event types:
// Input, Output, Sound
for block in timelines.realtime_blocks(){
let block_info=timelines.block_info(block.event).unwrap();
let block_reader=block_info.take_seek(&mut input)?;
let _block=Block::from_reader(block_reader)?;
}
Ok(())
}
#[test]
// Smoke test: parse the whole fixture file (offline + realtime blocks) into a single Block.
fn deserialize_all()->Result<(),v0::Error>{
// Sample replay fixture checked into the repository under files/.
let file=std::fs::read("files/bhop_marble_7cf33a64-7120-4514-b9fa-4fe29d9523d").unwrap();
let _block=v0::read_all_to_block(std::io::Cursor::new(file))?;
Ok(())
}
#[test]
#[cfg(feature="itertools")]
fn _2()->Result<(),crate::v0::Error>{
fn serialize_round_trip()->Result<(),binrw::Error>{
let file=std::fs::read("files/bhop_marble_7cf33a64-7120-4514-b9fa-4fe29d9523d").unwrap();
let block=v0::read_all_to_block(std::io::Cursor::new(file.as_slice())).unwrap();
let t0=std::time::Instant::now();
let _block=crate::v0::read_all_to_block(std::io::Cursor::new(file))?;
println!("{:?}",t0.elapsed());
let mut data=Vec::with_capacity(file.len());
v0::serialize(&block,&mut std::io::Cursor::new(&mut data))?;
// TODO: It encodes, but is it equal? Test something! PartialEq?
Ok(())
}
// TODO: file serialization test

519
src/v0.rs
View File

@@ -2,39 +2,59 @@ use std::io::{SeekFrom,Error as IoError};
use binrw::binrw;
use binrw::io::{TakeSeek,TakeSeekExt};
use binrw::BinReaderExt;
use crate::BinrwError;
// the bit chunks are deposited in reverse
fn read_trey_float(bits:u32)->f32{
/// Decode a "trey"-encoded f32: the on-disk layout stores the IEEE-754
/// fields in reverse order (sign in bit 0, exponent in bits 1..=8,
/// mantissa in bits 9..=31). Inverse of `write_trey_float`.
const fn read_trey_float(bits:u32)->f32{
    let sign=bits&1;
    let exponent=(bits>>1)&0xFF;
    let mantissa=(bits>>9)&0x7F_FFFF;
    // Reassemble into the standard IEEE-754 single-precision layout.
    f32::from_bits((sign<<31)|(exponent<<23)|mantissa)
}
fn read_trey_double(bits:u64)->f64{
/// Encode an f32 into the "trey" layout: IEEE-754 fields deposited in
/// reverse order (sign lowest, then exponent, then mantissa on top).
/// Inverse of `read_trey_float`. Takes a reference to match binrw's
/// `#[bw(map=...)]` calling convention.
const fn write_trey_float(value:&f32)->u32{
    let ieee=value.to_bits();
    let sign=(ieee>>31)&1;
    let exponent=(ieee>>23)&0xFF;
    let mantissa=ieee&0x7F_FFFF;
    (mantissa<<9)|(exponent<<1)|sign
}
/// Decode a "trey"-encoded f64: sign in bit 0, 11-bit exponent in
/// bits 1..=11, 52-bit mantissa in bits 12..=63.
/// Inverse of `write_trey_double`.
const fn read_trey_double(bits:u64)->f64{
    let sign=bits&1;
    let exponent=(bits>>1)&0x7FF;
    let mantissa=(bits>>12)&0xF_FFFF_FFFF_FFFF;
    // Reassemble into the standard IEEE-754 double-precision layout.
    f64::from_bits((sign<<63)|(exponent<<52)|mantissa)
}
/// Encode an f64 into the "trey" layout: IEEE-754 fields deposited in
/// reverse order (sign lowest, then 11-bit exponent, then 52-bit
/// mantissa on top). Inverse of `read_trey_double`. Takes a reference
/// to match binrw's `#[bw(map=...)]` calling convention.
const fn write_trey_double(value:&f64)->u64{
    let ieee=value.to_bits();
    let sign=(ieee>>63)&1;
    let exponent=(ieee>>52)&0x7FF;
    let mantissa=ieee&0xF_FFFF_FFFF_FFFF;
    (mantissa<<12)|(exponent<<1)|sign
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
#[derive(Debug,Clone,Copy)]
pub struct Vector2{
#[br(map=read_trey_float)]
#[bw(map=write_trey_float)]
pub x:f32,
#[br(map=read_trey_float)]
#[bw(map=write_trey_float)]
pub y:f32,
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
#[derive(Debug,Clone,Copy)]
pub struct Vector3{
#[br(map=read_trey_float)]
#[bw(map=write_trey_float)]
pub x:f32,
#[br(map=read_trey_float)]
#[bw(map=write_trey_float)]
pub y:f32,
#[br(map=read_trey_float)]
#[bw(map=write_trey_float)]
pub z:f32,
}
@@ -84,6 +104,7 @@ pub struct Timed<E>
E:for<'a>binrw::BinWrite<Args<'a>=()>,
{
#[br(map=read_trey_double)]
#[bw(map=write_trey_double)]
pub time:f64,
pub event:E,
}
@@ -213,6 +234,7 @@ pub struct WorldEventButton{
#[derive(Debug,Clone)]
pub struct WorldEventSetTime{
#[br(map=read_trey_double)]
#[bw(map=write_trey_double)]
pub time:f64,
#[br(temp)]
#[bw(ignore)]
@@ -328,6 +350,7 @@ pub enum FlagReason{
#[brw(magic=9u32)]
Practice,
}
/// Creates a new run when the player enters a start zone.
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
@@ -390,49 +413,95 @@ pub enum RunEvent{
}
// camera
/// Punches the camera when the player has an intense collision.
#[binrw]
#[brw(little)]
#[derive(Debug,Clone,Copy,Hash,Eq,PartialEq)]
pub enum CameraEventType{
#[brw(magic=0u32)]
CameraPunch,
#[brw(magic=1u32)]
Transform,
#[derive(Debug,Clone)]
pub struct CameraEventCameraPunch{
pub rot_velocity:Vector3,
}
/// Rotates the camera when the player goes through a wormhole.
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct CameraEventTransform{
// presumably axis-angle encoding (direction = axis, magnitude = angle) — TODO confirm
pub axis_angle:Vector3,
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct CameraEvent{
pub camera_event_type:CameraEventType,
pub value:Vector3,
pub enum CameraEvent{
#[brw(magic=0u32)]
CameraPunch(CameraEventCameraPunch),
#[brw(magic=1u32)]
Transform(CameraEventTransform),
}
// setting
#[binrw]
#[brw(little)]
#[derive(Debug,Clone,Copy,Hash,Eq,PartialEq)]
pub enum SettingType{
#[brw(magic=0u32)]
FieldOfView,
#[brw(magic=1u32)]
Sensitivity,
#[brw(magic=2u32)]
VerticalSensitivityMultiplier,
#[brw(magic=3u32)]
AbsoluteSensitivity,
#[brw(magic=4u32)]
TurnSpeed,
#[derive(Debug,Clone)]
pub struct SettingEventFieldOfView{
#[br(map=read_trey_double)]
#[bw(map=write_trey_double)]
pub fov:f64,
}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct SettingEvent{
pub setting_type:SettingType,
pub struct SettingEventSensitivity{
#[br(map=read_trey_double)]
pub value:f64,
#[bw(map=write_trey_double)]
pub sensitivity:f64,
}
/// Setting payload: multiplier applied to vertical mouse sensitivity.
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct SettingEventVerticalSensitivityMultiplier{
// Stored on disk in the bit-reversed "trey" f64 layout.
#[br(map=read_trey_double)]
#[bw(map=write_trey_double)]
pub multiplier:f64,
}
/// Setting payload: whether absolute sensitivity mode is enabled.
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct SettingEventAbsoluteSensitivity{
// On disk this is a trey-encoded f64. Only a value of exactly 1.0 reads
// back as true; anything else (0.0, 0.5, NaN, ...) becomes false.
// Writing maps true -> 1.0 and false -> 0.0, so round-trips are lossy
// for non-canonical inputs but stable after one pass.
#[br(map=|v:u64|read_trey_double(v)==1.0)]
#[bw(map=|&enabled:&bool|
if enabled{
write_trey_double(&1.0)
}else{
write_trey_double(&0.0)
}
)]
pub enabled:bool,
}
/// Setting payload: the player's turn speed setting.
/// (units not determinable from here — TODO confirm against game client)
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub struct SettingEventTurnSpeed{
// Stored on disk in the bit-reversed "trey" f64 layout.
#[br(map=read_trey_double)]
#[bw(map=write_trey_double)]
pub turn_speed:f64,
}
/// A change to one of the player's settings.
/// On disk each variant is tagged by a little-endian u32 magic (0..=4)
/// followed by the variant's payload struct.
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
pub enum SettingEvent{
#[brw(magic=0u32)]
FieldOfView(SettingEventFieldOfView),
#[brw(magic=1u32)]
Sensitivity(SettingEventSensitivity),
#[brw(magic=2u32)]
VerticalSensitivityMultiplier(SettingEventVerticalSensitivityMultiplier),
#[brw(magic=3u32)]
AbsoluteSensitivity(SettingEventAbsoluteSensitivity),
#[brw(magic=4u32)]
TurnSpeed(SettingEventTurnSpeed),
}
/// A segment of event timelines.
/// Timelines are always sorted.
#[derive(Default)]
pub struct Block{
pub input_events:Vec<Timed<InputEvent>>,
@@ -447,7 +516,8 @@ pub struct Block{
#[binrw]
#[brw(little)]
enum EventType{
#[derive(Clone,Copy)]
pub enum EventType{
#[brw(magic=1u32)]
Input,
#[brw(magic=2u32)]
@@ -473,15 +543,15 @@ struct EventChunkHeader{
}
// binread args tech has been further refined
fn read_data_into_events<'a,R,T,F>(
fn read_data_into_events<R,T,F>(
data:&mut R,
events:&mut Vec<T>,
num_events:usize,
reserve_fn:F,
)->binrw::BinResult<()>
)->Result<(),BinrwError>
where
R:BinReaderExt,
T:binrw::BinRead<Args<'a>=()>,
T:for<'a> binrw::BinRead<Args<'a>=()>,
F:Fn(&mut Vec<T>,usize),
{
reserve_fn(events,num_events);
@@ -492,7 +562,7 @@ fn read_data_into_events<'a,R,T,F>(
}
impl Block{
pub fn from_reader<R:BinReaderExt>(data:R)->binrw::BinResult<Block>{
pub fn from_reader<R:BinReaderExt>(data:R)->Result<Block,BinrwError>{
let mut block=Block::default();
// there is only supposed to be at most one of each type
// of event chunk per block, so allocate the size exactly.
@@ -501,7 +571,7 @@ impl Block{
}
/// Read a complete data block and append the elements to the timelines in this block.
/// Reserves exactly enough information for the new data.
pub fn extend_from_reader_exact<R:BinReaderExt>(&mut self,mut data:R)->binrw::BinResult<()>{
pub fn extend_from_reader_exact<R:BinReaderExt>(&mut self,mut data:R)->Result<(),BinrwError>{
// well... this looks error prone
while let Ok(event_chunk_header)=data.read_le::<EventChunkHeader>(){
match event_chunk_header.event_type{
@@ -518,7 +588,7 @@ impl Block{
Ok(())
}
/// Read a complete data block and append the elements to the timelines in this block.
pub fn extend_from_reader<R:BinReaderExt>(&mut self,mut data:R)->binrw::BinResult<()>{
pub fn extend_from_reader<R:BinReaderExt>(&mut self,mut data:R)->Result<(),BinrwError>{
// sad code duplication
while let Ok(event_chunk_header)=data.read_le::<EventChunkHeader>(){
match event_chunk_header.event_type{
@@ -534,13 +604,23 @@ impl Block{
}
Ok(())
}
fn extend_from_block_id_iter<'a,R:BinReaderExt>(&mut self,mut data:R,block_timelines:&BlockTimelines,blocks:impl IntoIterator<Item=&'a Timed<BlockId>>)->Result<(),Error>{
for timed in blocks{
let take_seek=block_timelines
.block_info(timed.event)?
.take_seek(&mut data)
.map_err(Error::Seek)?;
self.extend_from_reader(take_seek).map_err(Error::InvalidData)?;
}
Ok(())
}
}
#[derive(Debug)]
pub enum Error{
InvalidBlockId(BlockId),
InvalidBlockId(InvalidBlockId),
Seek(IoError),
InvalidData(binrw::Error),
InvalidData(BinrwError),
}
impl std::fmt::Display for Error{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
@@ -552,11 +632,33 @@ impl std::error::Error for Error{}
#[binrw]
#[brw(little)]
#[derive(Debug,Clone,Copy)]
pub struct BlockId(#[br(map=|i:u32|i-1)]u32);
pub struct BlockId(
#[br(map=|i:u32|i-1)]
#[bw(map=|&i:&u32|i+1)]
u32
);
#[binrw]
#[brw(little)]
#[derive(Debug,Clone)]
struct BlockPosition(#[br(map=|i:u32|i-1)]u32);
struct BlockPosition(
#[br(map=|i:u32|i-1)]
#[bw(map=|&i:&u32|i+1)]
u32
);
#[derive(Debug)]
pub struct InvalidBlockId(pub BlockId);
impl std::fmt::Display for InvalidBlockId{
fn fmt(&self,f:&mut std::fmt::Formatter<'_>)->std::fmt::Result{
write!(f,"{self:?}")
}
}
impl std::error::Error for InvalidBlockId{}
impl From<InvalidBlockId> for Error{
fn from(value:InvalidBlockId)->Self{
Self::InvalidBlockId(value)
}
}
/// The first 16 bytes of the file.
#[binrw]
@@ -569,7 +671,7 @@ pub struct FileHeader{
num_realtime_blocks:u32,
}
impl FileHeader{
pub fn from_reader<R:BinReaderExt>(mut data:R)->binrw::BinResult<Self>{
pub fn from_reader<R:BinReaderExt>(mut data:R)->Result<Self,BinrwError>{
data.read_le()
}
fn block_position_count(&self)->u32{
@@ -603,7 +705,7 @@ pub struct BlockTimelines{
realtime_blocks_timeline:Vec<Timed<BlockId>>,
}
impl BlockTimelines{
pub fn from_reader<R:BinReaderExt>(header:&FileHeader,mut data:R)->binrw::BinResult<Self>{
pub fn from_reader<R:BinReaderExt>(header:&FileHeader,mut data:R)->Result<Self,BinrwError>{
data.read_le_args(header)
}
/// "Offline" blocks (containing World, Gravity, Run, Camera, and Setting events) in chronological order.
@@ -615,12 +717,13 @@ impl BlockTimelines{
&self.realtime_blocks_timeline
}
/// Get BlockInfo for a specific BlockId.
pub fn block_info(&self,BlockId(block_id):BlockId)->Result<BlockInfo,Error>{
if self.block_positions.len() as u32<=block_id{
return Err(Error::InvalidBlockId(BlockId(block_id)));
pub fn block_info(&self,block_id:BlockId)->Result<BlockInfo,InvalidBlockId>{
let BlockId(id)=block_id;
if self.block_positions.len() as u32<=id{
return Err(InvalidBlockId(block_id));
}
let BlockPosition(start)=self.block_positions[block_id as usize];
let BlockPosition(end)=self.block_positions[block_id as usize+1];
let BlockPosition(start)=self.block_positions[id as usize];
let BlockPosition(end)=self.block_positions[id as usize+1];
Ok(BlockInfo(start..end))
}
}
@@ -641,24 +744,13 @@ impl core::ops::Deref for BlockInfo{
}
}
fn read_to_block<'a,R:BinReaderExt>(mut data:R,block_timelines:&BlockTimelines,blocks:impl IntoIterator<Item=&'a Timed<BlockId>>)->Result<Block,Error>{
let mut block=Block::default();
for timed in blocks{
let take_seek=block_timelines
.block_info(timed.event)?
.take_seek(&mut data)
.map_err(Error::Seek)?;
block.extend_from_reader(take_seek).map_err(Error::InvalidData)?;
}
Ok(block)
}
/// Read offline blocks and combine the timelines into a single Block.
/// Note that this reads the blocks in chronological order, not the order they appear in the file, so there is some seeking involved.
pub fn read_offline_to_block<R:BinReaderExt>(mut data:R)->Result<Block,Error>{
let header=FileHeader::from_reader(&mut data).map_err(Error::InvalidData)?;
let block_timelines=BlockTimelines::from_reader(&header,&mut data).map_err(Error::InvalidData)?;
let block=read_to_block(data,&block_timelines,block_timelines.offline_blocks())?;
let mut block=Block::default();
block.extend_from_block_id_iter(data,&block_timelines,block_timelines.offline_blocks())?;
Ok(block)
}
@@ -667,16 +759,319 @@ pub fn read_offline_to_block<R:BinReaderExt>(mut data:R)->Result<Block,Error>{
pub fn read_realtime_to_block<R:BinReaderExt>(mut data:R)->Result<Block,Error>{
let header=FileHeader::from_reader(&mut data).map_err(Error::InvalidData)?;
let block_timelines=BlockTimelines::from_reader(&header,&mut data).map_err(Error::InvalidData)?;
let block=read_to_block(data,&block_timelines,block_timelines.realtime_blocks())?;
let mut block=Block::default();
block.extend_from_block_id_iter(data,&block_timelines,block_timelines.realtime_blocks())?;
Ok(block)
}
/// Read the entire file and combine the timelines into a single Block.
/// Note that this reads the blocks in chronological order, not the order they appear in the file, so there is some seeking involved.
#[cfg(feature="itertools")]
pub fn read_all_to_block<R:BinReaderExt>(mut data:R)->Result<Block,Error>{
let header=FileHeader::from_reader(&mut data).map_err(Error::InvalidData)?;
let block_timelines=BlockTimelines::from_reader(&header,&mut data).map_err(Error::InvalidData)?;
let block=read_to_block(data,&block_timelines,itertools::merge(block_timelines.offline_blocks(),block_timelines.realtime_blocks()))?;
let mut block=Block::default();
block.extend_from_block_id_iter(&mut data,&block_timelines,block_timelines.offline_blocks())?;
block.extend_from_block_id_iter(&mut data,&block_timelines,block_timelines.realtime_blocks())?;
Ok(block)
}
/// Number of event timelines in a `Block`; must match the number of
/// `EventType` variants, since `Head` is indexed by `EventType as usize`.
const NUM_EVENT_TYPES:usize=8;
/// A play head: one cursor index per event timeline of a [`Block`],
/// indexed by `EventType as usize`.
///
/// `Debug` is derived because this is a public type consumers will want
/// to inspect; `Default` mirrors `Head::new()` (all cursors at 0).
#[derive(Debug,Clone,Copy,Default)]
pub struct Head([usize;NUM_EVENT_TYPES]);
impl Head{
/// A head with all eight timeline cursors at the start (index 0).
pub const fn new()->Self{
Self([0;NUM_EVENT_TYPES])
}
/// Use `Head::partition_point` instead.
#[deprecated]
pub fn after_time(block:&Block,time:f64)->Self{
// Each cursor is placed just past the last event with event.time<=time.
// Array order must match the EventType -> index mapping.
Self([
block.input_events.partition_point(|event|event.time<=time),
block.output_events.partition_point(|event|event.time<=time),
block.sound_events.partition_point(|event|event.time<=time),
block.world_events.partition_point(|event|event.time<=time),
block.gravity_events.partition_point(|event|event.time<=time),
block.run_events.partition_point(|event|event.time<=time),
block.camera_events.partition_point(|event|event.time<=time),
block.setting_events.partition_point(|event|event.time<=time),
])
}
/// Uses a binary search to initialize the head positions according to a predicate.
/// You probably want `|event_time|event_time<=time`
// NOTE: partition_point requires each timeline to be sorted by time,
// which the file format guarantees.
pub fn partition_point(block:&Block,pred:impl Fn(f64)->bool)->Self{
Self([
block.input_events.partition_point(|event|pred(event.time)),
block.output_events.partition_point(|event|pred(event.time)),
block.sound_events.partition_point(|event|pred(event.time)),
block.world_events.partition_point(|event|pred(event.time)),
block.gravity_events.partition_point(|event|pred(event.time)),
block.run_events.partition_point(|event|pred(event.time)),
block.camera_events.partition_point(|event|pred(event.time)),
block.setting_events.partition_point(|event|pred(event.time)),
])
}
// compare an event at the head of the plan to the best event collected so far.
fn collect_event<E>(
&self,
event_type:EventType,
list:&[Timed<E>],
best:&mut Option<Timed<EventType>>,
)
where
E:for<'a>binrw::BinRead<Args<'a>=()>,
E:for<'a>binrw::BinWrite<Args<'a>=()>,
{
// Strict `<` means ties keep the previously collected candidate, so the
// call order in next_event decides which type wins on equal timestamps.
if let Some(event)=list.get(self.get_event_index(event_type))
&&best.as_ref().is_none_or(|b|event.time<b.time)
{
*best=Some(Timed{time:event.time,event:event_type});
}
}
// Consider the head event of each offline timeline as the next candidate.
fn collect_offline(&self,block:&Block,next_event:&mut Option<Timed<EventType>>){
self.collect_event(EventType::World,&block.world_events,next_event);
self.collect_event(EventType::Gravity,&block.gravity_events,next_event);
self.collect_event(EventType::Run,&block.run_events,next_event);
self.collect_event(EventType::Camera,&block.camera_events,next_event);
self.collect_event(EventType::Setting,&block.setting_events,next_event);
}
// Consider the head event of each realtime timeline as the next candidate.
fn collect_realtime(&self,block:&Block,next_event:&mut Option<Timed<EventType>>){
self.collect_event(EventType::Input,&block.input_events,next_event);
self.collect_event(EventType::Output,&block.output_events,next_event);
self.collect_event(EventType::Sound,&block.sound_events,next_event);
}
/// The earliest event at or after the current head across all timelines,
/// or `None` when every timeline is exhausted. Does not advance the head.
pub fn next_event(&self,block:&Block)->Option<Timed<EventType>>{
let mut next_event=None;
// This order is particular.
// Setting must appear before Input for strafe client resimulation to work.
self.collect_offline(block,&mut next_event);
self.collect_realtime(block,&mut next_event);
next_event
}
/// Current cursor index into the timeline of the given event type.
pub const fn get_event_index(&self,event_type:EventType)->usize{
self.0[event_type as usize]
}
/// Overwrite the cursor index for the given event type.
pub const fn set_event_index(&mut self,event_type:EventType,index:usize){
self.0[event_type as usize]=index;
}
/// Add the new event.
pub const fn push(&mut self,event_type:EventType){
self.0[event_type as usize]+=1;
}
}
/// Serialize a `Block` into the v0 bot-file format: a `FileHeader`,
/// a `BlockTimelines` index, then the event data blocks.
///
/// Events are split into offline and realtime timelines, partitioned into
/// data blocks of roughly `MAX_BLOCK_SIZE` predicted bytes each, and the
/// blocks are written in a specific order the game client depends on
/// (first offline block, first realtime block, last realtime block, then
/// the rest chronologically).
#[cfg(feature="itertools")]
pub fn serialize<W:binrw::BinWriterExt>(block:&Block,writer:&mut W)->Result<(),BinrwError>{
use std::ops::Range;
// Soft per-block size budget (16 KiB), used only to decide where to split.
const MAX_BLOCK_SIZE:usize=1<<14;
const FILE_VERSION:u32=0;
// Order must match the EventType -> index mapping used by Head.
const EVENT_TYPES:[EventType;NUM_EVENT_TYPES]=[
EventType::Input,
EventType::Output,
EventType::Sound,
EventType::World,
EventType::Gravity,
EventType::Run,
EventType::Camera,
EventType::Setting,
];
// Serialized size in bytes of one event of each type (8-byte trey time
// plus the event payload), indexed the same way as EVENT_TYPES.
const EVENT_SIZE:[usize;NUM_EVENT_TYPES]=[
8+4+2*4, // Input
8+4+4*3*4, // Output
8+4+4, // Sound
8+4+12, // World
8+3*4, // Gravity
8+4+4+4, // Run
8+4+3*4, // Camera
8+4+8, // Setting
];
// A plan of what range of events to include in a data block.
struct Plan([Range<usize>;NUM_EVENT_TYPES]);
impl Plan{
// The planned block covers the events between two head snapshots.
fn new(start:&Head,end:&Head)->Self{
Plan(core::array::from_fn(|i|start.0[i]..end.0[i]))
}
/// Calculate the predicted size of the planned block.
// Each non-empty event range contributes one chunk header plus its events.
fn size(&self)->usize{
self.0.iter()
.zip(EVENT_SIZE)
.filter_map(|(range,event_size)|match range.len(){
0=>None,
other=>Some(other*event_size+size_of::<EventChunkHeader>()),
})
.sum()
}
}
// Iterate the plan as (event type, event index range) pairs.
impl IntoIterator for Plan{
type IntoIter=core::iter::Zip<
core::array::IntoIter<EventType,NUM_EVENT_TYPES>,
core::array::IntoIter<Range<usize>,NUM_EVENT_TYPES>,
>;
type Item=(EventType,Range<usize>);
fn into_iter(self)->Self::IntoIter{
EVENT_TYPES.into_iter().zip(self.0)
}
}
/// Predict the size increment from adding a new event.
// NOTE(review): `head` is cumulative across the whole timeline, so the
// chunk-header cost is only counted for the very first block that contains
// a given event type; later blocks may exceed MAX_BLOCK_SIZE by up to one
// header per event type. Likely harmless (the budget is soft and actual
// positions use Plan::size, which is exact) — confirm intent.
fn predict_size_increment(head:&Head,event_type:EventType)->usize{
// Branchless select: mask is all-ones when a new chunk header is counted.
let new_chunk_header=head.get_event_index(event_type)==0;
let mask=(-(new_chunk_header as isize)) as usize;
EVENT_SIZE[event_type as usize]+(mask&size_of::<EventChunkHeader>())
}
// plan a single block: collect events until the block is full
// Advances `head` past the consumed events and returns the timestamp of
// the block's first event, or None when no events remain (or the very
// first event alone would not fit).
fn plan_block(head:&mut Head,next_event:impl Fn(&Head)->Option<Timed<EventType>>)->Option<f64>{
let mut size=0;
let first=next_event(head)?;
size+=predict_size_increment(head,first.event);
if MAX_BLOCK_SIZE<size{
return None;
}
head.push(first.event);
while let Some(event)=next_event(head){
size+=predict_size_increment(head,event.event);
if MAX_BLOCK_SIZE<size{
break;
}
head.push(event.event);
}
Some(first.time)
}
struct PlannedBlock{
// index is not the same as BlockId.
// It is list-local for both plan_offline and plan_realtime.
index:usize,
time:f64,
plan:Plan,
}
// Greedily split one timeline into a sequence of planned blocks.
fn plan_timeline<F>(next_event:F)->std::collections::VecDeque<PlannedBlock>
where
F:Copy,
F:Fn(&Head)->Option<Timed<EventType>>
{
let mut timeline=std::collections::VecDeque::new();
let mut head=Head::new();
let mut last_head=head.clone();
let mut index=0;
while let Some(time)=plan_block(&mut head,next_event){
timeline.push_back(PlannedBlock{
index,
time,
plan:Plan::new(&last_head,&head),
});
last_head=head.clone();
index+=1;
}
timeline
}
// plan events into segments without spilling over max size threshold
// each plan describes the range of events included in the block.
let mut plan_offline=plan_timeline(|plan|{
let mut next_event=None;
plan.collect_offline(block,&mut next_event);
next_event
});
let mut plan_realtime=plan_timeline(|plan|{
let mut next_event=None;
plan.collect_realtime(block,&mut next_event);
next_event
});
let file_header=FileHeader{
file_version:FILE_VERSION,
num_offline_blocks:plan_offline.len() as u32,
num_realtime_blocks:plan_realtime.len() as u32,
};
// plan_order records the physical (file) order of blocks; the timelines
// below record the chronological order with ids patched in afterwards.
let mut plan_order=Vec::with_capacity(plan_offline.len()+plan_realtime.len());
let mut block_positions=Vec::with_capacity(file_header.block_position_count() as usize);
// Fill the timelines with dummy values, we don't know the block ids yet.
// This can be done with Vec::spare_capacity_mut and unsafe, but whatever.
const DUMMY_BLOCK:Timed<BlockId>=Timed{time:0.0,event:BlockId(0)};
let mut offline_blocks_timeline=vec![DUMMY_BLOCK;plan_offline.len()];
let mut realtime_blocks_timeline=vec![DUMMY_BLOCK;plan_realtime.len()];
{
// position starts after the *predicted* end of the BlockTimelines
let mut position=file_header.block_timelines_info().end;
let mut block_id=0;
// Assign the next file position and BlockId to one planned block.
let mut push_block=|timeline:&mut Vec<Timed<BlockId>>,planned:PlannedBlock|{
block_positions.push(BlockPosition(position));
position+=planned.plan.size() as u32;
// write the block id to the correct index
timeline[planned.index]=Timed{
time:planned.time,
event:BlockId(block_id),
};
block_id+=1;
plan_order.push(planned.plan);
};
// the first block in the file is an offline block to
// initialize the state of things like the current style
if let Some(plan)=plan_offline.pop_front(){
push_block(&mut offline_blocks_timeline,plan);
}
// the second block is the first realtime block which
// includes the starting position of the replay
if let Some(plan)=plan_realtime.pop_front(){
push_block(&mut realtime_blocks_timeline,plan);
}
// the third block is the last realtime block which
// is used by the game client to determine the duration
if let Some(plan)=plan_realtime.pop_back(){
push_block(&mut realtime_blocks_timeline,plan);
}
// push the remaining blocks in chronological order
// (both queues are already time-sorted; merge_join_by with a bool
// predicate yields Either::Left/Right in merged order)
for either_plan in itertools::merge_join_by(
plan_offline,
plan_realtime,
|offline,realtime|offline.time<=realtime.time,
){
match either_plan{
itertools::Either::Left(offline)=>push_block(&mut offline_blocks_timeline,offline),
itertools::Either::Right(realtime)=>push_block(&mut realtime_blocks_timeline,realtime),
}
}
// final position
block_positions.push(BlockPosition(position));
}
let block_timelines=BlockTimelines{
block_positions,
offline_blocks_timeline,
realtime_blocks_timeline,
};
use binrw::BinWrite;
// Header and index first, then each block's chunks in file order.
file_header.write_le(writer)?;
block_timelines.write_le(writer)?;
for plan in plan_order{
for (event_type,range) in plan{
let num_events=range.len();
if num_events==0{
continue;
}
let event_chunk_header=EventChunkHeader{
event_type,
num_events:num_events as u32,
};
event_chunk_header.write_le(writer)?;
match event_type{
EventType::Input=>block.input_events[range].write_le(writer)?,
EventType::Output=>block.output_events[range].write_le(writer)?,
EventType::Sound=>block.sound_events[range].write_le(writer)?,
EventType::World=>block.world_events[range].write_le(writer)?,
EventType::Gravity=>block.gravity_events[range].write_le(writer)?,
EventType::Run=>block.run_events[range].write_le(writer)?,
EventType::Camera=>block.camera_events[range].write_le(writer)?,
EventType::Setting=>block.setting_events[range].write_le(writer)?,
}
}
}
Ok(())
}