Finish fixed rate transform interpolation for the renderer
ci/woodpecker/push/build: Pipeline failed

SeanOMik 2023-11-03 19:50:00 -04:00
parent 1b723cc30b
commit b9b2c9f8e7
Signed by: SeanOMik
GPG Key ID: 568F326C7EB33ACB
6 changed files with 50 additions and 3181 deletions

Cargo.lock (generated)

@@ -763,6 +763,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
[[package]]
name = "fps_counter"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3aaba7ff514ee9d802b562927f80b1e94e93d8e74c31b134c9c3762dabf1a36b"
[[package]]
name = "fuchsia-cprng"
version = "0.1.1"
@@ -2293,6 +2299,18 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "testbed"
version = "0.1.0"
dependencies = [
"anyhow",
"async-std",
"edict",
"fps_counter",
"lyra-engine",
"tracing",
]
[[package]]
name = "thiserror"
version = "1.0.48"


@@ -5,7 +5,8 @@ edition = "2021"
[workspace]
members = [
"lyra-resource"
"lyra-resource",
"examples/testbed"
]
[dependencies]

File diff suppressed because it is too large.


@@ -11,6 +11,4 @@ anyhow = "1.0.75"
async-std = "1.12.0"
tracing = "0.1.37"
fps_counter = "2.0.0"
edict = "0.5.0"
[workspace]
edict = "0.5.0"


@@ -120,7 +120,7 @@ async fn main() {
game.world().insert_resource(TpsAccumulator(0.0));
let mut sys = BatchedSystem::new();
sys.with_criteria(FixedTimestep::new(10));
sys.with_criteria(FixedTimestep::new(45));
sys.with_system(spin_system);
sys.with_system(fps_system);
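The value passed to `FixedTimestep::new` appears to be ticks per second, so this bumps the spin/fps batch from 10 Hz to 45 Hz, and the renderer then interpolates transforms between those ticks. A rough sketch of the idea behind an accumulator-based fixed-timestep criteria (the names and the ticks-per-second reading are assumptions, not lyra-engine's actual implementation):

```rust
use std::time::{Duration, Instant};

/// Illustrative stand-in for a fixed-timestep run criteria: systems gated
/// by it run once per `step` of real time, no matter how often the
/// scheduler polls `should_run`.
struct FixedStep {
    step: Duration,
    accumulator: Duration,
    last_poll: Instant,
}

impl FixedStep {
    /// `rate` is in ticks per second (e.g. 45).
    fn new(rate: u32) -> Self {
        Self {
            step: Duration::from_secs_f32(1.0 / rate as f32),
            accumulator: Duration::ZERO,
            last_poll: Instant::now(),
        }
    }

    /// Returns true once a full step of real time has accumulated.
    fn should_run(&mut self) -> bool {
        let now = Instant::now();
        self.accumulator += now - self.last_poll;
        self.last_poll = now;

        if self.accumulator >= self.step {
            self.accumulator -= self.step;
            true
        } else {
            false
        }
    }
}
```

If 45 is indeed ticks per second, the fixed step is roughly 22 ms; without interpolation the rendered transforms would only change at that rate, which is what the renderer changes below address.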


@@ -53,7 +53,8 @@ struct MeshBufferStorage {
pub struct CachedTransform {
last_updated_at: Option<Instant>,
cached_at: Instant,
transform: Transform,
to_transform: Transform,
from_transform: Transform,
}
pub struct BasicRenderer {
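The cache now keeps the two most recent fixed-tick transforms instead of one: `from_transform` is where the entity was at the previous tick and `to_transform` where it is after the latest tick, with `last_updated_at`/`cached_at` recording when those ticks happened. A simplified sketch of that bookkeeping (a bare position array stands in for the engine's `Transform`; the method names are illustrative, not the renderer's):

```rust
use std::time::Instant;

/// Illustrative stand-in for the engine's Transform.
type Position = [f32; 3];

#[derive(Clone, Copy)]
struct CachedTransform {
    /// When `from_transform` was captured; None until a second tick arrives.
    last_updated_at: Option<Instant>,
    /// When `to_transform` was captured.
    cached_at: Instant,
    /// Interpolation endpoints: previous fixed tick -> latest fixed tick.
    from_transform: Position,
    to_transform: Position,
}

impl CachedTransform {
    /// First sighting of an entity: both endpoints start at the same value.
    fn new(transform: Position, now: Instant) -> Self {
        Self {
            last_updated_at: None,
            cached_at: now,
            from_transform: transform,
            to_transform: transform,
        }
    }

    /// A fixed tick produced a new transform: the old target becomes the
    /// new starting point and the timestamps shift along with it.
    fn push(&mut self, transform: Position, now: Instant) {
        self.from_transform = self.to_transform;
        self.to_transform = transform;
        self.last_updated_at = Some(self.cached_at);
        self.cached_at = now;
    }
}
```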
@@ -525,67 +526,40 @@ impl Renderer for BasicRenderer {
for (entity, model, model_epoch, transform, transform_epoch) in main_world.query::<(Entities, &ModelComponent, EpochOf<ModelComponent>, &TransformComponent, EpochOf<TransformComponent>)>().iter() {
alive_entities.insert(entity);
let model = model.data.as_ref().unwrap().as_ref();
let cached = match self.entity_last_transforms.get_mut(&entity) {
Some(last) if transform_epoch == last_epoch => {
last.from_transform = last.to_transform;
last.to_transform = transform.transform;
last.last_updated_at = Some(last.cached_at);
last.cached_at = now_inst;
for mesh in model.meshes.iter() {
let last_transform = self.entity_last_transforms.get(&entity).cloned();
if last_transform.is_none() {
last.clone()
},
Some(last) => last.clone(),
None => {
let cached = CachedTransform {
last_updated_at: None,
cached_at: now_inst,
transform: transform.transform,
from_transform: transform.transform,
to_transform: transform.transform,
};
self.entity_last_transforms.insert(entity, cached);
} else if transform_epoch == last_epoch {
let last = self.entity_last_transforms.get_mut(&entity).unwrap();
last.transform = transform.transform;
last.last_updated_at = Some(last.cached_at);
last.cached_at = now_inst;
debug!("Updated transform");
// to get the fixed delta time, you'd just subtract last_updated_at and cached_at.
// to get the tps_accumulator, you'd get the elapsed ms of cached_at.
self.entity_last_transforms.insert(entity, cached.clone());
cached
}
let transform_val = match last_transform {
Some(last) => {
debug!("now: {:?}, cached_at: {:?}, last_updated_at: {:?}", now_inst, last.cached_at, last.last_updated_at);
let fixed_time = match last.last_updated_at {
Some(last_updated_at) => last.cached_at - last_updated_at,
None => now_inst - last.cached_at
}.as_secs_f32();
let accumulator = last.cached_at.elapsed().as_secs_f32();
let alpha = accumulator / fixed_time;
};
let fixed_time = match cached.last_updated_at {
Some(last_updated_at) => cached.cached_at - last_updated_at,
None => now_inst - cached.cached_at
}.as_secs_f32();
let accumulator = (now_inst - cached.cached_at).as_secs_f32();
let alpha = accumulator / fixed_time;
debug!("fixed time: {fixed_time}, acc: {accumulator}, alpha: {alpha}");
let transform_val = cached.from_transform.lerp(cached.to_transform, alpha);
last.transform.lerp(transform.transform, alpha)
} ,
None => {
transform.transform
}
};
/*{
match self.entity_last_transforms.get_mut(&entity) {
Some(last) => {
last.transform = transform.transform.clone();
last.last_updated_at = Some(last.cached_at);
last.cached_at = Instant::now();
},
None => {
self.entity_last_transforms.insert(entity, CachedTransform {
last_updated_at: None,
cached_at: Instant::now(),
transform: transform.transform.clone(),
});
}
}
}*/
if !self.process_mesh(entity, transform.transform, mesh) && model_epoch == last_epoch {
let model = model.data.as_ref().unwrap().as_ref();
for mesh in model.meshes.iter() {
if !self.process_mesh(entity, transform_val, mesh) && model_epoch == last_epoch {
self.update_mesh_buffers(entity, mesh);
}
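Each render frame then derives an interpolation factor from how far the current instant sits past the latest tick, relative to the gap between the last two ticks, and lerps between the cached endpoints. A self-contained sketch of that math (the `lerp` helper, parameter names, and the clamp/zero-step guards are additions for illustration; the diff above does the equivalent with the engine's `Transform::lerp` and no clamping):

```rust
use std::time::Instant;

/// Plain linear interpolation between two positions.
fn lerp(a: [f32; 3], b: [f32; 3], t: f32) -> [f32; 3] {
    [
        a[0] + (b[0] - a[0]) * t,
        a[1] + (b[1] - a[1]) * t,
        a[2] + (b[2] - a[2]) * t,
    ]
}

/// Mirrors the new renderer logic: `fixed_time` is the gap between the last
/// two fixed ticks (or, before a second tick exists, the time since the
/// first), the accumulator is how long ago the latest tick happened, and
/// alpha is the fraction of the way from `from` to `to`.
fn interpolated(
    from: [f32; 3],
    to: [f32; 3],
    last_updated_at: Option<Instant>,
    cached_at: Instant,
    now: Instant,
) -> [f32; 3] {
    let fixed_time = match last_updated_at {
        Some(last_updated_at) => cached_at - last_updated_at,
        None => now - cached_at,
    }
    .as_secs_f32();

    // Guard against a zero-length step on the very first frame.
    if fixed_time <= f32::EPSILON {
        return to;
    }

    let accumulator = (now - cached_at).as_secs_f32();

    // Clamping makes a late tick land on the target instead of overshooting it.
    let alpha = (accumulator / fixed_time).clamp(0.0, 1.0);

    lerp(from, to, alpha)
}
```

Fed the fields of an entity's cached transform every frame, this yields a smooth per-frame transform even though the simulation only advances at the fixed tick rate.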