render: improve the render buffer wrapper, use it for the camera uniform

SeanOMik 2024-03-14 23:08:21 -04:00
parent f345f065c1
commit 22c08ba66e
Signed by: SeanOMik
GPG Key ID: FEC9E2FC15235964
4 changed files with 111 additions and 70 deletions

View File

@@ -1,3 +1,5 @@
+use std::{mem, num::NonZeroU64};
+
 use winit::dpi::PhysicalSize;
 
 use crate::{math::{Angle, OPENGL_TO_WGPU_MATRIX}, scene::CameraComponent};
@@ -38,16 +40,34 @@ impl Projection {
 #[repr(C)]
 #[derive(Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]
 pub struct CameraUniform {
+    /// The view matrix of the camera
+    pub view_mat: glam::Mat4,
+    /// The view projection matrix
     pub view_proj: glam::Mat4,
-    // vec4 is used because of the uniforms 16 byte spacing requirement
-    pub view_pos: glam::Vec4,
+    /// The position of the camera
+    pub position: glam::Vec3,
+    _padding: u32,
 }
 
 impl Default for CameraUniform {
     fn default() -> Self {
         Self {
+            view_mat: glam::Mat4::IDENTITY,
             view_proj: glam::Mat4::IDENTITY,
-            view_pos: Default::default()
+            position: Default::default(),
+            _padding: 0,
+        }
+    }
+}
+
+impl CameraUniform {
+    pub fn new(view_mat: glam::Mat4, view_proj: glam::Mat4, position: glam::Vec3) -> Self {
+        Self {
+            view_mat,
+            view_proj,
+            position,
+            _padding: 0
         }
     }
 }
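Note: the removed comment on `view_pos` ("vec4 is used because of the uniforms 16 byte spacing requirement") explains why the new struct still carries explicit padding. WGSL uniform layout rounds a `vec3` field up to a 16-byte slot, so the Rust side appends a trailing `u32` to keep both layouts byte-for-byte identical and to leave no implicit padding for `bytemuck::Pod` to reject. A minimal standalone sketch of that layout check, not part of the commit (assumes glam's `bytemuck` feature and bytemuck's `derive` feature; the `main` and the size assertion are illustrative only):

```rust
use glam::{Mat4, Vec3};

// Mirror of the commit's CameraUniform, reproduced here only to show the byte layout.
#[repr(C)]
#[derive(Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]
struct CameraUniform {
    view_mat: Mat4,  // 64 bytes
    view_proj: Mat4, // 64 bytes
    position: Vec3,  // 12 bytes
    _padding: u32,   // 4 bytes: fills the 16-byte slot WGSL reserves for a vec3
}

fn main() {
    // 64 + 64 + 12 + 4 = 144 bytes, with no implicit padding anywhere in the struct.
    assert_eq!(std::mem::size_of::<CameraUniform>(), 144);
}
```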
@@ -79,9 +99,11 @@ impl RenderCamera {
         self.aspect = size.width as f32 / size.height as f32;
     }
 
-    pub fn update_view_projection(&mut self, camera: &CameraComponent) -> &glam::Mat4 {
-        match camera.mode {
-            CameraProjectionMode::Perspective => {
+    /// Calculates the view projection, and the view
+    ///
+    /// Returns: A tuple with the view projection as the first element, and the
+    /// view matrix as the second.
+    pub fn calc_view_projection(&mut self, camera: &CameraComponent) -> (&glam::Mat4, glam::Mat4) {
         let position = camera.transform.translation;
         let forward = camera.transform.forward();
         let up = camera.transform.up();
@@ -92,13 +114,14 @@ impl RenderCamera {
             up
         );
 
+        match camera.mode {
+            CameraProjectionMode::Perspective => {
                 let proj = glam::Mat4::perspective_rh_gl(camera.fov.to_radians(), self.aspect, self.znear, self.zfar);
                 self.view_proj = OPENGL_TO_WGPU_MATRIX * proj * view;
-                &self.view_proj
+                (&self.view_proj, view)
             },
             CameraProjectionMode::Orthographic => {
-                let position = camera.transform.translation;
                 let target = camera.transform.rotation * glam::Vec3::new(0.0, 0.0, -1.0);
                 let target = target.normalize();
@@ -111,7 +134,7 @@ impl RenderCamera {
                 let proj = glam::Mat4::orthographic_rh_gl(-size_x, size_x, -size_y, size_y, self.znear, self.zfar);
                 self.view_proj = OPENGL_TO_WGPU_MATRIX * proj;
-                &self.view_proj
+                (&self.view_proj, view)
             },
         }
     }
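Note: both branches multiply by `OPENGL_TO_WGPU_MATRIX` because `perspective_rh_gl` and `orthographic_rh_gl` produce OpenGL-style clip space with depth in [-1, 1], while wgpu expects depth in [0, 1]. The correction matrix conventionally has the form sketched below; this is an assumption about lyra's constant, shown only for reference and not part of the commit:

```rust
// Scales clip-space z by 0.5 and translates it by 0.5 * w,
// remapping normalized depth from [-1, 1] to [0, 1].
// The array is column-major, matching how glam stores matrices.
fn opengl_to_wgpu_matrix() -> glam::Mat4 {
    glam::Mat4::from_cols_array(&[
        1.0, 0.0, 0.0, 0.0,
        0.0, 1.0, 0.0, 0.0,
        0.0, 0.0, 0.5, 0.0,
        0.0, 0.0, 0.5, 1.0,
    ])
}
```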

View File

@@ -1,4 +1,4 @@
-use std::{sync::Arc, num::NonZeroU32};
+use std::{num::NonZeroU32, ops::Deref, sync::Arc};
 
 use wgpu::util::DeviceExt;
@@ -68,11 +68,63 @@ impl BufferWrapper {
         }
     }
 
+    /// Creates a builder for a BufferWrapper
     pub fn builder() -> BufferWrapperBuilder {
         BufferWrapperBuilder::new()
    }
+
+    /// Retrieves the layout of the bind group associated with this buffer.
+    ///
+    /// Returns `None` if this buffer object was not provided a bind group.
+    pub fn bindgroup_layout(&self) -> Option<&wgpu::BindGroupLayout> {
+        self.bindgroup_pair.as_ref().map(|bg| bg.layout.deref())
+    }
+
+    /// Queues the data to be written to the buffer starting at `offset`.
+    ///
+    /// The write is not immediately submitted; it is enqueued internally
+    /// to happen at the start of the next `submit()` call.
+    ///
+    /// This method fails if `data` overruns the size of the buffer starting at `offset`.
+    ///
+    /// See [`wgpu::Queue::write_buffer`](https://docs.rs/wgpu/latest/wgpu/struct.Queue.html#method.write_buffer).
+    pub fn write_buffer<T>(&self, queue: &wgpu::Queue, offset: u64, data: &[T])
+    where
+        T: bytemuck::NoUninit
+    {
+        queue.write_buffer(&self.inner_buf, offset, bytemuck::cast_slice(data));
+    }
+
+    /// Sets the buffer's bind group at `index` in the `pass`.
+    ///
+    /// The bind group layout in the active pipeline when any `draw()` function is called must
+    /// match the layout of this bind group.
+    ///
+    /// See [`wgpu::RenderPass::set_bind_group`](https://docs.rs/wgpu/latest/wgpu/struct.RenderPass.html#method.set_bind_group).
+    pub fn bind_at_bind_group<'a, 'b>(
+        &'a self,
+        pass: &'b mut wgpu::RenderPass<'a>,
+        index: u32,
+        offsets: &[wgpu::DynamicOffset],
+    ) {
+        let pair = self.bindgroup_pair.as_ref().expect(
+            "BufferWrapper is missing bindgroup pair! Cannot set bind group on RenderPass!",
+        );
+        pass.set_bind_group(index, &pair.bindgroup, offsets);
+    }
 }
 
+/// Struct used for building a BufferWrapper
+///
+/// ```nobuild
+/// let camera_buffer = BufferWrapper::builder()
+///     .buffer_usage(wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST)
+///     .contents(&[CameraUniform::default()])
+///     .label_prefix("Camera")
+///     .visibility(wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT)
+///     .buffer_dynamic_offset(false)
+///     .finish(&device);
+/// ```
 #[derive(Default)]
 pub struct BufferWrapperBuilder {
     buffer_usage: Option<wgpu::BufferUsages>,
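Note: taken together, the new methods let a caller stay on the wrapper for both the per-frame upload and the draw-time binding. A rough usage sketch, assuming the `BufferWrapper` and `CameraUniform` types from this commit are in scope; the helper function and its name are illustrative only, not part of the change:

```rust
// Enqueue new uniform contents and bind the wrapper's bind group for drawing.
// The group index 2 must match the slot the pipeline layout assigns to the camera.
fn upload_and_bind_camera<'a>(
    camera_buffer: &'a BufferWrapper,
    queue: &wgpu::Queue,
    render_pass: &mut wgpu::RenderPass<'a>,
    uniform: CameraUniform,
) {
    // The data is written at the start of the next queue submit, not immediately.
    camera_buffer.write_buffer(queue, 0, &[uniform]);
    // Panics if the wrapper was built without a bind group pair.
    camera_buffer.bind_at_bind_group(render_pass, 2, &[]);
}
```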

View File

@@ -88,8 +88,8 @@ pub struct BasicRenderer {
     render_limits: Limits,
 
     inuse_camera: RenderCamera,
-    camera_buffer: wgpu::Buffer,
-    camera_bind_group: wgpu::BindGroup,
+    camera_buffer: BufferWrapper,
+    //camera_bind_group: wgpu::BindGroup,
 
     bgl_texture: Arc<BindGroupLayout>,
     default_texture: RenderTexture,
@@ -143,10 +143,7 @@ impl BasicRenderer {
         let render_limits = device.limits();
 
         let surface_caps = surface.get_capabilities(&adapter);
-        let present_mode = surface_caps.present_modes[0]; /* match surface_caps.present_modes.contains(&wgpu::PresentMode::Immediate) {
-            true => wgpu::PresentMode::Immediate,
-            false => surface_caps.present_modes[0]
-        }; */
+        let present_mode = surface_caps.present_modes[0];
 
         debug!("present mode: {:?}", present_mode);
@@ -174,41 +171,13 @@ impl BasicRenderer {
         });
 
         let transform_buffers = TransformBuffers::new(&device);
 
-        let camera_buffer = device.create_buffer_init(
-            &wgpu::util::BufferInitDescriptor {
-                label: Some("Camera Buffer"),
-                contents: bytemuck::cast_slice(&[CameraUniform::default()]),
-                usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
-            }
-        );
-
-        let camera_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
-            entries: &[
-                wgpu::BindGroupLayoutEntry {
-                    binding: 0,
-                    visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT,
-                    ty: wgpu::BindingType::Buffer {
-                        ty: wgpu::BufferBindingType::Uniform,
-                        has_dynamic_offset: false,
-                        min_binding_size: None,
-                    },
-                    count: None,
-                }
-            ],
-            label: Some("camera_bind_group_layout"),
-        });
-
-        let camera_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
-            layout: &camera_bind_group_layout,
-            entries: &[
-                wgpu::BindGroupEntry {
-                    binding: 0,
-                    resource: camera_buffer.as_entire_binding(),
-                }
-            ],
-            label: Some("camera_bind_group"),
-        });
+        let camera_buffer = BufferWrapper::builder()
+            .buffer_usage(wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST)
+            .contents(&[CameraUniform::default()])
+            .label_prefix("Camera")
+            .visibility(wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT)
+            .buffer_dynamic_offset(false)
+            .finish(&device);
 
         let depth_texture = RenderTexture::create_depth_texture(&device, &config, "Depth Buffer");
@@ -222,7 +191,6 @@ impl BasicRenderer {
             .buffer_usage(wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST)
            .visibility(wgpu::ShaderStages::FRAGMENT)
            .contents(&[MaterialUniform::default()])
-            //.size(mem::size_of::<MaterialUniform>())
            .finish(&device);
 
         let mut s = Self {
@@ -248,7 +216,6 @@ impl BasicRenderer {
             inuse_camera: RenderCamera::new(size),
             camera_buffer,
-            camera_bind_group,
 
             bgl_texture,
             default_texture,
@@ -263,7 +230,8 @@ impl BasicRenderer {
         let mut pipelines = HashMap::new();
         pipelines.insert(0, Arc::new(FullRenderPipeline::new(&s.device, &s.config, &shader,
             vec![super::vertex::Vertex::desc(),],
-            vec![&s.bgl_texture, &s.transform_buffers.bindgroup_layout, &camera_bind_group_layout,
+            vec![&s.bgl_texture, &s.transform_buffers.bindgroup_layout,
+                s.camera_buffer.bindgroup_layout().unwrap(),
                 &s.light_buffers.bindgroup_layout, &s.material_buffer.bindgroup_pair.as_ref().unwrap().layout,
                 &s.bgl_texture])));
         s.render_pipelines = pipelines;
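Note: the position of `s.camera_buffer.bindgroup_layout().unwrap()` in this list is what ties the wrapper to group index 2 in the shader, since the order of bind group layouts handed to a pipeline determines the `@group(N)` indices. A minimal raw-wgpu sketch of that correspondence; the function and its parameter names are illustrative, and lyra's `FullRenderPipeline` is assumed to forward the slice in order:

```rust
// The slice index passed here becomes the @group(N) index the shader must use,
// and the index later passed to set_bind_group() / bind_at_bind_group().
fn make_pipeline_layout(
    device: &wgpu::Device,
    texture_bgl: &wgpu::BindGroupLayout,
    transform_bgl: &wgpu::BindGroupLayout,
    camera_bgl: &wgpu::BindGroupLayout,
) -> wgpu::PipelineLayout {
    device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
        label: Some("example_pipeline_layout"),
        bind_group_layouts: &[
            texture_bgl,   // @group(0)
            transform_bgl, // @group(1)
            camera_bgl,    // @group(2) -- the camera BufferWrapper's layout
        ],
        push_constant_ranges: &[],
    })
}
```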
@@ -511,13 +479,10 @@ impl Renderer for BasicRenderer {
         }
 
         if let Some(camera) = main_world.view_iter::<&mut CameraComponent>().next() {
-            let view_proj = self.inuse_camera.update_view_projection(&camera);
+            let (view_proj, view_mat) = self.inuse_camera.calc_view_projection(&camera);
             let pos = camera.transform.translation;
-            let uniform = CameraUniform {
-                view_proj: *view_proj,
-                view_pos: glam::Vec4::new(pos.x, pos.y, pos.z, 0.0),
-            };
-            self.queue.write_buffer(&self.camera_buffer, 0, bytemuck::cast_slice(&[uniform]));
+            let uniform = CameraUniform::new(view_mat, *view_proj, pos);
+            self.camera_buffer.write_buffer(&self.queue, 0, &[uniform]);
         } else {
             warn!("Missing camera!");
         }
@@ -588,7 +553,7 @@ impl Renderer for BasicRenderer {
             let offset = TransformBuffers::index_offset(&self.render_limits, transform_indices) as u32;
             render_pass.set_bind_group(1, bindgroup, &[ offset, offset, ]);
-            render_pass.set_bind_group(2, &self.camera_bind_group, &[]);
+            self.camera_buffer.bind_at_bind_group(&mut render_pass, 2, &[]);
             render_pass.set_bind_group(3, &self.light_buffers.bindgroup, &[]);
             render_pass.set_bind_group(4, &self.material_buffer.bindgroup_pair.as_ref().unwrap().bindgroup, &[]);

View File

@@ -16,8 +16,9 @@ struct VertexOutput {
 }
 
 struct CameraUniform {
+    view_mat: mat4x4<f32>,
     view_proj: mat4x4<f32>,
-    view_pos: vec4<f32>,
+    view_pos: vec3<f32>,
 };
 
 struct PointLight {