Tiled Forward Rendering #5

Merged
SeanOMik merged 15 commits from feature/tiled-forward-rendering into main 2024-03-23 14:38:43 +00:00
4 changed files with 111 additions and 70 deletions
Showing only changes of commit 22c08ba66e - Show all commits

View File

@ -1,3 +1,5 @@
use std::{mem, num::NonZeroU64};
use winit::dpi::PhysicalSize;
use crate::{math::{Angle, OPENGL_TO_WGPU_MATRIX}, scene::CameraComponent};
@ -38,16 +40,34 @@ impl Projection {
#[repr(C)]
#[derive(Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]
pub struct CameraUniform {
/// The view matrix of the camera
pub view_mat: glam::Mat4,
/// The view projection matrix
pub view_proj: glam::Mat4,
// vec4 is used because of the uniforms 16 byte spacing requirement
pub view_pos: glam::Vec4,
/// The position of the camera
pub position: glam::Vec3,
_padding: u32,
}
impl Default for CameraUniform {
fn default() -> Self {
Self {
view_mat: glam::Mat4::IDENTITY,
view_proj: glam::Mat4::IDENTITY,
view_pos: Default::default()
position: Default::default(),
_padding: 0,
}
}
}
impl CameraUniform {
/// Constructs a `CameraUniform` from precomputed matrices and the camera's
/// world-space position.
///
/// `view_mat` is the camera's view matrix and `view_proj` the combined
/// view-projection matrix (see `RenderCamera::calc_view_projection`).
/// `_padding` is always zeroed — it carries no data and presumably exists
/// only to satisfy the uniform buffer's 16-byte alignment rules (TODO:
/// confirm against the shader-side struct layout).
pub fn new(view_mat: glam::Mat4, view_proj: glam::Mat4, position: glam::Vec3) -> Self {
Self {
view_mat,
view_proj,
position,
_padding: 0
}
}
}
@ -79,26 +99,29 @@ impl RenderCamera {
self.aspect = size.width as f32 / size.height as f32;
}
pub fn update_view_projection(&mut self, camera: &CameraComponent) -> &glam::Mat4 {
/// Calculates the view projection, and the view
///
/// Returns: A tuple with the view projection as the first element, and the
/// view matrix as the second.
pub fn calc_view_projection(&mut self, camera: &CameraComponent) -> (&glam::Mat4, glam::Mat4) {
let position = camera.transform.translation;
let forward = camera.transform.forward();
let up = camera.transform.up();
let view = glam::Mat4::look_to_rh(
position,
forward,
up
);
match camera.mode {
CameraProjectionMode::Perspective => {
let position = camera.transform.translation;
let forward = camera.transform.forward();
let up = camera.transform.up();
let view = glam::Mat4::look_to_rh(
position,
forward,
up
);
let proj = glam::Mat4::perspective_rh_gl(camera.fov.to_radians(), self.aspect, self.znear, self.zfar);
self.view_proj = OPENGL_TO_WGPU_MATRIX * proj * view;
&self.view_proj
(&self.view_proj, view)
},
CameraProjectionMode::Orthographic => {
let position = camera.transform.translation;
let target = camera.transform.rotation * glam::Vec3::new(0.0, 0.0, -1.0);
let target = target.normalize();
@ -111,7 +134,7 @@ impl RenderCamera {
let proj = glam::Mat4::orthographic_rh_gl(-size_x, size_x, -size_y, size_y, self.znear, self.zfar);
self.view_proj = OPENGL_TO_WGPU_MATRIX * proj;
&self.view_proj
(&self.view_proj, view)
},
}
}

View File

@ -1,4 +1,4 @@
use std::{sync::Arc, num::NonZeroU32};
use std::{num::NonZeroU32, ops::Deref, sync::Arc};
use wgpu::util::DeviceExt;
@ -68,11 +68,63 @@ impl BufferWrapper {
}
}
/// Creates a builder for a [`BufferWrapper`].
///
/// Configure the builder with its setter methods, then call
/// `finish(&device)` to construct the wrapper.
pub fn builder() -> BufferWrapperBuilder {
BufferWrapperBuilder::new()
}
/// Returns the layout of the bind group associated with this buffer.
///
/// Yields `None` when this buffer object was created without a bindgroup.
pub fn bindgroup_layout(&self) -> Option<&wgpu::BindGroupLayout> {
    match self.bindgroup_pair.as_ref() {
        Some(pair) => Some(pair.layout.deref()),
        None => None,
    }
}
/// Queues `data` to be written to the wrapped buffer starting at `offset`.
///
/// The write is not submitted immediately; it is staged internally and
/// performed at the start of the next `submit()` call.
///
/// This method fails if `data` overruns the size of the buffer starting
/// at `offset`.
///
/// See [`wgpu::Queue::write_buffer`](https://docs.rs/wgpu/latest/wgpu/struct.Queue.html#method.write_buffer).
pub fn write_buffer<T>(&self, queue: &wgpu::Queue, offset: u64, data: &[T])
where
    T: bytemuck::NoUninit,
{
    // Reinterpret the typed slice as raw bytes for the queue upload.
    let bytes: &[u8] = bytemuck::cast_slice(data);
    queue.write_buffer(&self.inner_buf, offset, bytes);
}
/// Binds this buffer's bind group at slot `index` on `pass`.
///
/// The bind group layout in the active pipeline when any `draw()` function
/// is called must match the layout of this bind group.
///
/// # Panics
/// Panics if this wrapper was created without a bindgroup pair.
///
/// See [`wgpu::RenderPass::set_bind_group`](https://docs.rs/wgpu/latest/wgpu/struct.RenderPass.html#method.set_bind_group).
pub fn bind_at_bind_group<'a, 'b>(
    &'a self,
    pass: &'b mut wgpu::RenderPass<'a>,
    index: u32,
    offsets: &[wgpu::DynamicOffset],
) {
    match self.bindgroup_pair.as_ref() {
        Some(pair) => pass.set_bind_group(index, &pair.bindgroup, offsets),
        None => panic!(
            "BufferWrapper is missing bindgroup pair! Cannot set bind group on RenderPass!"
        ),
    }
}
}
/// Struct used for building a BufferWrapper
///
/// ```nobuild
/// let camera_buffer = BufferWrapper::builder()
/// .buffer_usage(wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST)
/// .contents(&[CameraUniform::default()])
/// .label_prefix("Camera")
/// .visibility(wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT)
/// .buffer_dynamic_offset(false)
/// .finish(&device);
/// ```
#[derive(Default)]
pub struct BufferWrapperBuilder {
buffer_usage: Option<wgpu::BufferUsages>,

View File

@ -88,8 +88,8 @@ pub struct BasicRenderer {
render_limits: Limits,
inuse_camera: RenderCamera,
camera_buffer: wgpu::Buffer,
camera_bind_group: wgpu::BindGroup,
camera_buffer: BufferWrapper,
//camera_bind_group: wgpu::BindGroup,
bgl_texture: Arc<BindGroupLayout>,
default_texture: RenderTexture,
@ -143,10 +143,7 @@ impl BasicRenderer {
let render_limits = device.limits();
let surface_caps = surface.get_capabilities(&adapter);
let present_mode = surface_caps.present_modes[0]; /* match surface_caps.present_modes.contains(&wgpu::PresentMode::Immediate) {
true => wgpu::PresentMode::Immediate,
false => surface_caps.present_modes[0]
}; */
let present_mode = surface_caps.present_modes[0];
debug!("present mode: {:?}", present_mode);
@ -174,41 +171,13 @@ impl BasicRenderer {
});
let transform_buffers = TransformBuffers::new(&device);
let camera_buffer = device.create_buffer_init(
&wgpu::util::BufferInitDescriptor {
label: Some("Camera Buffer"),
contents: bytemuck::cast_slice(&[CameraUniform::default()]),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
}
);
let camera_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
entries: &[
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
}
],
label: Some("camera_bind_group_layout"),
});
let camera_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
layout: &camera_bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: camera_buffer.as_entire_binding(),
}
],
label: Some("camera_bind_group"),
});
let camera_buffer = BufferWrapper::builder()
.buffer_usage(wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST)
.contents(&[CameraUniform::default()])
.label_prefix("Camera")
.visibility(wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT)
.buffer_dynamic_offset(false)
.finish(&device);
let depth_texture = RenderTexture::create_depth_texture(&device, &config, "Depth Buffer");
@ -222,7 +191,6 @@ impl BasicRenderer {
.buffer_usage(wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST)
.visibility(wgpu::ShaderStages::FRAGMENT)
.contents(&[MaterialUniform::default()])
//.size(mem::size_of::<MaterialUniform>())
.finish(&device);
let mut s = Self {
@ -248,7 +216,6 @@ impl BasicRenderer {
inuse_camera: RenderCamera::new(size),
camera_buffer,
camera_bind_group,
bgl_texture,
default_texture,
@ -263,7 +230,8 @@ impl BasicRenderer {
let mut pipelines = HashMap::new();
pipelines.insert(0, Arc::new(FullRenderPipeline::new(&s.device, &s.config, &shader,
vec![super::vertex::Vertex::desc(),],
vec![&s.bgl_texture, &s.transform_buffers.bindgroup_layout, &camera_bind_group_layout,
vec![&s.bgl_texture, &s.transform_buffers.bindgroup_layout,
s.camera_buffer.bindgroup_layout().unwrap(),
&s.light_buffers.bindgroup_layout, &s.material_buffer.bindgroup_pair.as_ref().unwrap().layout,
&s.bgl_texture])));
s.render_pipelines = pipelines;
@ -511,13 +479,10 @@ impl Renderer for BasicRenderer {
}
if let Some(camera) = main_world.view_iter::<&mut CameraComponent>().next() {
let view_proj = self.inuse_camera.update_view_projection(&camera);
let (view_proj, view_mat) = self.inuse_camera.calc_view_projection(&camera);
let pos = camera.transform.translation;
let uniform = CameraUniform {
view_proj: *view_proj,
view_pos: glam::Vec4::new(pos.x, pos.y, pos.z, 0.0),
};
self.queue.write_buffer(&self.camera_buffer, 0, bytemuck::cast_slice(&[uniform]));
let uniform = CameraUniform::new(view_mat, *view_proj, pos);
self.camera_buffer.write_buffer(&self.queue, 0, &[uniform]);
} else {
warn!("Missing camera!");
}
@ -588,7 +553,7 @@ impl Renderer for BasicRenderer {
let offset = TransformBuffers::index_offset(&self.render_limits, transform_indices) as u32;
render_pass.set_bind_group(1, bindgroup, &[ offset, offset, ]);
render_pass.set_bind_group(2, &self.camera_bind_group, &[]);
self.camera_buffer.bind_at_bind_group(&mut render_pass, 2, &[]);
render_pass.set_bind_group(3, &self.light_buffers.bindgroup, &[]);
render_pass.set_bind_group(4, &self.material_buffer.bindgroup_pair.as_ref().unwrap().bindgroup, &[]);

View File

@ -16,8 +16,9 @@ struct VertexOutput {
}
struct CameraUniform {
view_mat: mat4x4<f32>,
view_proj: mat4x4<f32>,
view_pos: vec4<f32>,
view_pos: vec3<f32>,
};
struct PointLight {