lighting, blinn-phong

Lauri Räsänen 2022-10-03 22:31:01 +03:00
parent b741202111
commit 89789d5f4a
7 changed files with 420 additions and 103 deletions
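The fragment shader changed by this commit (src/shaders/test.wgsl, at the end of this diff) combines an ambient, a diffuse, and a Blinn-Phong specular term. As a rough CPU-side sketch of the same math (not part of the commit; names are illustrative, using the cgmath types the project already depends on):

// Sketch of the lighting the new fragment shader computes per fragment.
use cgmath::{ElementWise, InnerSpace, Vector3};

fn blinn_phong(
    normal: Vector3<f32>,       // in.world_normal
    frag_pos: Vector3<f32>,     // in.world_position
    cam_pos: Vector3<f32>,      // camera.position.xyz
    light_pos: Vector3<f32>,    // light.position
    light_color: Vector3<f32>,  // light.color
    object_color: Vector3<f32>, // sampled diffuse texture color
) -> Vector3<f32> {
    let light_dir = (light_pos - frag_pos).normalize();
    // ambient: a constant 5 % of the light color
    let ambient = light_color * 0.05;
    // diffuse: Lambert term max(N . L, 0)
    let diffuse = light_color * normal.dot(light_dir).max(0.0);
    // specular: Blinn half-vector H = normalize(V + L), shininess 32
    let view_dir = (cam_pos - frag_pos).normalize();
    let half_dir = (view_dir + light_dir).normalize();
    let specular = light_color * normal.dot(half_dir).max(0.0).powf(32.0);
    // combine the three terms and modulate by the object color
    (ambient + diffuse + specular).mul_element_wise(object_color)
}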

src/core/camera.rs

@@ -53,12 +53,6 @@ impl Camera {
return cgmath::Matrix4::look_to_rh(self.position, forward, up);
}

- pub fn build_view_projection_matrix(&self) -> cgmath::Matrix4<f32> {
- let view = self.get_view_matrix();
- let proj = self.projection.get_matrix();
- return proj * view;
- }

pub fn get_vecs(
&self,
) -> (
@@ -72,33 +66,28 @@ impl Camera {
let forward =
cgmath::Vector3::new(pitch_cos * yaw_cos, pitch_sin, pitch_cos * yaw_sin).normalize();
let right = cgmath::Vector3::new(-yaw_sin, 0.0, yaw_cos).normalize();
- let up = forward.cross(right);
+ let up = right.cross(forward);
return (forward, right, up);
}

pub fn update(&mut self, dt: Duration, controller: &CameraController) {
let dt = dt.as_secs_f32();

self.pitch = clamp(
- self.pitch + controller.deltay * controller.sensitivity * 0.022,
+ self.pitch - controller.deltay * controller.sensitivity * 0.022,
-89.0,
89.0,
);
- self.yaw -= controller.deltax * controller.sensitivity * 0.022;
+ self.yaw += controller.deltax * controller.sensitivity * 0.022;
self.yaw = self.yaw % 360.0;
if self.yaw < 0.0 {
self.yaw = 360.0 + self.yaw;
}

- println!(
- "pitch: {:.6}, yaw: {:.6}, dt: {:.6}",
- self.pitch, self.yaw, dt
- );

let (forward, right, up) = self.get_vecs();
self.position +=
forward * (controller.move_forward - controller.move_backward) * controller.speed * dt;
- // FIXME -right
self.position +=
- -right * (controller.move_right - controller.move_left) * controller.speed * dt;
+ right * (controller.move_right - controller.move_left) * controller.speed * dt;
self.position += up * (controller.move_up - controller.move_down) * controller.speed * dt;
}
}
@@ -106,19 +95,25 @@ impl Camera {
#[repr(C)]
#[derive(Debug, Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]
pub struct CameraUniform {
- pub view_proj: [[f32; 4]; 4],
+ pub view: [[f32; 4]; 4],
+ pub proj: [[f32; 4]; 4],
+ pub position: [f32; 4],
}

impl CameraUniform {
pub fn new() -> Self {
use cgmath::SquareMatrix;
Self {
- view_proj: cgmath::Matrix4::identity().into(),
+ view: cgmath::Matrix4::identity().into(),
+ proj: cgmath::Matrix4::identity().into(),
+ position: [0.0; 4],
}
}

- pub fn update_view_proj(&mut self, camera: &Camera) {
- self.view_proj = camera.build_view_projection_matrix().into();
+ pub fn update(&mut self, camera: &Camera) {
+ self.view = camera.get_view_matrix().into();
+ self.proj = camera.projection.get_matrix().into();
+ self.position = camera.position.to_homogeneous().into();
}
}

src/core/light.rs (new file, 109 lines added)

@@ -0,0 +1,109 @@
use std::ops::Range;
use super::model::{Mesh, Model};
#[repr(C)]
#[derive(Debug, Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]
pub struct LightUniform {
pub position: [f32; 3],
_padding: u32,
pub color: [f32; 3],
_padding2: u32,
}
impl LightUniform {
pub fn new(position: [f32; 3], color: [f32; 3]) -> Self {
return LightUniform {
position: position,
_padding: 0,
color: color,
_padding2: 0,
};
}
}
pub trait DrawLight<'a> {
fn draw_light_mesh(
&mut self,
mesh: &'a Mesh,
camera_bind_group: &'a wgpu::BindGroup,
light_bind_group: &'a wgpu::BindGroup,
);
fn draw_light_mesh_instanced(
&mut self,
mesh: &'a Mesh,
instances: Range<u32>,
camera_bind_group: &'a wgpu::BindGroup,
light_bind_group: &'a wgpu::BindGroup,
);
fn draw_light_model(
&mut self,
model: &'a Model,
camera_bind_group: &'a wgpu::BindGroup,
light_bind_group: &'a wgpu::BindGroup,
);
fn draw_light_model_instanced(
&mut self,
model: &'a Model,
instances: Range<u32>,
camera_bind_group: &'a wgpu::BindGroup,
light_bind_group: &'a wgpu::BindGroup,
);
}
impl<'a, 'b> DrawLight<'b> for wgpu::RenderPass<'a>
where
'b: 'a,
{
fn draw_light_mesh(
&mut self,
mesh: &'b Mesh,
camera_bind_group: &'b wgpu::BindGroup,
light_bind_group: &'b wgpu::BindGroup,
) {
self.draw_light_mesh_instanced(mesh, 0..1, camera_bind_group, light_bind_group);
}
fn draw_light_mesh_instanced(
&mut self,
mesh: &'b Mesh,
instances: Range<u32>,
camera_bind_group: &'b wgpu::BindGroup,
light_bind_group: &'b wgpu::BindGroup,
) {
self.set_vertex_buffer(0, mesh.vertex_buffer.slice(..));
self.set_index_buffer(mesh.index_buffer.slice(..), wgpu::IndexFormat::Uint32);
self.set_bind_group(0, camera_bind_group, &[]);
self.set_bind_group(1, light_bind_group, &[]);
self.draw_indexed(0..mesh.num_elements, 0, instances);
}
fn draw_light_model(
&mut self,
model: &'b Model,
camera_bind_group: &'b wgpu::BindGroup,
light_bind_group: &'b wgpu::BindGroup,
) {
self.draw_light_model_instanced(model, 0..1, camera_bind_group, light_bind_group);
}
fn draw_light_model_instanced(
&mut self,
model: &'b Model,
instances: Range<u32>,
camera_bind_group: &'b wgpu::BindGroup,
light_bind_group: &'b wgpu::BindGroup,
) {
for mesh in &model.meshes {
self.draw_light_mesh_instanced(
mesh,
instances.clone(),
camera_bind_group,
light_bind_group,
);
}
}
}
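A note on the two padding fields (not part of the commit): WGSL uniform buffers align a vec3<f32> to 16 bytes, so each vec3 in the shader-side Light struct occupies 16 bytes, and the explicit u32 padding keeps the Rust layout in sync with it. A quick layout check, assuming the LightUniform definition above:

#[test]
fn light_uniform_matches_wgsl_layout() {
    // 2 x (12-byte vec3 + 4-byte pad) = 32 bytes, matching the WGSL struct size
    assert_eq!(std::mem::size_of::<LightUniform>(), 32);
}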

src/core/mod.rs

@@ -1,5 +1,6 @@
pub mod camera;
pub mod instance;
+ pub mod light;
pub mod model;
pub mod resources;
pub mod state;

src/core/model.rs

@@ -66,6 +66,7 @@ pub trait DrawModel<'a> {
mesh: &'a Mesh,
material: &'a Material,
camera_bind_group: &'a wgpu::BindGroup,
+ light_bind_group: &'a wgpu::BindGroup,
);
fn draw_mesh_instanced(
&mut self,
@@ -73,13 +74,21 @@ pub trait DrawModel<'a> {
material: &'a Material,
instances: Range<u32>,
camera_bind_group: &'a wgpu::BindGroup,
+ light_bind_group: &'a wgpu::BindGroup,
+ );
+ fn draw_model(
+ &mut self,
+ model: &'a Model,
+ camera_bind_group: &'a wgpu::BindGroup,
+ light_bind_group: &'a wgpu::BindGroup,
);
- fn draw_model(&mut self, model: &'a Model, camera_bind_group: &'a wgpu::BindGroup);
fn draw_model_instanced(
&mut self,
model: &'a Model,
instances: Range<u32>,
camera_bind_group: &'a wgpu::BindGroup,
+ light_bind_group: &'a wgpu::BindGroup,
);
}
@@ -92,8 +101,9 @@ where
mesh: &'b Mesh,
material: &'b Material,
camera_bind_group: &'b wgpu::BindGroup,
+ light_bind_group: &'b wgpu::BindGroup,
) {
- self.draw_mesh_instanced(mesh, material, 0..1, camera_bind_group);
+ self.draw_mesh_instanced(mesh, material, 0..1, camera_bind_group, light_bind_group);
}

fn draw_mesh_instanced(
@@ -102,16 +112,23 @@ where
material: &'b Material,
instances: Range<u32>,
camera_bind_group: &'b wgpu::BindGroup,
+ light_bind_group: &'b wgpu::BindGroup,
) {
self.set_vertex_buffer(0, mesh.vertex_buffer.slice(..));
self.set_index_buffer(mesh.index_buffer.slice(..), wgpu::IndexFormat::Uint32);
self.set_bind_group(0, &material.bind_group, &[]);
self.set_bind_group(1, camera_bind_group, &[]);
+ self.set_bind_group(2, light_bind_group, &[]);
self.draw_indexed(0..mesh.num_elements, 0, instances);
}

- fn draw_model(&mut self, model: &'b Model, camera_bind_group: &'b wgpu::BindGroup) {
- self.draw_model_instanced(model, 0..1, camera_bind_group);
+ fn draw_model(
+ &mut self,
+ model: &'b Model,
+ camera_bind_group: &'b wgpu::BindGroup,
+ light_bind_group: &'b wgpu::BindGroup,
+ ) {
+ self.draw_model_instanced(model, 0..1, camera_bind_group, light_bind_group);
}

fn draw_model_instanced(
@@ -119,10 +136,17 @@ where
model: &'b Model,
instances: Range<u32>,
camera_bind_group: &'b wgpu::BindGroup,
+ light_bind_group: &'b wgpu::BindGroup,
) {
for mesh in &model.meshes {
let material = &model.materials[mesh.material];
- self.draw_mesh_instanced(mesh, material, instances.clone(), camera_bind_group);
+ self.draw_mesh_instanced(
+ mesh,
+ material,
+ instances.clone(),
+ camera_bind_group,
+ light_bind_group,
+ );
}
}
}

src/core/state.rs

@@ -1,11 +1,12 @@
use cgmath::prelude::*;
use std::time::Duration;
- use wgpu::{include_wgsl, util::DeviceExt};
+ use wgpu::util::DeviceExt;
use winit::{event::*, window::Window};

use super::camera::{Camera, CameraController, CameraUniform};
use super::instance::{Instance, InstanceRaw};
+ use super::light::{DrawLight, LightUniform};
use super::model::{DrawModel, Model, ModelVertex, Vertex};
use super::resources;
use super::texture::Texture;
@@ -29,6 +30,10 @@ pub struct State {
instance_buffer: wgpu::Buffer,
depth_texture: Texture,
obj_model: Model,
+ light_uniform: LightUniform,
+ light_buffer: wgpu::Buffer,
+ light_render_pipeline: wgpu::RenderPipeline,
+ light_bind_group: wgpu::BindGroup,
}

impl State {
@@ -71,7 +76,7 @@ impl State {
// Camera
let camera = Camera::new(
- (0.0, 0.0, 0.0).into(),
+ (0.0, 4.0, -4.0).into(),
0.0,
0.0,
60.0,
@@ -79,7 +84,7 @@
);

let mut camera_uniform = CameraUniform::new();
- camera_uniform.update_view_proj(&camera);
+ camera_uniform.update(&camera);

let camera_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("Camera Buffer"),
@@ -90,7 +95,7 @@
device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
entries: &[wgpu::BindGroupLayoutEntry {
binding: 0,
- visibility: wgpu::ShaderStages::VERTEX,
+ visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
@@ -111,6 +116,39 @@ impl State {
let camera_controller = CameraController::new(1.0, 2.0);

+ let light_uniform = LightUniform::new([2.0, 4.0, 2.0], [1.0, 1.0, 1.0]);
+
+ // We'll want to update our lights position, so we use COPY_DST
+ let light_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
+ label: Some("Light VB"),
+ contents: bytemuck::cast_slice(&[light_uniform]),
+ usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
+ });
+
+ let light_bind_group_layout =
+ device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
+ entries: &[wgpu::BindGroupLayoutEntry {
+ binding: 0,
+ visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT,
+ ty: wgpu::BindingType::Buffer {
+ ty: wgpu::BufferBindingType::Uniform,
+ has_dynamic_offset: false,
+ min_binding_size: None,
+ },
+ count: None,
+ }],
+ label: None,
+ });
+
+ let light_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
+ layout: &light_bind_group_layout,
+ entries: &[wgpu::BindGroupEntry {
+ binding: 0,
+ resource: light_buffer.as_entire_binding(),
+ }],
+ label: None,
+ });
+
surface.configure(&device, &config);

let texture_bind_group_layout =
@@ -175,62 +213,49 @@ impl State {
let depth_texture = Texture::create_depth_texture(&device, &config, "depth_texture");

- // let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
- // label: Some("Shader"),
- // source: wgpu::ShaderSource::Wgsl(include_str!("../shaders/test.wgsl").into()),
- // });
- let shader = device.create_shader_module(include_wgsl!("../shaders/test.wgsl"));
-
- let render_pipeline_layout =
- device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
- label: Some("Render Pipeline Layout"),
- bind_group_layouts: &[&texture_bind_group_layout, &camera_bind_group_layout],
- push_constant_ranges: &[],
- });
-
- let render_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
- label: Some("Render Pipeline"),
- layout: Some(&render_pipeline_layout),
- vertex: wgpu::VertexState {
- module: &shader,
- entry_point: "vs_main",
- buffers: &[ModelVertex::desc(), InstanceRaw::desc()],
- },
- fragment: Some(wgpu::FragmentState {
- module: &shader,
- entry_point: "fs_main",
- targets: &[Some(wgpu::ColorTargetState {
- format: config.format,
- blend: Some(wgpu::BlendState::REPLACE),
- write_mask: wgpu::ColorWrites::ALL,
- })],
- }),
- primitive: wgpu::PrimitiveState {
- topology: wgpu::PrimitiveTopology::TriangleList,
- strip_index_format: None,
- front_face: wgpu::FrontFace::Ccw,
- cull_mode: Some(wgpu::Face::Back),
- // Setting this to anything other than Fill requires Features::NON_FILL_POLYGON_MODE
- polygon_mode: wgpu::PolygonMode::Fill,
- // Requires Features::DEPTH_CLIP_CONTROL
- unclipped_depth: false,
- // Requires Features::CONSERVATIVE_RASTERIZATION
- conservative: false,
- },
- depth_stencil: Some(wgpu::DepthStencilState {
- format: Texture::DEPTH_FORMAT,
- depth_write_enabled: true,
- depth_compare: wgpu::CompareFunction::Less,
- stencil: wgpu::StencilState::default(),
- bias: wgpu::DepthBiasState::default(),
- }),
- multisample: wgpu::MultisampleState {
- count: 1,
- mask: !0,
- alpha_to_coverage_enabled: false,
- },
- multiview: None,
- });
+ let render_pipeline = {
+ let layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
+ label: Some("Render Pipeline Layout"),
+ bind_group_layouts: &[
+ &texture_bind_group_layout,
+ &camera_bind_group_layout,
+ &light_bind_group_layout,
+ ],
+ push_constant_ranges: &[],
+ });
+ let shader = wgpu::ShaderModuleDescriptor {
+ label: Some("Normal Shader"),
+ source: wgpu::ShaderSource::Wgsl(include_str!("../shaders/test.wgsl").into()),
+ };
+ create_render_pipeline(
+ &device,
+ &layout,
+ config.format,
+ Some(Texture::DEPTH_FORMAT),
+ &[ModelVertex::desc(), InstanceRaw::desc()],
+ shader,
+ )
+ };
+
+ let light_render_pipeline = {
+ let layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
+ label: Some("Light Pipeline Layout"),
+ bind_group_layouts: &[&camera_bind_group_layout, &light_bind_group_layout],
+ push_constant_ranges: &[],
+ });
+ let shader = wgpu::ShaderModuleDescriptor {
+ label: Some("Light Shader"),
+ source: wgpu::ShaderSource::Wgsl(include_str!("../shaders/light.wgsl").into()),
+ };
+ create_render_pipeline(
+ &device,
+ &layout,
+ config.format,
+ Some(Texture::DEPTH_FORMAT),
+ &[ModelVertex::desc()],
+ shader,
+ )
+ };

return Self {
size,
@@ -248,6 +273,10 @@ impl State {
instance_buffer,
depth_texture,
obj_model,
+ light_uniform,
+ light_buffer,
+ light_render_pipeline,
+ light_bind_group,
};
}
@@ -276,14 +305,25 @@ impl State {
}

pub fn update(&mut self, dt: Duration) {
+ // Update camera
self.camera.update(dt, &self.camera_controller);
self.camera_controller.reset(false);
- self.camera_uniform.update_view_proj(&self.camera);
+ self.camera_uniform.update(&self.camera);
self.queue.write_buffer(
&self.camera_buffer,
0,
bytemuck::cast_slice(&[self.camera_uniform]),
);
+
+ // Update the light
+ let old_position: cgmath::Vector3<_> = self.light_uniform.position.into();
+ self.light_uniform.position =
+ (cgmath::Quaternion::from_angle_y(cgmath::Deg(1.0)) * old_position).into();
+ self.queue.write_buffer(
+ &self.light_buffer,
+ 0,
+ bytemuck::cast_slice(&[self.light_uniform]),
+ );
}

pub fn render(&mut self) -> Result<(), wgpu::SurfaceError> {
@@ -300,22 +340,19 @@ impl State {
{
let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("Render Pass"),
- color_attachments: &[
- // This is what @location(0) in the fragment shader targets
- Some(wgpu::RenderPassColorAttachment {
- view: &view,
- resolve_target: None,
- ops: wgpu::Operations {
- load: wgpu::LoadOp::Clear(wgpu::Color {
- r: 0.0,
- g: 0.5,
- b: 0.0,
- a: 1.0,
- }),
- store: true,
- },
- }),
- ],
+ color_attachments: &[Some(wgpu::RenderPassColorAttachment {
+ view: &view,
+ resolve_target: None,
+ ops: wgpu::Operations {
+ load: wgpu::LoadOp::Clear(wgpu::Color {
+ r: 0.0,
+ g: 0.0,
+ b: 0.0,
+ a: 1.0,
+ }),
+ store: true,
+ },
+ })],
depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachment {
view: &self.depth_texture.view,
depth_ops: Some(wgpu::Operations {
@@ -330,10 +367,19 @@ impl State {
render_pass.set_vertex_buffer(1, self.instance_buffer.slice(..));

+ render_pass.set_pipeline(&self.light_render_pipeline);
+ render_pass.draw_light_model(
+ &self.obj_model,
+ &self.camera_bind_group,
+ &self.light_bind_group,
+ );
+
+ render_pass.set_pipeline(&self.render_pipeline);
render_pass.draw_model_instanced(
&self.obj_model,
0..self.instances.len() as u32,
&self.camera_bind_group,
+ &self.light_bind_group,
);
}
@@ -344,3 +390,61 @@ impl State {
return Ok(());
}
}
fn create_render_pipeline(
device: &wgpu::Device,
layout: &wgpu::PipelineLayout,
color_format: wgpu::TextureFormat,
depth_format: Option<wgpu::TextureFormat>,
vertex_layouts: &[wgpu::VertexBufferLayout],
shader: wgpu::ShaderModuleDescriptor,
) -> wgpu::RenderPipeline {
let shader = device.create_shader_module(shader);
return device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some("Render Pipeline"),
layout: Some(layout),
vertex: wgpu::VertexState {
module: &shader,
entry_point: "vs_main",
buffers: vertex_layouts,
},
fragment: Some(wgpu::FragmentState {
module: &shader,
entry_point: "fs_main",
targets: &[Some(wgpu::ColorTargetState {
format: color_format,
blend: Some(wgpu::BlendState {
alpha: wgpu::BlendComponent::REPLACE,
color: wgpu::BlendComponent::REPLACE,
}),
write_mask: wgpu::ColorWrites::ALL,
})],
}),
primitive: wgpu::PrimitiveState {
topology: wgpu::PrimitiveTopology::TriangleList,
strip_index_format: None,
front_face: wgpu::FrontFace::Ccw,
cull_mode: Some(wgpu::Face::Back),
// Setting this to anything other than Fill requires Features::NON_FILL_POLYGON_MODE
polygon_mode: wgpu::PolygonMode::Fill,
// Requires Features::DEPTH_CLIP_CONTROL
unclipped_depth: false,
// Requires Features::CONSERVATIVE_RASTERIZATION
conservative: false,
},
depth_stencil: depth_format.map(|format| wgpu::DepthStencilState {
format,
depth_write_enabled: true,
depth_compare: wgpu::CompareFunction::Less,
stencil: wgpu::StencilState::default(),
bias: wgpu::DepthBiasState::default(),
}),
multisample: wgpu::MultisampleState {
count: 1,
mask: !0,
alpha_to_coverage_enabled: false,
},
multiview: None,
});
}
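Since the new create_render_pipeline helper above takes the depth format as an Option, a pass that needs no depth testing can simply pass None. A hedged usage sketch (not part of the commit; assumes device, layout, config and shader are in scope as in State::new above):

// Build a pipeline without a depth-stencil state.
let no_depth_pipeline = create_render_pipeline(
    &device,
    &layout,
    config.format,
    None, // depth_format: no depth texture bound in this pass
    &[ModelVertex::desc()],
    shader,
);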

src/shaders/light.wgsl (new file, 43 lines added)

@@ -0,0 +1,43 @@
// Vertex shader
struct Camera {
view: mat4x4<f32>,
proj: mat4x4<f32>,
position: vec4<f32>,
}
@group(0) @binding(0)
var<uniform> camera: Camera;
struct Light {
position: vec3<f32>,
color: vec3<f32>,
}
@group(1) @binding(0)
var<uniform> light: Light;
struct VertexInput {
@location(0) position: vec3<f32>,
};
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
@location(0) color: vec3<f32>,
};
@vertex
fn vs_main(
model: VertexInput,
) -> VertexOutput {
let scale = 0.25;
var out: VertexOutput;
out.clip_position = camera.proj * camera.view * vec4<f32>(model.position * scale + light.position, 1.0);
out.color = light.color;
return out;
}
// Fragment shader
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
return vec4<f32>(in.color, 1.0);
}

src/shaders/test.wgsl

@@ -3,24 +3,36 @@ struct InstanceInput {
@location(6) model_matrix_1: vec4<f32>,
@location(7) model_matrix_2: vec4<f32>,
@location(8) model_matrix_3: vec4<f32>,
- };
+ }

// Vertex shader
struct CameraUniform {
- view_proj: mat4x4<f32>,
- };
+ view: mat4x4<f32>,
+ proj: mat4x4<f32>,
+ position: vec4<f32>,
+ }
@group(1) @binding(0)
var<uniform> camera: CameraUniform;

+ struct Light {
+ position: vec3<f32>,
+ color: vec3<f32>,
+ }
+ @group(2) @binding(0)
+ var<uniform> light: Light;
+
struct VertexInput {
@location(0) position: vec3<f32>,
@location(1) tex_coords: vec2<f32>,
+ @location(2) normal: vec3<f32>,
}

struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
@location(0) tex_coords: vec2<f32>,
+ @location(1) world_normal: vec3<f32>,
+ @location(2) world_position: vec3<f32>,
}

@vertex
@@ -34,11 +46,20 @@ fn vs_main(
instance.model_matrix_2,
instance.model_matrix_3,
);

var out: VertexOutput;
out.tex_coords = model.tex_coords;
- out.clip_position = camera.view_proj * model_matrix * vec4<f32>(model.position, 1.0);
+ out.world_normal = normalize((model_matrix * vec4<f32>(model.normal, 0.0)).xyz);
+ var world_position: vec4<f32> = model_matrix * vec4<f32>(model.position, 1.0);
+ out.world_position = world_position.xyz;
+ out.clip_position = camera.proj * camera.view * world_position;
return out;
}

// Fragment shader
@@ -49,5 +70,25 @@ var s_diffuse: sampler;
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
- return textureSample(t_diffuse, s_diffuse, in.tex_coords);
+ let object_color: vec4<f32> = textureSample(t_diffuse, s_diffuse, in.tex_coords);
+ let light_dir = normalize(light.position - in.world_position);
+
+ // ambient
+ let ambient_strength = 0.05;
+ let ambient_color = light.color * ambient_strength;
+
+ // diffuse
+ let diffuse_strength = max(dot(in.world_normal, light_dir), 0.0);
+ let diffuse_color = light.color * diffuse_strength;
+
+ // specular
+ let view_dir = normalize(camera.position.xyz - in.world_position);
+ let half_dir = normalize(view_dir + light_dir);
+ let specular_strength = pow(max(dot(in.world_normal, half_dir), 0.0), 32.0);
+ let specular_color = specular_strength * light.color;
+
+ let result = (ambient_color + diffuse_color + specular_color) * object_color.xyz;
+ return vec4<f32>(result, object_color.a);
}