NAHY2GVWYMPBD7O4R3AEHDHFD3A2MVCISDHDMMRYB6K5ZERSZSAQC
QRA2RTSWERK2GJVKOH4F25KBZSNDHYVLGHSKL7IVSLD4QPMDOXDAC
LK56CXDULK2QZHR4QOKKDZYTDJ6JZU676CLMUT4HOU32IUQG2IEAC
KWCDYED7S3JSVY7XLRPEJBIWW52NEZ4SSXRRVP5DV7MCZQU7LPXAC
3GAL4KPRWFKBWY2AT4YE53JHQGT6H46UEKUPIT4CHKZY3NSVLHZAC
LJD76LBNKTWSAHLRGL7EXNMM5HYRSOD6VXWYQSX2XVK7TQROTZ6QC
HTDZ53HWD4X3DSFZX7MMC7DPFJNHQPGBVLJD2S325SQRRVIQ3CLAC
HUZ7YFN6VIDK3MFB45AC5HP5JLT7XKJQ4SMNXTFY2L5TRTFY4OHAC
34CROJEM52NB7TGBCGGMIYAQ2AZUI44FWO25MGYYUSCMHLCLJNAQC
BYFMGWJ5ZPXZTCIFDHP66KK7JTEKT2NO7HR2XY7T5WRAAG3L5WYQC
EEO6C4YSWFW3JOZ3JH6K7X4TKAUTVJD53CQNYZHI2QY6REGEXUCAC
O5P6HCPWGMGJBFJEMC3SYQJ5OEW2AQV4KEJCMRVTTS3K6M45Z3BAC
OM6475B3O4T22Z22CPMOW4AN54JWQ34HXPMIV26MLRXRIKKBSTAAC
L6RIUKGLJZLAOKFGUDTZKBPP4HUBPEZAKHJEQHO34WFF62AB2ZIQC
use encase::{ShaderType, UniformBuffer};
use naga_oil::compose;
use wgpu::util::DeviceExt;
use winit::window::Window;
/// Information sent to the shader about our camera
#[derive(ShaderType)]
struct CameraUniform {
    // Combined view-projection matrix uploaded as a uniform.
    view_proj: glam::Mat4,
}
impl CameraUniform {
    /// Starts as the identity matrix; the real matrix is filled in by
    /// `update_view_proj` before rendering.
    fn new() -> Self {
        Self {
            view_proj: glam::Mat4::IDENTITY,
        }
    }
    /// Recomputes the cached view-projection matrix from the camera.
    fn update_view_proj(&mut self, camera: &Camera) {
        self.view_proj = camera.build_view_projection_matrix();
    }
}
// world_texture: wgpu::Texture,
world_texture_view: wgpu::TextureView,
// world_texture_sampler: wgpu::Sampler,
world_texture_bind_group: wgpu::BindGroup,
camera_uniform: CameraUniform,
camera_buffer: wgpu::Buffer,
camera_bind_group: wgpu::BindGroup,
render_options_buffer: wgpu::Buffer,
render_options_bind_group: wgpu::BindGroup,
/// Renders from the world texture to screen space within the bounds of the camera
// TODO eventually handle screen space effects
world_render_pipeline: wgpu::RenderPipeline,
// num_world_vertices: u32,
num_world_indices: u32,
world_vertex_buffer: wgpu::Buffer,
world_index_buffer: wgpu::Buffer,
}
/// Most general type of vertex
/// Specifies a position, then tex coordinates, then an associated color (w/ alpha)
#[repr(C)]
#[derive(Copy, Clone, Debug, bytemuck::Pod, bytemuck::Zeroable)]
struct Vertex {
    position: [f32; 3],
    tex_coords: [f32; 2],
    color: [f32; 4],
}
impl Vertex {
    /// Vertex buffer layout matching the `#[repr(C)]` field order above:
    /// position (3 floats), tex coords (2 floats), color (4 floats).
    fn desc() -> wgpu::VertexBufferLayout<'static> {
        wgpu::VertexBufferLayout {
            array_stride: std::mem::size_of::<Vertex>() as wgpu::BufferAddress,
            step_mode: wgpu::VertexStepMode::Vertex,
            attributes: &[
                // @location(0): position
                wgpu::VertexAttribute {
                    offset: 0,
                    shader_location: 0,
                    format: wgpu::VertexFormat::Float32x3,
                },
                // @location(1): tex_coords, starts after the 3 position floats
                wgpu::VertexAttribute {
                    offset: std::mem::size_of::<[f32; 3]>() as wgpu::BufferAddress,
                    shader_location: 1,
                    format: wgpu::VertexFormat::Float32x2,
                },
                // @location(2): color; [f32; 5] = the 3 + 2 floats preceding it
                wgpu::VertexAttribute {
                    offset: std::mem::size_of::<[f32; 5]>() as wgpu::BufferAddress,
                    shader_location: 2,
                    format: wgpu::VertexFormat::Float32x4,
                },
            ],
        }
    }
}
/// Index list for the two triangles of the world quad.
/// `'static` is implied on const references, so it is omitted
/// (clippy: `redundant_static_lifetimes`).
const INDICES: &[u16] = &[0, 2, 3, 0, 1, 2];
/// `w` and `h` are percentages of the screen to use to render the world, (0.0, 1.0]
///
/// Returns the four corners of the world quad (bottom-left, bottom-right,
/// top-right, top-left), all fully opaque white. Note the texture v axis
/// points down while clip-space y points up, so v is flipped.
fn generate_world_vertices(w: f32, h: f32) -> Vec<Vertex> {
    let verts = vec![
        // bottom-left
        Vertex {
            position: [-w, -h, 0.0],
            tex_coords: [0.0, 1.0],
            color: [1.0, 1.0, 1.0, 1.0],
        },
        // bottom-right
        Vertex {
            position: [w, -h, 0.0],
            tex_coords: [1.0, 1.0],
            color: [1.0, 1.0, 1.0, 1.0],
        },
        // top-right
        Vertex {
            position: [w, h, 0.0],
            tex_coords: [1.0, 0.0],
            color: [1.0, 1.0, 1.0, 1.0],
        },
        // top-left
        Vertex {
            position: [-w, h, 0.0],
            tex_coords: [0.0, 0.0],
            color: [1.0, 1.0, 1.0, 1.0],
        },
    ];
    verts
}
// HDR module
Ok(composer)
}
/// Borrows the view of the offscreen world texture so other passes can
/// render into it.
pub fn world_texture_view(&self) -> &wgpu::TextureView {
    &self.world_texture_view
}
device: &wgpu::Device,
texture_format: wgpu::TextureFormat,
world_texture_format: wgpu::TextureFormat,
) -> color_eyre::Result<Self> {
let _ = device;
let _ = texture_format;
let num_world_indices = Self::INDICES.len() as u32;
let world_vertices = Self::generate_world_vertices(1.0, 1.0);
// let num_world_vertices = world_vertices.len() as u32;
let world_vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("world vertex buffer"),
contents: bytemuck::cast_slice(&world_vertices),
usage: wgpu::BufferUsages::VERTEX | wgpu::BufferUsages::COPY_DST,
});
let world_index_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("world vertex buffer"),
contents: bytemuck::cast_slice(Self::INDICES),
usage: wgpu::BufferUsages::INDEX,
});
let world_texture = device.create_texture(&wgpu::TextureDescriptor {
size: wgpu::Extent3d {
depth_or_array_layers: 1,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: world_texture_format,
usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::TEXTURE_BINDING,
label: Some("world texture"),
view_formats: &[],
});
let world_texture_view = world_texture.create_view(&wgpu::TextureViewDescriptor::default());
let world_texture_sampler = device.create_sampler(&wgpu::SamplerDescriptor {
address_mode_u: wgpu::AddressMode::ClampToEdge,
address_mode_v: wgpu::AddressMode::ClampToEdge,
address_mode_w: wgpu::AddressMode::ClampToEdge,
mag_filter: wgpu::FilterMode::Nearest,
min_filter: wgpu::FilterMode::Nearest,
mipmap_filter: wgpu::FilterMode::Nearest,
..Default::default()
});
let world_texture_bind_group_layout =
device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: Some("world texture bind group layout"),
entries: &[
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Texture {
sample_type: wgpu::TextureSampleType::Float { filterable: true },
view_dimension: wgpu::TextureViewDimension::D2,
multisampled: false,
},
count: None,
},
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
count: None,
},
],
});
let world_texture_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("world texture bind group"),
layout: &world_texture_bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(&world_texture_view),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::Sampler(&world_texture_sampler),
},
],
});
let camera_uniform = CameraUniform::new();
let mut uniform_buffer = UniformBuffer::new(Vec::new());
uniform_buffer.write(&camera_uniform).unwrap();
let camera_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("camera buffer"),
contents: &uniform_buffer.into_inner(),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
});
let camera_bind_group_layout =
device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: Some("camera bind group layout"),
entries: &[wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::VERTEX,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
}],
});
let camera_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
layout: &camera_bind_group_layout,
entries: &[wgpu::BindGroupEntry {
binding: 0,
resource: camera_buffer.as_entire_binding(),
}],
label: Some("camera bind group"),
});
let render_options = RenderOptions::default();
let mut render_options_uniform_buffer = UniformBuffer::new(Vec::new());
render_options_uniform_buffer
.write(&render_options)
.unwrap();
let render_options_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("render options buffer"),
contents: &render_options_uniform_buffer.into_inner(),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
});
let render_options_bind_group_layout =
device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
entries: &[wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
}],
label: Some("render options bind group layout"),
});
let render_options_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("render options bind group"),
layout: &render_options_bind_group_layout,
entries: &[wgpu::BindGroupEntry {
binding: 0,
resource: render_options_buffer.as_entire_binding(),
}],
});
let composed_world_shader = composer.make_naga_module(compose::NagaModuleDescriptor {
source: include_str!("../../world_shader.wgsl"),
file_path: "world_shader.wgsl",
shader_type: compose::ShaderType::Wgsl,
shader_defs: HashMap::new(),
additional_imports: &[],
})?;
let mut validator = naga::valid::Validator::new(
naga::valid::ValidationFlags::default(),
naga::valid::Capabilities::default(),
);
let composed_world_shader_module_info = validator.validate(&composed_world_shader)?;
let shader_source = naga::back::wgsl::write_string(
&composed_world_shader,
&composed_world_shader_module_info,
naga::back::wgsl::WriterFlags::EXPLICIT_TYPES,
)?;
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: Some("world shader"),
source: wgpu::ShaderSource::Wgsl(shader_source.into()),
});
let world_render_pipeline_layout =
device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some("world render pipeline layout"),
bind_group_layouts: &[
&world_texture_bind_group_layout,
&camera_bind_group_layout,
&render_options_bind_group_layout,
],
push_constant_ranges: &[],
});
let world_render_pipeline =
device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some("world render pipeline"),
layout: Some(&world_render_pipeline_layout),
vertex: wgpu::VertexState {
module: &shader,
entry_point: "vs_main",
buffers: &[Vertex::desc()],
},
primitive: wgpu::PrimitiveState {
topology: wgpu::PrimitiveTopology::TriangleList,
strip_index_format: None,
front_face: wgpu::FrontFace::Ccw,
cull_mode: Some(wgpu::Face::Back),
polygon_mode: wgpu::PolygonMode::Fill,
unclipped_depth: false,
conservative: false,
},
depth_stencil: None,
multisample: wgpu::MultisampleState {
count: 1,
mask: !0,
alpha_to_coverage_enabled: false,
},
fragment: Some(wgpu::FragmentState {
module: &shader,
entry_point: "fs_main",
targets: &[Some(wgpu::ColorTargetState {
format: texture_format,
blend: Some(wgpu::BlendState::REPLACE),
write_mask: wgpu::ColorWrites::ALL,
})],
}),
multiview: None,
});
Ok(Self {
world_texture_view,
world_texture_bind_group,
camera_uniform,
camera_buffer,
camera_bind_group,
render_options_buffer,
render_options_bind_group,
world_render_pipeline,
num_world_indices,
world_vertex_buffer,
world_index_buffer,
})
}
pub fn prepare(
&mut self,
_device: &wgpu::Device,
queue: &wgpu::Queue,
window: &Window,
camera: &mut Camera,
render_options: &RenderOptions,
_document: &Document,
) {
self.prepared
.store(true, std::sync::atomic::Ordering::SeqCst);
// Update the world vertex buffer
let new_world_vertices = Self::generate_world_vertices(w, h);
queue.write_buffer(
&self.world_vertex_buffer,
0,
bytemuck::cast_slice(&new_world_vertices),
);
}
// Update the camera and camera buffer
self.camera_uniform.update_view_proj(camera);
let mut uniform_buffer = UniformBuffer::new(Vec::new());
uniform_buffer.write(&self.camera_uniform).unwrap();
queue.write_buffer(&self.camera_buffer, 0, &uniform_buffer.into_inner());
// Update render options
let mut uniform_buffer = UniformBuffer::new(Vec::new());
uniform_buffer.write(render_options).unwrap();
queue.write_buffer(&self.render_options_buffer, 0, &uniform_buffer.into_inner());
}
render_pass.set_pipeline(&self.world_render_pipeline);
render_pass.set_bind_group(0, &self.world_texture_bind_group, &[]);
render_pass.set_bind_group(1, &self.camera_bind_group, &[]);
render_pass.set_bind_group(2, &self.render_options_bind_group, &[]);
render_pass.set_vertex_buffer(0, self.world_vertex_buffer.slice(..));
render_pass.set_index_buffer(self.world_index_buffer.slice(..), wgpu::IndexFormat::Uint16);
render_pass.draw_indexed(0..self.num_world_indices, 0, 0..1);
}
}
pub fn render<'rpass>(&'rpass self, render_pass: &mut wgpu::RenderPass<'rpass>) {
// I *know* I'm gonna forget
if !self
.prepared
.swap(false, std::sync::atomic::Ordering::SeqCst)
{
log::error!("`WorldRenderer::prepare` was not called, so no data can be updated.");
}
if let Some((w, h)) = camera.update_aspect(window.inner_size()) {
prepared: AtomicBool::new(false),
width: MAX_DOCUMENT_SIZE.x,
height: MAX_DOCUMENT_SIZE.y,
let mut composer = Self::create_composer()?;
pub fn new(
// let hdr_module_src = include_str!("../../hdr.wgsl");
// let (name, imports, shader_defs) = compose::get_preprocessor_data(hdr_module_src);
// composer.add_composable_module(compose::ComposableModuleDescriptor {
// as_name: name,
// source: hdr_module_src,
// file_path: "hdr.wgsl",
// language: compose::ShaderLanguage::Wgsl,
// additional_imports: &imports,
// shader_defs,
// })?;
fn create_composer() -> Result<compose::Composer, compose::ComposerError> {
let composer = compose::Composer::default();
/// All document cells must fit within this area
pub const MAX_DOCUMENT_SIZE: glam::UVec2 = glam::UVec2::new(2048, 2048);
impl DocumentRenderer {
prepared: AtomicBool,
pub struct DocumentRenderer {
use crate::{
document::Document,
screen::{Camera, RenderOptions},
};
use std::{collections::HashMap, sync::atomic::AtomicBool};
use std::{collections::HashMap, sync::atomic::AtomicBool};
use encase::{ShaderType, UniformBuffer};
use naga_oil::compose;
use wgpu::util::DeviceExt;
use winit::window::Window;
use crate::{
document::Document,
screen::{Camera, RenderOptions},
};
/// Information sent to the shader about our camera
#[derive(ShaderType)]
struct CameraUniform {
    // Combined view-projection matrix uploaded as a uniform.
    view_proj: glam::Mat4,
}
impl CameraUniform {
fn new() -> Self {
Self {
view_proj: glam::Mat4::IDENTITY,
}
}
fn update_view_proj(&mut self, camera: &Camera) {
self.view_proj = camera.build_view_projection_matrix();
}
}
/// Renders a document's offscreen world texture to the screen.
///
/// Owns the world texture, the camera / render-option uniforms, and the
/// pipeline that blits the world quad into screen space.
pub struct DocumentRenderer {
    // world_texture: wgpu::Texture,
    // View into the offscreen texture the world is rendered into.
    world_texture_view: wgpu::TextureView,
    // world_texture_sampler: wgpu::Sampler,
    // Texture + sampler for the fragment stage (bind group 0).
    world_texture_bind_group: wgpu::BindGroup,
    // CPU-side copy of the camera uniform; re-uploaded each `prepare`.
    camera_uniform: CameraUniform,
    camera_buffer: wgpu::Buffer,
    // Camera uniform for the vertex stage (bind group 1).
    camera_bind_group: wgpu::BindGroup,
    render_options_buffer: wgpu::Buffer,
    // Render options uniform for the fragment stage (bind group 2).
    render_options_bind_group: wgpu::BindGroup,
    /// Renders from the world texture to screen space within the bounds of the camera
    // TODO eventually handle screen space effects
    world_render_pipeline: wgpu::RenderPipeline,
    // num_world_vertices: u32,
    num_world_indices: u32,
    world_vertex_buffer: wgpu::Buffer,
    world_index_buffer: wgpu::Buffer,
    // Set by `prepare`, cleared by `render`; catches a missed `prepare` call.
    prepared: AtomicBool,
}
/// Most general type of vertex
/// Specifies a position, then tex coordinates, then an associated color (w/ alpha)
// `#[repr(C)]` + Pod/Zeroable allow casting slices of Vertex straight to bytes
// for the GPU vertex buffer.
#[repr(C)]
#[derive(Copy, Clone, Debug, bytemuck::Pod, bytemuck::Zeroable)]
struct Vertex {
    position: [f32; 3],
    tex_coords: [f32; 2],
    color: [f32; 4],
}
impl Vertex {
    /// Vertex buffer layout matching the `#[repr(C)]` field order of
    /// [`Vertex`]: position (3 floats), tex coords (2 floats), color (4 floats).
    fn desc() -> wgpu::VertexBufferLayout<'static> {
        // Byte offset of `tex_coords`: just past the 3 position floats.
        const TEX_COORDS_OFFSET: wgpu::BufferAddress =
            std::mem::size_of::<[f32; 3]>() as wgpu::BufferAddress;
        // Byte offset of `color`: past the 3 + 2 floats preceding it.
        const COLOR_OFFSET: wgpu::BufferAddress =
            std::mem::size_of::<[f32; 5]>() as wgpu::BufferAddress;
        wgpu::VertexBufferLayout {
            array_stride: std::mem::size_of::<Vertex>() as wgpu::BufferAddress,
            step_mode: wgpu::VertexStepMode::Vertex,
            attributes: &[
                // @location(0): position
                wgpu::VertexAttribute {
                    format: wgpu::VertexFormat::Float32x3,
                    offset: 0,
                    shader_location: 0,
                },
                // @location(1): tex_coords
                wgpu::VertexAttribute {
                    format: wgpu::VertexFormat::Float32x2,
                    offset: TEX_COORDS_OFFSET,
                    shader_location: 1,
                },
                // @location(2): color
                wgpu::VertexAttribute {
                    format: wgpu::VertexFormat::Float32x4,
                    offset: COLOR_OFFSET,
                    shader_location: 2,
                },
            ],
        }
    }
}
/// All document cells must fit within this area
pub const MAX_DOCUMENT_SIZE: glam::UVec2 = glam::UVec2::new(2048, 2048);
impl DocumentRenderer {
const INDICES: &'static [u16] = &[0, 2, 3, 0, 1, 2];
/// `w` and `h` are percentages of the screen to use to render the world, (0.0, 1.0]
fn generate_world_vertices(w: f32, h: f32) -> Vec<Vertex> {
let verts = vec![
Vertex {
position: [-w, -h, 0.0],
tex_coords: [0.0, 1.0],
color: [1.0, 1.0, 1.0, 1.0],
},
Vertex {
position: [w, -h, 0.0],
tex_coords: [1.0, 1.0],
color: [1.0, 1.0, 1.0, 1.0],
},
Vertex {
position: [w, h, 0.0],
tex_coords: [1.0, 0.0],
color: [1.0, 1.0, 1.0, 1.0],
},
Vertex {
position: [-w, h, 0.0],
tex_coords: [0.0, 0.0],
color: [1.0, 1.0, 1.0, 1.0],
},
];
verts
}
fn create_composer() -> Result<compose::Composer, compose::ComposerError> {
let composer = compose::Composer::default();
// HDR module
// let hdr_module_src = include_str!("../../hdr.wgsl");
// let (name, imports, shader_defs) = compose::get_preprocessor_data(hdr_module_src);
// composer.add_composable_module(compose::ComposableModuleDescriptor {
// as_name: name,
// source: hdr_module_src,
// file_path: "hdr.wgsl",
// language: compose::ShaderLanguage::Wgsl,
// additional_imports: &imports,
// shader_defs,
// })?;
Ok(composer)
}
    /// Borrows the view of the offscreen world texture so other passes can
    /// render into it.
    pub fn world_texture_view(&self) -> &wgpu::TextureView {
        &self.world_texture_view
    }
pub fn new(
device: &wgpu::Device,
texture_format: wgpu::TextureFormat,
world_texture_format: wgpu::TextureFormat,
) -> color_eyre::Result<Self> {
let _ = device;
let _ = texture_format;
let num_world_indices = Self::INDICES.len() as u32;
let mut composer = Self::create_composer()?;
let world_vertices = Self::generate_world_vertices(1.0, 1.0);
// let num_world_vertices = world_vertices.len() as u32;
let world_vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("world vertex buffer"),
contents: bytemuck::cast_slice(&world_vertices),
usage: wgpu::BufferUsages::VERTEX | wgpu::BufferUsages::COPY_DST,
});
let world_index_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("world vertex buffer"),
contents: bytemuck::cast_slice(Self::INDICES),
usage: wgpu::BufferUsages::INDEX,
});
let world_texture = device.create_texture(&wgpu::TextureDescriptor {
size: wgpu::Extent3d {
width: MAX_DOCUMENT_SIZE.x,
height: MAX_DOCUMENT_SIZE.y,
depth_or_array_layers: 1,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: world_texture_format,
usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::TEXTURE_BINDING,
label: Some("world texture"),
view_formats: &[],
});
let world_texture_view = world_texture.create_view(&wgpu::TextureViewDescriptor::default());
let world_texture_sampler = device.create_sampler(&wgpu::SamplerDescriptor {
address_mode_u: wgpu::AddressMode::ClampToEdge,
address_mode_v: wgpu::AddressMode::ClampToEdge,
address_mode_w: wgpu::AddressMode::ClampToEdge,
mag_filter: wgpu::FilterMode::Nearest,
min_filter: wgpu::FilterMode::Nearest,
mipmap_filter: wgpu::FilterMode::Nearest,
..Default::default()
});
let world_texture_bind_group_layout =
device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: Some("world texture bind group layout"),
entries: &[
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Texture {
sample_type: wgpu::TextureSampleType::Float { filterable: true },
view_dimension: wgpu::TextureViewDimension::D2,
multisampled: false,
},
count: None,
},
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
count: None,
},
],
});
let world_texture_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("world texture bind group"),
layout: &world_texture_bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(&world_texture_view),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::Sampler(&world_texture_sampler),
},
],
});
let camera_uniform = CameraUniform::new();
let mut uniform_buffer = UniformBuffer::new(Vec::new());
uniform_buffer.write(&camera_uniform).unwrap();
let camera_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("camera buffer"),
contents: &uniform_buffer.into_inner(),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
});
let camera_bind_group_layout =
device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: Some("camera bind group layout"),
entries: &[wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::VERTEX,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
}],
});
let camera_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
layout: &camera_bind_group_layout,
entries: &[wgpu::BindGroupEntry {
binding: 0,
resource: camera_buffer.as_entire_binding(),
}],
label: Some("camera bind group"),
});
let render_options = RenderOptions::default();
let mut render_options_uniform_buffer = UniformBuffer::new(Vec::new());
render_options_uniform_buffer
.write(&render_options)
.unwrap();
let render_options_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("render options buffer"),
contents: &render_options_uniform_buffer.into_inner(),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
});
let render_options_bind_group_layout =
device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
entries: &[wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
}],
label: Some("render options bind group layout"),
});
let render_options_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("render options bind group"),
layout: &render_options_bind_group_layout,
entries: &[wgpu::BindGroupEntry {
binding: 0,
resource: render_options_buffer.as_entire_binding(),
}],
});
let composed_world_shader = composer.make_naga_module(compose::NagaModuleDescriptor {
source: include_str!("../../world_shader.wgsl"),
file_path: "world_shader.wgsl",
shader_type: compose::ShaderType::Wgsl,
shader_defs: HashMap::new(),
additional_imports: &[],
})?;
let mut validator = naga::valid::Validator::new(
naga::valid::ValidationFlags::default(),
naga::valid::Capabilities::default(),
);
let composed_world_shader_module_info = validator.validate(&composed_world_shader)?;
let shader_source = naga::back::wgsl::write_string(
&composed_world_shader,
&composed_world_shader_module_info,
naga::back::wgsl::WriterFlags::EXPLICIT_TYPES,
)?;
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: Some("world shader"),
source: wgpu::ShaderSource::Wgsl(shader_source.into()),
});
let world_render_pipeline_layout =
device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some("world render pipeline layout"),
bind_group_layouts: &[
&world_texture_bind_group_layout,
&camera_bind_group_layout,
&render_options_bind_group_layout,
],
push_constant_ranges: &[],
});
let world_render_pipeline =
device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some("world render pipeline"),
layout: Some(&world_render_pipeline_layout),
vertex: wgpu::VertexState {
module: &shader,
entry_point: "vs_main",
buffers: &[Vertex::desc()],
},
primitive: wgpu::PrimitiveState {
topology: wgpu::PrimitiveTopology::TriangleList,
strip_index_format: None,
front_face: wgpu::FrontFace::Ccw,
cull_mode: Some(wgpu::Face::Back),
polygon_mode: wgpu::PolygonMode::Fill,
unclipped_depth: false,
conservative: false,
},
depth_stencil: None,
multisample: wgpu::MultisampleState {
count: 1,
mask: !0,
alpha_to_coverage_enabled: false,
},
fragment: Some(wgpu::FragmentState {
module: &shader,
entry_point: "fs_main",
targets: &[Some(wgpu::ColorTargetState {
format: texture_format,
blend: Some(wgpu::BlendState::REPLACE),
write_mask: wgpu::ColorWrites::ALL,
})],
}),
multiview: None,
});
Ok(Self {
world_texture_view,
world_texture_bind_group,
camera_uniform,
camera_buffer,
camera_bind_group,
render_options_buffer,
render_options_bind_group,
world_render_pipeline,
num_world_indices,
world_vertex_buffer,
world_index_buffer,
prepared: AtomicBool::new(false),
})
}
    /// Uploads per-frame data (world quad vertices, camera matrix, render
    /// options) to the GPU. Must be called before each
    /// [`DocumentRenderer::render`]; `render` logs an error otherwise.
    pub fn prepare(
        &mut self,
        _device: &wgpu::Device,
        queue: &wgpu::Queue,
        window: &Window,
        camera: &mut Camera,
        render_options: &RenderOptions,
        _document: &Document,
    ) {
        // Mark that prepare ran; `render` swaps this flag back to false.
        self.prepared
            .store(true, std::sync::atomic::Ordering::SeqCst);
        // `update_aspect` presumably yields the new (w, h) screen fractions
        // only when the aspect ratio changed — confirm against Camera.
        if let Some((w, h)) = camera.update_aspect(window.inner_size()) {
            // Update the world vertex buffer
            let new_world_vertices = Self::generate_world_vertices(w, h);
            queue.write_buffer(
                &self.world_vertex_buffer,
                0,
                bytemuck::cast_slice(&new_world_vertices),
            );
        }
        // Update the camera and camera buffer
        self.camera_uniform.update_view_proj(camera);
        let mut uniform_buffer = UniformBuffer::new(Vec::new());
        uniform_buffer.write(&self.camera_uniform).unwrap();
        queue.write_buffer(&self.camera_buffer, 0, &uniform_buffer.into_inner());
        // Update render options
        let mut uniform_buffer = UniformBuffer::new(Vec::new());
        uniform_buffer.write(render_options).unwrap();
        queue.write_buffer(&self.render_options_buffer, 0, &uniform_buffer.into_inner());
    }
pub fn render<'rpass>(&'rpass self, render_pass: &mut wgpu::RenderPass<'rpass>) {
// I *know* I'm gonna forget
if !self
.prepared
.swap(false, std::sync::atomic::Ordering::SeqCst)
{
log::error!("`WorldRenderer::prepare` was not called, so no data could be updated.");
}
render_pass.set_pipeline(&self.world_render_pipeline);
render_pass.set_bind_group(0, &self.world_texture_bind_group, &[]);
render_pass.set_bind_group(1, &self.camera_bind_group, &[]);
render_pass.set_bind_group(2, &self.render_options_bind_group, &[]);
render_pass.set_vertex_buffer(0, self.world_vertex_buffer.slice(..));
render_pass.set_index_buffer(self.world_index_buffer.slice(..), wgpu::IndexFormat::Uint16);
render_pass.draw_indexed(0..self.num_world_indices, 0, 0..1);
}
}
use crate::{
screen::{Camera, RenderError, RenderInput, RenderOptions, Screen},
text::{TextSystem, TextSystemData},
};
use winit::{
event::{ElementState, KeyEvent, WindowEvent},
keyboard::{Key, NamedKey},
window::Window,
};
/// A document laid out on a grid of cells.
// NOTE(review): units of cell_width/cell_height aren't visible here —
// presumably pixels; confirm against the renderer.
pub struct Document {
    cell_width: u32,
    cell_height: u32,
}
/// Top-level application object: owns the screen, the egui integration, the
/// text system, and the mutable project state.
pub struct Project {
    screen: Screen,
    egui: egui::Context,
    egui_winit: egui_winit::State,
    internal_state: ProjectState,
    text: TextSystem,
    // internal controls
    // Whether the egui "Debug Tools" window is shown (toggled with F1).
    show_debug: bool,
}
/// Mutable state of the project that rendering and input act upon.
pub struct ProjectState {
    pub camera: Camera,
    pub render_options: RenderOptions,
    pub document: Document,
}
impl Project {
    /// Builds a new project around `window`: creates the egui context and its
    /// winit bridge, the screen, the text system, and a default document.
    ///
    /// # Errors
    /// Propagates failures from `Screen::new` and `TextSystem::new`.
    pub async fn new(window: Window) -> color_eyre::Result<Self> {
        let camera = Camera::default();
        let render_options = RenderOptions::default();
        // The *only* time we need to manually do this, all others should use extract_view!
        let egui = egui::Context::default();
        let egui_winit =
            egui_winit::State::new(egui.clone(), egui::ViewportId::ROOT, &window, None, None);
        // `Screen::new` consumes the window, so it must come after egui_winit
        // borrowed it above.
        let screen = Screen::new(window).await?;
        let document = Document {
            cell_width: 100,
            cell_height: 100,
        };
        Ok(Self {
            egui,
            egui_winit,
            screen,
            // systems
            text: TextSystem::new()?,
            show_debug: false,
            internal_state: ProjectState {
                camera,
                render_options,
                document,
            },
        })
    }
    /// Creates (or recreates) the screen's rendering surface.
    pub async fn create_surface(&mut self) -> color_eyre::Result<()> {
        self.screen.create_surface().await
    }
    /// Borrows the screen.
    pub fn screen(&self) -> &Screen {
        &self.screen
    }
    /// Mutably borrows the screen.
    pub fn screen_mut(&mut self) -> &mut Screen {
        &mut self.screen
    }
    /// Runs egui code
    ///
    /// Drives one egui frame: syncs viewport info, runs the UI closure
    /// (currently just the debug window), forwards platform output back to
    /// winit, and returns the frame output plus tessellated paint jobs.
    pub fn gui_egui(&mut self) -> (egui::FullOutput, Vec<egui::ClippedPrimitive>) {
        // Keep egui's idea of the root viewport in sync with the window.
        egui_winit::update_viewport_info(
            self.egui_winit
                .egui_input_mut()
                .viewports
                .get_mut(&egui::ViewportId::ROOT)
                .unwrap(),
            &self.egui,
            self.screen.window(),
        );
        let input = self.egui_winit.take_egui_input(self.screen.window());
        let full_output = self.egui.run(input, |ctx| {
            egui::Window::new("Debug Tools")
                .open(&mut self.show_debug)
                .show(ctx, |ui| {
                    ui.horizontal(|ui| {
                        // Read-modify-write so the toggle reflects the actual
                        // screen vsync state each frame.
                        let mut vsync = self.screen.is_vsync();
                        let text = if vsync { "yes" } else { "no" };
                        ui.label("VSync");
                        ui.toggle_value(&mut vsync, text);
                        self.screen.vsync(vsync);
                    });
                    ui.add(
                        egui::Slider::new(&mut self.internal_state.render_options.gamma, 0.0..=2.0)
                            .text("Gamma"),
                    );
                });
        });
        // Hand clipboard/cursor/etc. requests back to the windowing layer.
        self.egui_winit
            .handle_platform_output(self.screen.window(), full_output.platform_output.clone());
        let prims = self
            .egui
            .tessellate(full_output.shapes.clone(), full_output.pixels_per_point);
        (full_output, prims)
    }
/// Returns true if the event was handled
pub fn input(&mut self, event: &WindowEvent) -> bool {
#[cfg(debug_assertions)]
if matches!(
event,
WindowEvent::KeyboardInput {
event: KeyEvent {
logical_key: Key::Named(NamedKey::F1),
state: ElementState::Pressed,
repeat: false,
..
},
..
}
) {
self.show_debug = !self.show_debug;
return true;
}
false
}
    /// Per-frame simulation update. Currently a no-op aside from profiling;
    /// `_dt` is the time elapsed since the last update.
    pub fn update(&mut self, _dt: std::time::Duration) {
        puffin::profile_function!();
        // Run any in-progress tasks from the async executor
        // self.executor.run_until_stall(&mut self.internal_state);
    }
    /// Renders one frame: runs the egui UI, prepares text data, assembles a
    /// [`RenderInput`], and hands it to the screen.
    ///
    /// # Errors
    /// Propagates any [`RenderError`] from `Screen::render`.
    pub fn render(&mut self) -> Result<(), RenderError> {
        puffin::profile_function!();
        // Any resources only used for the rendering process go here
        let (egui_output, paint_jobs) = self.gui_egui();
        let text_data = TextSystemData::extract();
        self.text.prepare(&text_data);
        // The `_info` vecs must outlive the mapped views produced from them,
        // hence the two-step collect-then-map pattern.
        let camera_text_data_info: Vec<_> =
            self.text.produce_camera(&text_data).into_iter().collect();
        let camera_text_data = camera_text_data_info
            .iter()
            .map(|(buf, mapper)| mapper.produce(buf))
            .collect();
        let world_text_data_info: Vec<_> =
            self.text.produce_world(&text_data).into_iter().collect();
        let world_text_data = world_text_data_info
            .iter()
            .map(|(buf, mapper)| mapper.produce(buf))
            .collect();
        // Build the renderer input
        let input = RenderInput {
            paint_jobs: &paint_jobs,
            textures_delta: &egui_output.textures_delta,
            font_system: &mut self.text.fonts,
            swash_cache: &mut self.text.swash,
            camera: &mut self.internal_state.camera,
            render_options: &self.internal_state.render_options,
            camera_text_data,
            world_text_data,
            document: &self.internal_state.document,
        };
        self.screen.render(input)
    }
// methods here might query the world to produce
// rendering data ready for Screen to use
}
//! Scenes describe all the resources required to display the current game state.
//!
//! Only 1 scene can be active at any given moment.
use winit::event::WindowEvent;
use super::GameView;
/// A game mode (scene): receives lifecycle, input, update, and GUI callbacks.
pub trait Mode {
    /// Activates the scene
    fn on_enter(&mut self, game: &mut GameView);
    /// De-activates the scene
    fn on_exit(&mut self, game: &mut GameView);
    /// Scene input handling, true if handled
    fn input(&mut self, event: &WindowEvent, game: &mut GameView) -> bool;
    /// Updates the scene (and can return a SceneReference to go to another scene)
    fn update(&mut self, dt: std::time::Duration, game: &mut GameView) -> Option<ModeReference>;
    /// Renders to egui's gui
    fn gui_egui(&mut self, ctx: &egui::Context, game: &mut GameView);
    // Notice there's no "render" method? Yeah, the scene's responsibility is to adjust Game::world (and Game)
    // so that when the renderer comes around to rendering the world, the desired state is present to be rendered.
}
/// Scenes that can be referenced
///
/// Used by [`Mode::update`] to request a transition to another scene.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ModeReference {
    /// Initial scene that starts the game
    Init,
}
/// Library of all scenes that exist
pub struct ModeLibrary;
impl ModeLibrary {
    /// Instantiates the scene identified by `scene`.
    ///
    /// `game` is currently unused but kept in the signature so scenes can be
    /// constructed from game state later.
    pub fn get_scene(&self, game: &mut GameView, scene: ModeReference) -> Box<dyn Mode + Send> {
        let _ = game;
        match scene {
            ModeReference::Init => Box::<DefaultMode>::default(),
        }
    }
}
/// Placeholder starting mode.
#[derive(Default)]
pub struct DefaultMode {
    // Free-form text typed into the debug window; its contents drive the
    // camera easter egg in `update`.
    voop: String,
}
impl Mode for DefaultMode {
    fn on_enter(&mut self, game: &mut GameView<'_>) {
        let _ = game;
        // todo!()
    }
    fn on_exit(&mut self, game: &mut GameView<'_>) {
        let _ = game;
        // NOTE(review): still a stub — this panics the first time the game
        // exits DefaultMode.
        todo!()
    }
    fn input(&mut self, event: &WindowEvent, game: &mut GameView<'_>) -> bool {
        let _ = game;
        let _ = event;
        false
    }
    // Easter egg: if the typed text ends with "camera" (case-insensitive),
    // containing "rot" rotates the camera, otherwise it pans it; a leading
    // '-' reverses direction. Never requests a mode transition.
    fn update(
        &mut self,
        dt: std::time::Duration,
        game: &mut GameView<'_>,
    ) -> Option<ModeReference> {
        let dt_secs = dt.as_secs_f32();
        let negative = self.voop.starts_with('-');
        let negativity = if negative { -1.0 } else { 1.0 };
        if self.voop.to_lowercase().ends_with("camera") {
            if self.voop.contains("rot") {
                // Rotate about Z at 45°/s, scaled by frame time.
                game.camera.transform *= glam::Affine3A::from_axis_angle(
                    glam::Vec3::Z,
                    negativity * std::f32::consts::FRAC_PI_4 * dt_secs,
                );
            } else {
                game.camera.transform.translation.x += negativity * dt_secs * 0.0025;
            }
        }
        None
    }
    // Debug window with the text box that feeds `update`'s easter egg.
    fn gui_egui(&mut self, ctx: &egui::Context, game: &mut GameView) {
        let _ = game;
        egui::Window::new("gama")
            .default_pos((10.0, 200.0))
            .show(ctx, |ui| {
                ui.label(">> Welcome to Cat Waiter <<");
                if ui
                    .text_edit_singleline(&mut self.voop)
                    .on_hover_text("the best textbox of your life")
                    .changed()
                {
                    log::info!(">> new text: {}", self.voop);
                }
            });
    }
}