GHIHJCWBCGFZI4AG66L7WG4WKJ467DXFBRBNLT4ZULSN7ZVBJI6AC
GROTV3H2V6BHU5CA7WSLVYDCWAR7GHUP5JV2QOGPLJ6N3553T4MAC
L6RIUKGLJZLAOKFGUDTZKBPP4HUBPEZAKHJEQHO34WFF62AB2ZIQC
X5EMQBC4BFNJOOHS2QMF4UB4QH6JKSALK6RXLK6B7SP6MNYSAP6QC
KMU4E426CB2NDRJEK7B4GSL22P62CJPPZRK45X3JV5WP5NIBIGRQC
WQIQA2TNWNSNGKQC22XPTECAZEWG6PG5QWQECUVXDUQRSCV6RUZQC
Q6Z5IQJ3SUI7BUJCKT377EPIRO7UVZFI7UB74ZEEGTCTUKC7WGKQC
// Shader to draw the world texture onto the window.
//
// Note the trick here: the camera's view-projection matrix is applied to the
// *texture coordinates*, not the vertex positions. The quad always covers the
// full window, and the camera selects which region of the world texture is
// sampled.
struct CameraUniform {
view_proj: mat4x4<f32>,
};
// Camera uniform in bind group 1 (bind group 0 holds the world texture below).
@group(1) @binding(0) // 1.
var<uniform> camera: CameraUniform;
struct VertexInput {
@location(0) position: vec3<f32>,
@location(1) tex_coords: vec2<f32>,
@location(2) color: vec4<f32>,
}
struct VertexOutput {
@builtin(position) clip_position: vec4<f32>,
@location(0) tex_coords: vec2<f32>,
@location(1) color: vec4<f32>,
}
@vertex
fn vs_main(model: VertexInput) -> VertexOutput {
var out: VertexOutput;
// Move the sampling window over the world texture with the camera transform;
// the position passes through unchanged (the quad always fills clip space).
out.tex_coords = (camera.view_proj * vec4<f32>(model.tex_coords, 0.0, 1.0)).xy;
out.clip_position = vec4<f32>(model.position.xyz, 1.0);
// out.clip_position = vec4<f32>(model.position.xy, 0.0, 1.0);
// out.color = vec4<f32>(out.clip_position);
out.color = model.color;
return out;
}
// World texture + sampler (bind group 0).
@group(0) @binding(0)
var t_world: texture_2d<f32>;
@group(0) @binding(1)
var s_world: sampler;
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
// Tint the sampled world texel by the per-vertex color (white leaves it as-is).
// return in.color;
return in.color * textureSample(t_world, s_world, in.tex_coords);
// return in.color + textureSample(t_world, s_world, in.tex_coords);
}
// NOTE(review): orphaned expression — appears to be the tail of a stubbed
// function whose body was cut off above this chunk.
vec![todo!()]
// Lay out a demo text string (32 px glyphs on a 20 px line height) and map it
// into world coordinates. The enclosing fn signature is outside this chunk.
let metrics = Metrics::new(32.0, 20.0);
let mut buffer = Buffer::new(&mut self.fonts, metrics);
// world -> screen maps the unit square up to the full world extent and
// recenters it on the origin; invert to get screen -> world.
let screen_to_world = (glam::Affine3A::from_translation(glam::Vec3::new(
-(WORLD_HALF_EXTENTS.x as f32),
-(WORLD_HALF_EXTENTS.y as f32),
0.0,
)) * glam::Affine3A::from_scale(glam::Vec3::new(
2.0 * WORLD_HALF_EXTENTS.x as f32,
2.0 * WORLD_HALF_EXTENTS.y as f32,
1.0,
)))
.inverse();
{
// Borrow the font system only for the duration of shaping.
let mut buffer = buffer.borrow_with(&mut self.fonts);
buffer.set_size(200.0, 25.0);
let attrs = Attrs::new().family(glyphon::Family::Name("Poiret One"));
buffer.set_text("Hello, game world! 🦀\n", attrs, Shaping::Advanced);
buffer.shape_until_scroll();
}
// Screen origin mapped into world space, rescaled to full world extents.
let loc = screen_to_world.transform_point3(glam::Vec3::new(0.0, 0.0, 0.0))
* 2.0
* WORLD_HALF_EXTENTS.as_vec2().extend(0.0);
vec![(buffer, BufferMapper::new(loc.xy(), 1.0))]
// NOTE(review): this field list continues a struct whose header is above this
// chunk (presumably the renderer/state struct).
/// CPU-side copy of the camera uniform; re-encoded and uploaded each frame.
camera_uniform: CameraUniform,
/// GPU buffer backing `camera_uniform`.
camera_buffer: wgpu::Buffer,
camera_bind_group: wgpu::BindGroup,
/// Renders from the world texture to screen space within the bounds of the camera
// TODO eventually handle screen space effects
world_render_pipeline: wgpu::RenderPipeline,
// num_world_vertices: u32,
num_world_indices: u32,
/// Vertex/index buffers for the full-screen world quad.
world_vertex_buffer: wgpu::Buffer,
world_index_buffer: wgpu::Buffer,
/// glyphon text renderer plus one atlas per text layer
/// (generic, world-space, camera-space).
glyphon: glyphon::TextRenderer,
atlas: glyphon::TextAtlas,
world_atlas: glyphon::TextAtlas,
camera_atlas: glyphon::TextAtlas,
}
/// Defines maximum possible size of the world for rendering.
///
/// The rendered coords are x in [-WHE.x, WHE.x] and y in [-WHE.y, WHE.y],
/// so the backing world texture is (2 * WHE.x) by (2 * WHE.y) texels.
pub const WORLD_HALF_EXTENTS: glam::UVec2 = glam::UVec2::new(1024, 1024);
/// Controls how the game world is displayed in the window.
pub struct Camera {
    /// Placement of the camera in world space.
    pub transform: glam::Affine3A,
    /// camera width (in game pixels)
    pub width: f32,
    /// camera height (in game pixels)
    pub height: f32,
}
impl Default for Camera {
fn default() -> Self {
Self {
transform: glam::Affine3A::default(),
width: 400.0,
height: 400.0,
}
}
}
impl Camera {
// You don't know how much blood, sweat, and tears (BST)
// I put into this fucking function when someone deeeefinitely
// has figured this out
//
// Google is dead, long live Google
pub(crate) fn build_view_projection_matrix(&self) -> glam::Mat4 {
let view = self.transform;
let proj = glam::Affine3A::from_scale(glam::Vec3::new(
self.width / WORLD_HALF_EXTENTS.x as f32,
self.height / (WORLD_HALF_EXTENTS.y as f32),
1.0,
)) * glam::Affine3A::from_translation(
(WORLD_HALF_EXTENTS.as_vec2() / glam::Vec2::new(self.width, self.height) / 2.0)
.extend(0.0),
);
(view * proj).into()
}
/// Information sent to the shader about our camera
#[derive(ShaderType)]
struct CameraUniform {
view_proj: glam::Mat4,
}
impl CameraUniform {
fn new() -> Self {
Self {
view_proj: glam::Mat4::IDENTITY,
}
}
fn update_view_proj(&mut self, camera: &Camera) {
self.view_proj = camera.build_view_projection_matrix();
}
}
}
/// Most general type of vertex.
/// Specifies a position, then tex coordinates, then an associated color (w/ alpha).
///
/// `#[repr(C)]` plus `Pod`/`Zeroable` allow slices of `Vertex` to be byte-cast
/// directly into GPU buffers via `bytemuck::cast_slice`.
#[repr(C)]
#[derive(Copy, Clone, Debug, bytemuck::Pod, bytemuck::Zeroable)]
struct Vertex {
    position: [f32; 3],
    tex_coords: [f32; 2],
    /// RGBA; multiplied with the sampled world texel in the fragment shader.
    color: [f32; 4],
}
impl Vertex {
    /// Vertex buffer layout mirroring the `#[repr(C)]` field order:
    /// position (3 x f32), tex_coords (2 x f32), color (4 x f32) at shader
    /// locations 0/1/2 respectively.
    fn desc() -> wgpu::VertexBufferLayout<'static> {
        wgpu::VertexBufferLayout {
            array_stride: std::mem::size_of::<Vertex>() as wgpu::BufferAddress,
            step_mode: wgpu::VertexStepMode::Vertex,
            attributes: &[
                wgpu::VertexAttribute {
                    offset: 0,
                    shader_location: 0,
                    format: wgpu::VertexFormat::Float32x3,
                },
                wgpu::VertexAttribute {
                    // tex_coords starts right after the 3-float position.
                    offset: std::mem::size_of::<[f32; 3]>() as wgpu::BufferAddress,
                    shader_location: 1,
                    format: wgpu::VertexFormat::Float32x2,
                },
                wgpu::VertexAttribute {
                    // color starts after position + tex_coords = 5 floats.
                    offset: std::mem::size_of::<[f32; 5]>() as wgpu::BufferAddress,
                    shader_location: 2,
                    format: wgpu::VertexFormat::Float32x4,
                },
            ],
        }
    }
const INDICES: &'static [u16] = &[0, 2, 3, 0, 1, 2];
/// Builds the four corners of the full-screen quad the world texture is
/// drawn onto: white vertices spanning clip space, with `v` flipped so the
/// texture's top row maps to the top of the screen.
fn generate_world_vertices() -> Vec<Vertex> {
    // (clip-space xy, texture uv) for each corner, in index-buffer order.
    let corners = [
        ([-1.0, -1.0], [0.0, 1.0]),
        ([1.0, -1.0], [1.0, 1.0]),
        ([1.0, 1.0], [1.0, 0.0]),
        ([-1.0, 1.0], [0.0, 0.0]),
    ];
    corners
        .into_iter()
        .map(|(xy, uv)| Vertex {
            position: [xy[0], xy[1], 0.0],
            tex_coords: uv,
            color: [1.0, 1.0, 1.0, 1.0],
        })
        .collect()
}
// Generic glyphon atlas + renderer (the enclosing constructor's header is
// outside this chunk).
let mut atlas = glyphon::TextAtlas::new(&device, &queue, surface_format);
let glyphon = glyphon::TextRenderer::new(
    &mut atlas,
    &device,
    wgpu::MultisampleState::default(),
    None,
);
// Separate atlases for the world-space and camera-space text layers.
let world_atlas = glyphon::TextAtlas::new(&device, &queue, surface_format);
let camera_atlas = glyphon::TextAtlas::new(&device, &queue, surface_format);
// Static full-screen quad geometry for blitting the world texture.
let world_vertices = Self::generate_world_vertices();
// let num_world_vertices = world_vertices.len() as u32;
let world_vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
    label: Some("world vertex buffer"),
    contents: bytemuck::cast_slice(&world_vertices),
    usage: wgpu::BufferUsages::VERTEX,
});
// Index buffer for the world quad.
// BUGFIX: the label was copy-pasted from the vertex buffer above
// ("world vertex buffer"), which makes wgpu validation/debugging output
// misleading; corrected to name the index buffer.
let world_index_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
    label: Some("world index buffer"),
    contents: bytemuck::cast_slice(Self::INDICES),
    usage: wgpu::BufferUsages::INDEX,
});
// Offscreen render target holding the rendered game world; the final window
// pass samples from it. Sized to the full world extent.
let world_texture = device.create_texture(&wgpu::TextureDescriptor {
    size: wgpu::Extent3d {
        width: WORLD_HALF_EXTENTS.x * 2,
        height: WORLD_HALF_EXTENTS.y * 2,
        depth_or_array_layers: 1,
    },
    mip_level_count: 1,
    sample_count: 1,
    dimension: wgpu::TextureDimension::D2,
    // Use the same format as the surface we will have to write to...
    format: surface_format,
    usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::TEXTURE_BINDING,
    label: Some("world texture"),
    view_formats: &[],
});
let world_texture_view = world_texture.create_view(&wgpu::TextureViewDescriptor::default());
// Nearest filtering with edge clamping — presumably for a crisp pixel look
// and to avoid bleeding at the world borders (TODO confirm intent).
let world_texture_sampler = device.create_sampler(&wgpu::SamplerDescriptor {
    address_mode_u: wgpu::AddressMode::ClampToEdge,
    address_mode_v: wgpu::AddressMode::ClampToEdge,
    address_mode_w: wgpu::AddressMode::ClampToEdge,
    mag_filter: wgpu::FilterMode::Nearest,
    min_filter: wgpu::FilterMode::Nearest,
    mipmap_filter: wgpu::FilterMode::Nearest,
    ..Default::default()
});
// Layout + bind group exposing the world texture (binding 0) and its sampler
// (binding 1) to the fragment stage — matches @group(0) in the shader.
let world_texture_bind_group_layout =
    device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
        label: Some("world texture bind group layout"),
        entries: &[
            wgpu::BindGroupLayoutEntry {
                binding: 0,
                visibility: wgpu::ShaderStages::FRAGMENT,
                ty: wgpu::BindingType::Texture {
                    sample_type: wgpu::TextureSampleType::Float { filterable: true },
                    view_dimension: wgpu::TextureViewDimension::D2,
                    multisampled: false,
                },
                count: None,
            },
            wgpu::BindGroupLayoutEntry {
                binding: 1,
                visibility: wgpu::ShaderStages::FRAGMENT,
                ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
                count: None,
            },
        ],
    });
let world_texture_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
    label: Some("world texture bind group"),
    layout: &world_texture_bind_group_layout,
    entries: &[
        wgpu::BindGroupEntry {
            binding: 0,
            resource: wgpu::BindingResource::TextureView(&world_texture_view),
        },
        wgpu::BindGroupEntry {
            binding: 1,
            resource: wgpu::BindingResource::Sampler(&world_texture_sampler),
        },
    ],
});
// Camera uniform: encode through encase's UniformBuffer so the byte layout
// satisfies WGSL uniform alignment rules, then upload as a COPY_DST uniform
// buffer so it can be rewritten each frame.
let camera_uniform = CameraUniform::new();
let mut uniform_buffer = UniformBuffer::new(Vec::new());
uniform_buffer.write(&camera_uniform).unwrap();
let camera_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
    label: Some("camera buffer"),
    contents: &uniform_buffer.into_inner(),
    usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
});
// Vertex-stage-only uniform at binding 0 — matches @group(1) @binding(0).
let camera_bind_group_layout =
    device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
        label: Some("camera bind group layout"),
        entries: &[wgpu::BindGroupLayoutEntry {
            binding: 0,
            visibility: wgpu::ShaderStages::VERTEX,
            ty: wgpu::BindingType::Buffer {
                ty: wgpu::BufferBindingType::Uniform,
                has_dynamic_offset: false,
                min_binding_size: None,
            },
            count: None,
        }],
    });
let camera_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
    layout: &camera_bind_group_layout,
    entries: &[wgpu::BindGroupEntry {
        binding: 0,
        resource: camera_buffer.as_entire_binding(),
    }],
    label: Some("camera bind group"),
});
// Compile the WGSL world shader and build the pipeline that blits the world
// texture to the window. Bind group order [texture, camera] matches the
// shader's @group(0)/@group(1) declarations.
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
    label: Some("world shader"),
    source: wgpu::ShaderSource::Wgsl(include_str!("world_shader.wgsl").into()),
});
let world_render_pipeline_layout =
    device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
        label: Some("world render pipeline layout"),
        bind_group_layouts: &[&world_texture_bind_group_layout, &camera_bind_group_layout],
        push_constant_ranges: &[],
    });
let world_render_pipeline =
    device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
        label: Some("world render pipeline"),
        layout: Some(&world_render_pipeline_layout),
        vertex: wgpu::VertexState {
            module: &shader,
            entry_point: "vs_main",
            buffers: &[Vertex::desc()],
        },
        primitive: wgpu::PrimitiveState {
            topology: wgpu::PrimitiveTopology::TriangleList,
            strip_index_format: None,
            // CCW front faces with back culling — INDICES is wound to match.
            front_face: wgpu::FrontFace::Ccw,
            cull_mode: Some(wgpu::Face::Back),
            polygon_mode: wgpu::PolygonMode::Fill,
            unclipped_depth: false,
            conservative: false,
        },
        depth_stencil: None,
        multisample: wgpu::MultisampleState {
            count: 1,
            mask: !0,
            alpha_to_coverage_enabled: false,
        },
        fragment: Some(wgpu::FragmentState {
            module: &shader,
            entry_point: "fs_main",
            targets: &[Some(wgpu::ColorTargetState {
                format: config.format,
                // Opaque blit: the quad overwrites whatever was in the target.
                blend: Some(wgpu::BlendState::REPLACE),
                write_mask: wgpu::ColorWrites::ALL,
            })],
        }),
        multiview: None,
    });
));
);
// Update the camera buffer: re-encode the uniform through encase (for WGSL
// alignment) and upload it to the GPU.
self.camera_uniform.update_view_proj(input.camera);
let mut uniform_buffer = UniformBuffer::new(Vec::new());
uniform_buffer.write(&self.camera_uniform).unwrap();
self.queue
    .write_buffer(&self.camera_buffer, 0, &uniform_buffer.into_inner());
let world_resolution = glyphon::Resolution {
    width: WORLD_HALF_EXTENTS.x * 2,
    height: WORLD_HALF_EXTENTS.y * 2,
};
// A remnant of the war
// Pre-scale each world text area by the inverse ratio of the camera's aspect
// to the world's aspect so text size stays consistent.
let prepared_world_text_data: Vec<_> = input
    .world_text_data
    .into_iter()
    .map(|ta| {
        let scale_factor = (input.camera.width / input.camera.height)
            / (WORLD_HALF_EXTENTS.x as f32 / WORLD_HALF_EXTENTS.y as f32);
        glyphon::TextArea {
            scale: ta.scale * scale_factor.recip() as f32,
            ..ta
        }
    })
    .collect();
// NOTE(review): a fresh TextRenderer is constructed every frame here —
// consider reusing the one stored on self; confirm against glyphon's API.
let mut world_glyphon = glyphon::TextRenderer::new(
    &mut self.world_atlas,
    &self.device,
    wgpu::MultisampleState::default(),
    None,
);
// TODO
match world_glyphon.prepare(
    &self.device,
    &self.queue,
    input.font_system,
    &mut self.world_atlas,
    world_resolution,
    prepared_world_text_data.clone(),
    input.swash_cache,
) {
    Ok(()) => {}
    // Best-effort: on a full atlas, skip world text this frame.
    Err(glyphon::PrepareError::AtlasFull) => {
        // TODO Retry after executing atlas.trim, which removes all glyph claims iigc
        log::error!("failed to render world level text, giving up...");
    }
}
// Earlier experiment kept for reference: a pass with no color attachments.
// let mut _world_render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
// label: Some("World Render Pass"),
// color_attachments: &[],
// depth_stencil_attachment: None,
// timestamp_writes: None,
// occlusion_query_set: None,
// });
// Pass 1: clear the offscreen world texture to opaque black, then draw the
// world-space text layer into it.
let mut world_render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
    label: Some("World Render Pass"),
    color_attachments: &[Some(wgpu::RenderPassColorAttachment {
        view: &self.world_texture_view,
        resolve_target: None,
        ops: wgpu::Operations {
            load: wgpu::LoadOp::Clear(wgpu::Color {
                r: 0.0,
                g: 0.0,
                b: 0.0,
                a: 1.0,
            }),
            store: wgpu::StoreOp::Store,
        },
    })],
    depth_stencil_attachment: None,
    occlusion_query_set: None,
    timestamp_writes: None,
});
world_glyphon.render(&self.world_atlas, &mut world_render_pass)?;
// Try on a completely empty atlas
// Fallback: retry camera-level text preparation with a brand-new atlas; adopt
// the new atlas on success, otherwise give up for this frame.
let mut new_atlas =
    glyphon::TextAtlas::new(&self.device, &self.queue, self.config.format);
if self
    .glyphon
    .prepare(
        &self.device,
        &self.queue,
        input.font_system,
        &mut new_atlas,
        screen_resolution,
        input.camera_text_data,
        input.swash_cache,
    )
    .is_ok()
{
    self.atlas = new_atlas;
} else {
    log::error!(
        "failed to render camera level text with a reinitialized atlas, giving up..."
    );
}
// TODO retry after executing atlas.trim
// NOTE(review): this log fires unconditionally as written — if it belongs to
// an error branch, that branch is outside this chunk; confirm placement.
log::error!("failed to render camera level text, giving up...");
self.glyphon.render(&self.atlas, &mut final_render_pass)?;
// Draw the world quad (sampling the world texture through the camera), then
// the camera-space text layer on top.
final_render_pass.set_pipeline(&self.world_render_pipeline);
final_render_pass.set_bind_group(0, &self.world_texture_bind_group, &[]);
final_render_pass.set_bind_group(1, &self.camera_bind_group, &[]);
final_render_pass.set_vertex_buffer(0, self.world_vertex_buffer.slice(..));
final_render_pass
    .set_index_buffer(self.world_index_buffer.slice(..), wgpu::IndexFormat::Uint16);
final_render_pass.draw_indexed(0..self.num_world_indices, 0, 0..1);
camera_glyphon.render(&self.camera_atlas, &mut final_render_pass)?;
// Earlier side-panel variant of the same debug UI, kept for reference.
// egui::SidePanel::left("gama").show(ctx, |ui| {
// ui.label(">> Welcome to Cat Waiter <<");
// if ui
// .text_edit_singleline(&mut self.voop)
// .on_hover_text("the best textbox of your life")
// .changed()
// {
// log::info!(">> new text: {}", self.voop);
// }
// });
// Debug window with a single test textbox; edits are logged to info.
egui::Window::new("gama").show(ctx, |ui| {
    ui.label(">> Welcome to Cat Waiter <<");
    if ui
        .text_edit_singleline(&mut self.voop)
        .on_hover_text("the best textbox of your life")
        .changed()
    {
        log::info!(">> new text: {}", self.voop);
    }
});
name = "encase_derive"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4ce1449c7d19eba6cc0abd231150ad81620a8dce29601d7f8d236e5d431d72a"
dependencies = [
"encase_derive_impl",
]
[[package]]
name = "encase_derive_impl"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92959a9e8d13eaa13b8ae8c7b583c3bf1669ca7a8e7708a088d12587ba86effc"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.48",
]
[[package]]
*.cap