diff --git a/code/intermediate/tutorial11-normals/res/cobble-normal.png b/code/intermediate/tutorial11-normals/res/cobble-normal.png
index b75fe812..61de39cd 100644
Binary files a/code/intermediate/tutorial11-normals/res/cobble-normal.png and b/code/intermediate/tutorial11-normals/res/cobble-normal.png differ
diff --git a/code/intermediate/tutorial11-normals/src/resources.rs b/code/intermediate/tutorial11-normals/src/resources.rs
index 28d1219a..a8d60bae 100644
--- a/code/intermediate/tutorial11-normals/src/resources.rs
+++ b/code/intermediate/tutorial11-normals/src/resources.rs
@@ -162,7 +162,9 @@ pub async fn load_model(
                 // the solution!
                 let r = 1.0 / (delta_uv1.x * delta_uv2.y - delta_uv1.y * delta_uv2.x);
                 let tangent = (delta_pos1 * delta_uv2.y - delta_pos2 * delta_uv1.y) * r;
-                let bitangent = (delta_pos2 * delta_uv1.x - delta_pos1 * delta_uv2.x) * r;
+                // We flip the bitangent to enable right-handed normal
+                // maps with wgpu texture coordinate system
+                let bitangent = (delta_pos2 * delta_uv1.x - delta_pos1 * delta_uv2.x) * -r;
 
                 // We'll use the same tangent/bitangent for each vertex in the triangle
                 vertices[c[0] as usize].tangent =
diff --git a/code/intermediate/tutorial12-camera/res/cobble-normal.png b/code/intermediate/tutorial12-camera/res/cobble-normal.png
index b75fe812..61de39cd 100644
Binary files a/code/intermediate/tutorial12-camera/res/cobble-normal.png and b/code/intermediate/tutorial12-camera/res/cobble-normal.png differ
diff --git a/code/intermediate/tutorial12-camera/src/resources.rs b/code/intermediate/tutorial12-camera/src/resources.rs
index 28d1219a..a8d60bae 100644
--- a/code/intermediate/tutorial12-camera/src/resources.rs
+++ b/code/intermediate/tutorial12-camera/src/resources.rs
@@ -162,7 +162,9 @@ pub async fn load_model(
                 // the solution!
                 let r = 1.0 / (delta_uv1.x * delta_uv2.y - delta_uv1.y * delta_uv2.x);
                 let tangent = (delta_pos1 * delta_uv2.y - delta_pos2 * delta_uv1.y) * r;
-                let bitangent = (delta_pos2 * delta_uv1.x - delta_pos1 * delta_uv2.x) * r;
+                // We flip the bitangent to enable right-handed normal
+                // maps with wgpu texture coordinate system
+                let bitangent = (delta_pos2 * delta_uv1.x - delta_pos1 * delta_uv2.x) * -r;
 
                 // We'll use the same tangent/bitangent for each vertex in the triangle
                 vertices[c[0] as usize].tangent =
diff --git a/code/intermediate/tutorial13-threading/res/cobble-normal.png b/code/intermediate/tutorial13-threading/res/cobble-normal.png
index b75fe812..61de39cd 100644
Binary files a/code/intermediate/tutorial13-threading/res/cobble-normal.png and b/code/intermediate/tutorial13-threading/res/cobble-normal.png differ
diff --git a/code/intermediate/tutorial13-threading/src/resources.rs b/code/intermediate/tutorial13-threading/src/resources.rs
index 95bdc533..9748884f 100644
--- a/code/intermediate/tutorial13-threading/src/resources.rs
+++ b/code/intermediate/tutorial13-threading/src/resources.rs
@@ -2,8 +2,6 @@ use std::io::{BufReader, Cursor};
 
 use cfg_if::cfg_if;
 use wgpu::util::DeviceExt;
-#[cfg(not(target_arch="wasm32"))]
-use rayon::iter::IntoParallelIterator;
 
 use crate::{model, texture};
 
@@ -164,7 +162,9 @@ pub async fn load_model(
                 // the solution!
                 let r = 1.0 / (delta_uv1.x * delta_uv2.y - delta_uv1.y * delta_uv2.x);
                 let tangent = (delta_pos1 * delta_uv2.y - delta_pos2 * delta_uv1.y) * r;
-                let bitangent = (delta_pos2 * delta_uv1.x - delta_pos1 * delta_uv2.x) * r;
+                // We flip the bitangent to enable right-handed normal
+                // maps with wgpu texture coordinate system
+                let bitangent = (delta_pos2 * delta_uv1.x - delta_pos1 * delta_uv2.x) * -r;
 
                 // We'll use the same tangent/bitangent for each vertex in the triangle
                 vertices[c[0] as usize].tangent =
diff --git a/docs/beginner/tutorial2-surface/README.md b/docs/beginner/tutorial2-surface/README.md
index 33b0385e..4b9e84bf 100644
--- a/docs/beginner/tutorial2-surface/README.md
+++ b/docs/beginner/tutorial2-surface/README.md
@@ -462,11 +462,11 @@ wgpu::RenderPassColorAttachment {
 }
 ```
 
-The `RenderPassColorAttachment` has the `view` field which informs `wgpu` what texture to save the colors to. In this case we specify `frame.view` that we created using `surface.get_current_texture()`. This means that any colors we draw to this attachment will get drawn to the screen.
+The `RenderPassColorAttachment` has the `view` field which informs `wgpu` what texture to save the colors to. In this case we specify `view` that we created using `surface.get_current_texture()`. This means that any colors we draw to this attachment will get drawn to the screen.
 
 The `resolve_target` is the texture that will receive the resolved output. This will be the same as `view` unless multisampling is enabled. We don't need to specify this, so we leave it as `None`.
 
-The `ops` field takes a `wpgu::Operations` object. This tells wgpu what to do with the colors on the screen (specified by `frame.view`). The `load` field tells wgpu how to handle colors stored from the previous frame. Currently, we are clearing the screen with a bluish color. The `store` field tells wgpu whether we want to store the rendered results to the `Texture` behind our `TextureView` (in this case it's the `SurfaceTexture`). We use `true` as we do want to store our render results. There are cases when you wouldn't want to but those
+The `ops` field takes a `wgpu::Operations` object. This tells wgpu what to do with the colors on the screen (specified by `view`). The `load` field tells wgpu how to handle colors stored from the previous frame. Currently, we are clearing the screen with a bluish color. The `store` field tells wgpu whether we want to store the rendered results to the `Texture` behind our `TextureView` (in this case it's the `SurfaceTexture`). We use `true` as we do want to store our render results. There are cases when you wouldn't want to but those
diff --git a/docs/beginner/tutorial3-pipeline/README.md b/docs/beginner/tutorial3-pipeline/README.md
index 6a90fb88..e491811a 100644
--- a/docs/beginner/tutorial3-pipeline/README.md
+++ b/docs/beginner/tutorial3-pipeline/README.md
@@ -196,11 +196,11 @@ let render_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescrip
     // continued ...
 ```
 
-Two things to note here:
+Several things to note here:
 1. Here you can specify which function inside the shader should be the `entry_point`. These are the functions we marked with `[[stage(vertex)]]` and `[[stage(fragment)]]`
 2. The `buffers` field tells `wgpu` what type of vertices we want to pass to the vertex shader. We're specifying the vertices in the vertex shader itself, so we'll leave this empty. We'll put something there in the next tutorial.
 3. The `fragment` is technically optional, so you have to wrap it in `Some()`. We need it if we want to store color data to the `surface`.
-4. The `targets` field tells `wgpu` what color outputs it should set up.Currently, we only need one for the `surface`. We use the `surface`'s format so that copying to it is easy, and we specify that the blending should just replace old pixel data with new data. We also tell `wgpu` to write to all colors: red, blue, green, and alpha. *We'll talk more about*`color_state` *when we talk about textures.*
+4. The `targets` field tells `wgpu` what color outputs it should set up. Currently, we only need one for the `surface`. We use the `surface`'s format so that copying to it is easy, and we specify that the blending should just replace old pixel data with new data. We also tell `wgpu` to write to all colors: red, blue, green, and alpha. *We'll talk more about* `color_state` *when we talk about textures.*
 
 ```rust
     primitive: wgpu::PrimitiveState {
diff --git a/docs/beginner/tutorial4-buffer/README.md b/docs/beginner/tutorial4-buffer/README.md
index 6e42ffe3..4e49c514 100644
--- a/docs/beginner/tutorial4-buffer/README.md
+++ b/docs/beginner/tutorial4-buffer/README.md
@@ -185,8 +185,8 @@ wgpu::VertexBufferLayout {
 }
 ```
 
-While this is definitely nice, Rust sees the result of `vertex_attr_array` is a temporary value, so a tweak is required to return it from a function. We could change the lifetime on `wgpu::VertexBufferLayout` to `'static`, or [make it `const`](https://github.com/gfx-rs/wgpu/discussions/1790#discussioncomment-1160378). You can see an example below:
-
+While this is definitely nice, Rust sees the result of `vertex_attr_array` as a temporary value, so a tweak is required to return it from a function. We could change the lifetime on `wgpu::VertexBufferLayout` to `'static`, or [make it `const`](https://github.com/gfx-rs/wgpu/discussions/1790#discussioncomment-1160378). You can see an example below:
+
 ```rust
 impl Vertex {
     const ATTRIBS: [wgpu::VertexAttribute; 2] =
diff --git a/docs/beginner/tutorial5-textures/README.md b/docs/beginner/tutorial5-textures/README.md
index 906039ec..b5d78b3a 100644
--- a/docs/beginner/tutorial5-textures/README.md
+++ b/docs/beginner/tutorial5-textures/README.md
@@ -444,7 +444,7 @@ log = "0.4"
 pollster = "0.2"
 wgpu = "0.12"
 bytemuck = { version = "1.4", features = [ "derive" ] }
-anyhow = "1.0" // NEW!
+anyhow = "1.0" # NEW!
 ```
 
 Then, in a new file called `src/texture.rs`, add the following:
diff --git a/docs/beginner/tutorial6-uniforms/README.md b/docs/beginner/tutorial6-uniforms/README.md
index 29f736b2..306fa20c 100644
--- a/docs/beginner/tutorial6-uniforms/README.md
+++ b/docs/beginner/tutorial6-uniforms/README.md
@@ -234,11 +234,10 @@ Modify the vertex shader to include the following.
 ```wgsl
 // Vertex shader
 
-// 1.
 struct CameraUniform {
     view_proj: mat4x4<f32>;
 };
-[[group(1), binding(0)]] // 2.
+[[group(1), binding(0)]] // 1.
 var<uniform> camera: CameraUniform;
 
 struct VertexInput {
@@ -257,14 +256,13 @@ fn vs_main(
 ) -> VertexOutput {
     var out: VertexOutput;
     out.tex_coords = model.tex_coords;
-    out.clip_position = camera.view_proj * vec4<f32>(model.position, 1.0); // 3.
+    out.clip_position = camera.view_proj * vec4<f32>(model.position, 1.0); // 2.
     return out;
 }
 ```
 
-1. According to the [WGSL Spec](https://gpuweb.github.io/gpuweb/wgsl/), The block decorator indicates this structure type represents the contents of a buffer resource occupying a single binding slot in the shader’s resource interface. Any structure used as a `uniform` must be annotated with `[[block]]`
-2. Because we've created a new bind group, we need to specify which one we're using in the shader. The number is determined by our `render_pipeline_layout`. The `texture_bind_group_layout` is listed first, thus it's `group(0)`, and `camera_bind_group` is second, so it's `group(1)`.
-3. Multiplication order is important when it comes to matrices. The vector goes on the right, and the matrices gone on the left in order of importance.
+1. Because we've created a new bind group, we need to specify which one we're using in the shader. The number is determined by our `render_pipeline_layout`. The `texture_bind_group_layout` is listed first, thus it's `group(0)`, and `camera_bind_group` is second, so it's `group(1)`.
+2. Multiplication order is important when it comes to matrices. The vector goes on the right, and the matrices go on the left in order of importance.
 
 ## A controller for our camera
diff --git a/docs/intermediate/tutorial11-normals/README.md b/docs/intermediate/tutorial11-normals/README.md
index 1c291289..cbd72062 100644
--- a/docs/intermediate/tutorial11-normals/README.md
+++ b/docs/intermediate/tutorial11-normals/README.md
@@ -57,10 +57,7 @@ let texture_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroup
         wgpu::BindGroupLayoutEntry {
             binding: 3,
             visibility: wgpu::ShaderStages::FRAGMENT,
-            ty: wgpu::BindingType::Sampler {
-                comparison: false,
-                filtering: true,
-            },
+            ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
             count: None,
         },
     ],
@@ -139,6 +136,7 @@ fn fs_main(in: VertexOutput) -> [[location(0)]] vec4<f32> {
     let tangent_normal = object_normal.xyz * 2.0 - 1.0;
     let light_dir = normalize(light.position - in.world_position);
     let view_dir = normalize(camera.view_pos.xyz - in.world_position);
+    let half_dir = normalize(view_dir + light_dir);
 
     let diffuse_strength = max(dot(tangent_normal, light_dir), 0.0);
     let diffuse_color = light.color * diffuse_strength;
@@ -296,7 +294,9 @@ impl Model {
                 // the solution!
                 let r = 1.0 / (delta_uv1.x * delta_uv2.y - delta_uv1.y * delta_uv2.x);
                 let tangent = (delta_pos1 * delta_uv2.y - delta_pos2 * delta_uv1.y) * r;
-                let bitangent = (delta_pos2 * delta_uv1.x - delta_pos1 * delta_uv2.x) * r;
+                // We flip the bitangent to enable right-handed normal
+                // maps with wgpu texture coordinate system
+                let bitangent = (delta_pos2 * delta_uv1.x - delta_pos1 * delta_uv2.x) * -r;
 
                 // We'll use the same tangent/bitangent for each vertex in the triangle
                 vertices[c[0] as usize].tangent =
diff --git a/docs/intermediate/tutorial13-threading/README.md b/docs/intermediate/tutorial13-threading/README.md
index 400995b1..6c57c1a6 100644
--- a/docs/intermediate/tutorial13-threading/README.md
+++ b/docs/intermediate/tutorial13-threading/README.md
@@ -126,7 +126,7 @@ impl Model {
 }
 ```
 
-We've parallelized loading the meshes, and making the vertex array for them. Propably a bit overkill, but `rayon` should prevent us from using too many threads.
+We've parallelized loading the meshes, and making the vertex array for them. Probably a bit overkill, but `rayon` should prevent us from using too many threads.
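
For context on the bitangent change that repeats across the three `resources.rs` files and the tutorial11 README above, here is a minimal, self-contained sketch of the per-triangle tangent/bitangent math with the flipped bitangent. It uses plain `[f32; N]` arrays and hypothetical helper functions rather than the tutorial's actual vertex and vector types, and it assumes the `delta_pos*`/`delta_uv*` values are the usual edge deltas taken from corner 0 of the triangle.

```rust
// Minimal sketch of the tangent/bitangent computation touched by this diff.
// `sub3`/`scale3` and the array-based signature are illustrative only.
fn sub3(a: [f32; 3], b: [f32; 3]) -> [f32; 3] {
    [a[0] - b[0], a[1] - b[1], a[2] - b[2]]
}

fn scale3(v: [f32; 3], s: f32) -> [f32; 3] {
    [v[0] * s, v[1] * s, v[2] * s]
}

/// Returns (tangent, bitangent) for one triangle given its positions and UVs.
fn tangent_bitangent(pos: [[f32; 3]; 3], uv: [[f32; 2]; 3]) -> ([f32; 3], [f32; 3]) {
    let delta_pos1 = sub3(pos[1], pos[0]);
    let delta_pos2 = sub3(pos[2], pos[0]);
    let delta_uv1 = [uv[1][0] - uv[0][0], uv[1][1] - uv[0][1]];
    let delta_uv2 = [uv[2][0] - uv[0][0], uv[2][1] - uv[0][1]];

    let r = 1.0 / (delta_uv1[0] * delta_uv2[1] - delta_uv1[1] * delta_uv2[0]);
    // tangent = (delta_pos1 * delta_uv2.y - delta_pos2 * delta_uv1.y) * r
    let tangent = scale3(
        sub3(scale3(delta_pos1, delta_uv2[1]), scale3(delta_pos2, delta_uv1[1])),
        r,
    );
    // bitangent = (delta_pos2 * delta_uv1.x - delta_pos1 * delta_uv2.x) * -r
    // The `-r` is the change made in this diff: flipping the bitangent so
    // right-handed normal maps work with wgpu's texture coordinate system.
    let bitangent = scale3(
        sub3(scale3(delta_pos2, delta_uv1[0]), scale3(delta_pos1, delta_uv2[0])),
        -r,
    );
    (tangent, bitangent)
}
```

The only behavioral difference from the removed lines is the sign of `r` in the bitangent; the tangent is unchanged.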
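The tutorial11 README hunk above also picks up the wgpu 0.12 form of sampler bind group layout entries: the old struct variant `BindingType::Sampler { comparison, filtering }` becomes a tuple variant over `SamplerBindingType`. Below is a sketch of the updated entry as a standalone helper; only the `binding`, `visibility`, and `ty` values come from the diff, while the function name is hypothetical.

```rust
/// Sampler entry at binding 3 of the texture bind group layout (see the
/// tutorial11 README hunk above). `normal_sampler_entry` is a made-up name.
fn normal_sampler_entry() -> wgpu::BindGroupLayoutEntry {
    wgpu::BindGroupLayoutEntry {
        binding: 3,
        visibility: wgpu::ShaderStages::FRAGMENT,
        // wgpu 0.12: `Sampler { comparison: false, filtering: true }`
        // becomes `Sampler(SamplerBindingType::Filtering)`.
        ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
        count: None,
    }
}
```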
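The tutorial2-surface paragraphs edited above describe the `view`, `resolve_target`, and `ops` fields of the color attachment. Here is a sketch of the attachment those paragraphs refer to, written against the wgpu 0.12 API; the clear color values are illustrative and not taken from the diff.

```rust
/// Builds the color attachment described in the tutorial2-surface text above.
/// `view` is the TextureView made from surface.get_current_texture().
fn color_attachment(view: &wgpu::TextureView) -> wgpu::RenderPassColorAttachment<'_> {
    wgpu::RenderPassColorAttachment {
        // The texture the colors are saved to: the surface texture's view.
        view,
        // Same as `view` unless multisampling is enabled, so we leave it None.
        resolve_target: None,
        ops: wgpu::Operations {
            // Clear whatever the previous frame left behind with a bluish
            // color (illustrative values).
            load: wgpu::LoadOp::Clear(wgpu::Color {
                r: 0.1,
                g: 0.2,
                b: 0.3,
                a: 1.0,
            }),
            // Store the rendered results to the texture behind the view.
            store: true,
        },
    }
}
```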
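Finally, the tutorial4-buffer hunk above only shows the first lines of the `const` workaround for `vertex_attr_array` producing a temporary. Below is a sketch of how that pattern plausibly continues, assuming a two-attribute vertex with position and color fields; the struct layout and the `desc` helper are assumptions, not taken from the diff.

```rust
#[repr(C)]
#[derive(Copy, Clone, Debug)]
struct Vertex {
    position: [f32; 3],
    color: [f32; 3],
}

impl Vertex {
    // Holding the attributes in a const means they are no longer a temporary,
    // so `desc` can return a layout that borrows them.
    const ATTRIBS: [wgpu::VertexAttribute; 2] =
        wgpu::vertex_attr_array![0 => Float32x3, 1 => Float32x3];

    fn desc<'a>() -> wgpu::VertexBufferLayout<'a> {
        wgpu::VertexBufferLayout {
            array_stride: std::mem::size_of::<Self>() as wgpu::BufferAddress,
            step_mode: wgpu::VertexStepMode::Vertex,
            attributes: &Self::ATTRIBS,
        }
    }
}
```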