fixed non-srgb on hdr tutorial

0.19
Benjamin Hansen 4 weeks ago
parent 3d27fca202
commit 09c1024a8f

@@ -10,6 +10,7 @@ pub struct Texture {
 impl Texture {
     pub const DEPTH_FORMAT: wgpu::TextureFormat = wgpu::TextureFormat::Depth32Float;
+    #[allow(unused)]
     pub fn create_depth_texture(
         device: &wgpu::Device,
         config: &wgpu::SurfaceConfiguration,

@@ -79,7 +79,7 @@ impl HdrPipeline {
         let pipeline = create_render_pipeline(
             device,
             &pipeline_layout,
-            config.format,
+            config.format.add_srgb_suffix(),
             None,
             &[],
             wgpu::PrimitiveTopology::TriangleList,

@@ -295,7 +295,8 @@ impl<'a> State<'a> {
@ -295,7 +295,8 @@ impl<'a> State<'a> {
height: size.height, height: size.height,
present_mode: surface_caps.present_modes[0], present_mode: surface_caps.present_modes[0],
alpha_mode: surface_caps.alpha_modes[0], alpha_mode: surface_caps.alpha_modes[0],
view_formats: vec![], // NEW!
view_formats: vec![surface_format.add_srgb_suffix()],
desired_maximum_frame_latency: 2, desired_maximum_frame_latency: 2,
}; };
@@ -714,7 +715,10 @@ impl<'a> State<'a> {
         let output = self.surface.get_current_texture()?;
         let view = output
             .texture
-            .create_view(&wgpu::TextureViewDescriptor::default());
+            .create_view(&wgpu::TextureViewDescriptor {
+                format: Some(self.config.format.add_srgb_suffix()),
+                ..Default::default()
+            });
         let mut encoder = self
             .device

@@ -284,10 +284,9 @@ impl HdrLoader {
         dst_size: u32,
         label: Option<&str>,
     ) -> anyhow::Result<texture::CubeTexture> {
-        log::info!("creating decoder");
         let hdr_decoder = HdrDecoder::new(Cursor::new(data))?;
         let meta = hdr_decoder.metadata();
-        log::info!("reading image");
         #[cfg(not(target_arch="wasm32"))]
         let pixels = {
             let mut pixels = vec![[0.0, 0.0, 0.0, 0.0]; meta.width as usize * meta.height as usize];
@@ -309,7 +308,6 @@ impl HdrLoader {
             })
             .collect::<Vec<_>>();
-        log::info!("creating texture");
         let src = texture::Texture::create_2d_texture(
             device,
             meta.width,
@@ -320,7 +318,6 @@ impl HdrLoader {
             None,
         );
-        log::info!("writing texture");
         queue.write_texture(
             wgpu::ImageCopyTexture {
                 texture: &src.texture,

@@ -105,7 +105,7 @@ impl HdrPipeline {
         let pipeline = create_render_pipeline(
             device,
             &pipeline_layout,
-            config.format,
+            config.format.add_srgb_suffix(),
             None,
             // We'll use some math to generate the vertex data in
             // the shader, so we don't need any vertex buffers
@@ -567,16 +567,27 @@ impl HdrLoader {
     ) -> anyhow::Result<texture::CubeTexture> {
         let hdr_decoder = HdrDecoder::new(Cursor::new(data))?;
         let meta = hdr_decoder.metadata();
-        let mut pixels = vec![[0.0, 0.0, 0.0, 0.0]; meta.width as usize * meta.height as usize];
-        hdr_decoder.read_image_transform(
-            |pix| {
-                // There's no Rgb32Float format, so we need
-                // an extra float
-                let rgb = pix.to_hdr();
-                [rgb.0[0], rgb.0[1], rgb.0[2], 1.0f32]
-            },
-            &mut pixels[..],
-        )?;
+        #[cfg(not(target_arch="wasm32"))]
+        let pixels = {
+            let mut pixels = vec![[0.0, 0.0, 0.0, 0.0]; meta.width as usize * meta.height as usize];
+            hdr_decoder.read_image_transform(
+                |pix| {
+                    let rgb = pix.to_hdr();
+                    [rgb.0[0], rgb.0[1], rgb.0[2], 1.0f32]
+                },
+                &mut pixels[..],
+            )?;
+            pixels
+        };
+        #[cfg(target_arch="wasm32")]
+        let pixels = hdr_decoder.read_image_native()?
+            .into_iter()
+            .map(|pix| {
+                let rgb = pix.to_hdr();
+                [rgb.0[0], rgb.0[1], rgb.0[2], 1.0f32]
+            })
+            .collect::<Vec<_>>();

         let src = texture::Texture::create_2d_texture(
             device,
@@ -1201,6 +1212,48 @@ Here's the finished scene:
![with-reflections](./with-reflections.png)
## Output too dark on WebGPU?
WebGPU doesn't support using sRGB texture formats as the
output format for a surface. We can get around this by
rendering to a texture view that uses the sRGB version of
the surface format. To do this, we need to list the sRGB
variant in the surface config's `view_formats`, since a
view is only allowed to use a format other than the
surface's own if that format appears there.
```rust
let config = wgpu::SurfaceConfiguration {
usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
format: surface_format,
width: size.width,
height: size.height,
present_mode: surface_caps.present_modes[0],
alpha_mode: surface_caps.alpha_modes[0],
// NEW!
view_formats: vec![surface_format.add_srgb_suffix()],
desired_maximum_frame_latency: 2,
};
```
Then we need to create a view with sRGB enabled in
`State::render()`.
```rust
let view = output
.texture
.create_view(&wgpu::TextureViewDescriptor {
format: Some(self.config.format.add_srgb_suffix()),
..Default::default()
});
```
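
In this tutorial, that sRGB view is the one that ultimately gets attached as
the color target of the tonemapping pass in `HdrPipeline::process()`. As a
rough sketch only (not the tutorial's exact code; the label and load/store
ops here are placeholders), the attachment wiring looks something like this:

```rust
let mut pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
    label: Some("Hdr::process"),
    color_attachments: &[Some(wgpu::RenderPassColorAttachment {
        // `view` is the sRGB view created above; any pipeline that draws
        // into it must target the matching *_srgb format.
        view: &view,
        resolve_target: None,
        ops: wgpu::Operations {
            load: wgpu::LoadOp::Clear(wgpu::Color::BLACK),
            store: wgpu::StoreOp::Store,
        },
    })],
    depth_stencil_attachment: None,
    timestamp_writes: None,
    occlusion_query_set: None,
});
```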
You may also have noticed that in `HdrPipeline::new()`
we use `config.format.add_srgb_suffix()` when creating
the render pipeline. This is required because, without
it, the sRGB-enabled `TextureView` won't be compatible
with the render pipeline.
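
For reference, here's roughly what that boils down to inside pipeline
creation. This is a sketch that calls `wgpu` directly instead of the
tutorial's `create_render_pipeline` helper, and the function name, label,
and entry points are made up; the only load-bearing part is the color
target format:

```rust
// Hypothetical stand-in for the tutorial's create_render_pipeline helper.
fn hdr_blit_pipeline(
    device: &wgpu::Device,
    layout: &wgpu::PipelineLayout,
    shader: &wgpu::ShaderModule,
    config: &wgpu::SurfaceConfiguration,
) -> wgpu::RenderPipeline {
    device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
        label: Some("hdr blit pipeline"),
        layout: Some(layout),
        vertex: wgpu::VertexState {
            module: shader,
            entry_point: "vs_main",
            buffers: &[],
        },
        fragment: Some(wgpu::FragmentState {
            module: shader,
            entry_point: "fs_main",
            targets: &[Some(wgpu::ColorTargetState {
                // Must match the format of the view we render into,
                // which is the sRGB variant of the surface format.
                format: config.format.add_srgb_suffix(),
                blend: None,
                write_mask: wgpu::ColorWrites::ALL,
            })],
        }),
        primitive: wgpu::PrimitiveState::default(),
        depth_stencil: None,
        multisample: wgpu::MultisampleState::default(),
        multiview: None,
    })
}
```

If the two formats disagree (for example, the pipeline targets
`Bgra8Unorm` while the view is `Bgra8UnormSrgb`), wgpu will reject the
pass with a validation error.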
With that, you should get sRGB output as expected.
## Demo
<div class="warn">
