In this post I provide a simple update I made on the equirectangular projection shader for usage in my NervLand engine. Check the youtube video for more details 😎!
YouTube video for this article available at:
Here is the current version of the WGSL shader I'm using for equirectangular env. mapping (obviously you will need to provide the expected input bindings or adapt the code as needed to use it):
#include "base_utils"

@group(0) @binding(0) var<uniform> cam : StdCameraUBO;
@group(0) @binding(1) var linSampler: sampler;
@group(0) @binding(2) var equirectTex: texture_2d<f32>;

// Data interpolated from the vertex stage to the fragment stage.
struct VertexOutput {
    @builtin(position) Position: vec4<f32>,
    @location(0) viewDir: vec3<f32>,
}

// Vertex stage: emits a full-screen quad corner and the world-space
// view direction for that corner.
@vertex
fn main(input: StdVsInput) -> VertexOutput {
    // Quad UVs in [0,1], derived from the vertex index (from base_utils).
    let quadUV: vec2f = get_quad_uv(input.vertexID);

    // Remap UVs to clip space; depth is pinned at 1.0.
    let clipPos = vec4(quadUV * 2.0 - 1.0, 1.0, 1.0);

    // Undo the projection, then the view rotation, to recover the
    // world-space ray through this corner of the quad.
    let eyeRay = normalize((get_proj_mat_inverse(cam) * clipPos).xyz);
    let worldRay = normalize((get_view_mat_inverse(cam) * vec4f(eyeRay, 0.0)).xyz);

    var output: VertexOutput;
    output.Position = clipPos;
    output.viewDir = worldRay;
    return output;
}

// Fragment stage: converts the interpolated view direction into
// equirectangular lat/lon coordinates and samples the environment map.
@fragment
fn main_fs(in: VertexOutput) -> @location(0) vec4<f32> {
    // Re-normalize after interpolation, and clamp each component so
    // asin() stays inside its domain despite floating-point error.
    let dir = clamp(normalize(in.viewDir), vec3f(-1.0), vec3f(1.0));

    // View convention here: X right, Y down, Z forward.
    // Latitude and longitude, each normalized to the [-1, 1] range.
    let lat: f32 = asin(-dir.y) * 2.0 / PI;
    let lon: f32 = atan2(dir.x, dir.z) / PI;

    // Remap [-1, 1] to texture coordinates in [0, 1].
    let texUV = vec2f(lon, lat) * 0.5 + vec2f(0.5, 0.5);

    // Sample at an explicit LOD: the uv coords are discontinuous at the
    // seam, so implicit-derivative sampling would misbehave there.
    let color = textureSampleLevel(equirectTex, linSampler, texUV, 0).xyz;
    // let color = textureSample(equirectTex, linSampler, texUV).xyz;
    return vec4f(color, 1.0);
}