#include "common_uniforms" @group(0) @binding(0) var globals: GlobalUniforms; @group(0) @binding(1) var object_data: ObjectsBuffer; // Binding 2 is reserved for BVH buffer when enabled @group(0) @binding(3) var noise_tex: texture_2d; @group(0) @binding(4) var noise_sampler: sampler; @group(0) @binding(5) var sky_tex: texture_2d; struct VertexOutput { @builtin(position) position: vec4, @location(0) local_pos: vec3, @location(1) color: vec4, @location(2) @interpolate(flat) instance_index: u32, @location(3) world_pos: vec3, @location(4) transformed_normal: vec3, }; @vertex fn vs_main(@builtin(vertex_index) vertex_index: u32, @builtin(instance_index) instance_index: u32) -> VertexOutput { var pos = array, 36>( vec3(-1.0, -1.0, 1.0), vec3( 1.0, -1.0, 1.0), vec3( 1.0, 1.0, 1.0), vec3(-1.0, -1.0, 1.0), vec3( 1.0, 1.0, 1.0), vec3(-1.0, 1.0, 1.0), vec3(-1.0, -1.0, -1.0), vec3(-1.0, 1.0, -1.0), vec3( 1.0, 1.0, -1.0), vec3(-1.0, -1.0, -1.0), vec3( 1.0, 1.0, -1.0), vec3( 1.0, -1.0, -1.0), vec3(-1.0, 1.0, -1.0), vec3(-1.0, 1.0, 1.0), vec3( 1.0, 1.0, 1.0), vec3(-1.0, 1.0, -1.0), vec3( 1.0, 1.0, 1.0), vec3( 1.0, 1.0, -1.0), vec3(-1.0, -1.0, -1.0), vec3( 1.0, -1.0, -1.0), vec3( 1.0, -1.0, 1.0), vec3(-1.0, -1.0, -1.0), vec3( 1.0, -1.0, 1.0), vec3(-1.0, -1.0, 1.0), vec3( 1.0, -1.0, -1.0), vec3( 1.0, 1.0, -1.0), vec3( 1.0, 1.0, 1.0), vec3( 1.0, -1.0, -1.0), vec3( 1.0, 1.0, 1.0), vec3( 1.0, -1.0, 1.0), vec3(-1.0, -1.0, -1.0), vec3(-1.0, -1.0, 1.0), vec3(-1.0, 1.0, 1.0), vec3(-1.0, -1.0, -1.0), vec3(-1.0, 1.0, 1.0), vec3(-1.0, 1.0, -1.0) ); var p = pos[vertex_index]; let obj = object_data.objects[instance_index]; let obj_type = obj.params.x; if (obj_type == 5.0) { // MESH // For meshes, we use the actual vertex data, not proxy geometry. // The position here is a placeholder, the real mesh data is handled by mesh_pipeline_. var out: VertexOutput; out.position = vec4(0.0, 0.0, 2.0, 1.0); // Outside far plane, so it's not rendered by this pipeline. return out; } // Tight fit for Torus proxy hull (major radius 1.0, minor 0.4) if (obj_type == 3.0) { p.x = p.x * 1.5; p.z = p.z * 1.5; p.y = p.y * 0.5; } let world_pos = obj.model * vec4(p, 1.0); let clip_pos = globals.view_proj * world_pos; var out: VertexOutput; out.position = clip_pos; out.local_pos = p; out.color = obj.color; out.instance_index = instance_index; out.world_pos = world_pos.xyz; // Correct normal transformation for meshes: transpose of inverse of model matrix // For non-uniform scaling, this is necessary. For other primitives, we use their analytical normals. if (obj_type == 5.0) { // Calculate inverse transpose of the model matrix (upper 3x3 part) let model_matrix = mat3x3(obj.model[0].xyz, obj.model[1].xyz, obj.model[2].xyz); let normal_matrix = transpose(inverse(model_matrix)); out.transformed_normal = normalize(normal_matrix * in.normal); } else { // For SDF primitives, we don't use vertex normals directly here; they are computed in the fragment shader. // However, we still need to output a normal for the fragment shader to use if it were a rasterized primitive. // The transformed_normal is not used by the SDF fragment shader, but for correctness, we'll pass it. // If this were a rasterized mesh, it would be used. 
#include "render/scene_query_mode"
#include "render/shadows"
#include "render/lighting_utils"
#include "ray_box"

struct FragmentOutput {
    @location(0) color: vec4<f32>,
    @builtin(frag_depth) depth: f32,
};

@fragment
fn fs_main(in: VertexOutput) -> FragmentOutput {
    let obj = object_data.objects[in.instance_index];
    let obj_type = obj.params.x;

    var p: vec3<f32>;
    var normal: vec3<f32>;
    var base_color = in.color.rgb;
    let light_dir = normalize(vec3<f32>(1.0, 1.0, 1.0));

    if (obj_type <= 0.0) {
        // Raster path (legacy or generic).
        p = in.world_pos;
        // Use the transformed normal passed from the vertex shader for rasterized objects.
        normal = normalize(in.transformed_normal);

        // Apply a grid pattern to the floor.
        let uv = p.xz * 0.5;
        let grid = 0.5 + 0.5 * sin(uv.x * 3.14) * sin(uv.y * 3.14);
        let grid_val = smoothstep(0.45, 0.55, grid);
        base_color = base_color * (0.5 + 0.5 * grid_val);
    } else {
        // SDF path.
        let ro_world = globals.camera_pos_time.xyz;
        let rd_world = normalize(in.world_pos - ro_world);

        // Ray-box intersection in local space to find tight raymarch bounds.
        let ro_local = (obj.inv_model * vec4<f32>(ro_world, 1.0)).xyz;
        let rd_local = normalize((obj.inv_model * vec4<f32>(rd_world, 0.0)).xyz);

        // Proxy box extent (matches vs_main). Meshes use obj.params.yzw for their extent.
        var extent = vec3<f32>(1.0);
        if (obj.params.x == 3.0) {
            extent = vec3<f32>(1.5, 0.5, 1.5); // Torus
        } else if (obj.params.x == 5.0) {
            extent = obj.params.yzw; // MESH extent
        }

        let bounds = ray_box_intersection(ro_local, rd_local, extent);
        if (!bounds.hit) {
            discard;
        }

        // Sphere-trace within the proxy bounds.
        var t = bounds.t_entry;
        var hit = false;
        for (var i = 0; i < 64; i = i + 1) {
            let q = ro_local + rd_local * t;
            let d_local = get_dist(q, obj.params);
            if (d_local < 0.0005) {
                hit = true;
                break;
            }
            t = t + d_local;
            if (t > bounds.t_exit) {
                break;
            }
        }
        if (!hit) {
            discard;
        }

        let q_hit = ro_local + rd_local * t;
        p = (obj.model * vec4<f32>(q_hit, 1.0)).xyz; // Correct world position.

        // Estimate the normal with bump mapping: central differences of the SDF
        // displaced by a noise height sampled through a spherical UV mapping.
        let e = vec2<f32>(0.005, 0.0);
        let disp_strength = 0.05;

        let q_x1 = q_hit + e.xyy;
        let uv_x1 = vec2<f32>(atan2(q_x1.x, q_x1.z) / 6.28 + 0.5,
                              acos(clamp(q_x1.y / length(q_x1), -1.0, 1.0)) / 3.14);
        let h_x1 = textureSample(noise_tex, noise_sampler, uv_x1).r;
        let d_x1 = get_dist(q_x1, obj.params) - disp_strength * h_x1;

        let q_x2 = q_hit - e.xyy;
        let uv_x2 = vec2<f32>(atan2(q_x2.x, q_x2.z) / 6.28 + 0.5,
                              acos(clamp(q_x2.y / length(q_x2), -1.0, 1.0)) / 3.14);
        let h_x2 = textureSample(noise_tex, noise_sampler, uv_x2).r;
        let d_x2 = get_dist(q_x2, obj.params) - disp_strength * h_x2;

        let q_y1 = q_hit + e.yxy;
        let uv_y1 = vec2<f32>(atan2(q_y1.x, q_y1.z) / 6.28 + 0.5,
                              acos(clamp(q_y1.y / length(q_y1), -1.0, 1.0)) / 3.14);
        let h_y1 = textureSample(noise_tex, noise_sampler, uv_y1).r;
        let d_y1 = get_dist(q_y1, obj.params) - disp_strength * h_y1;

        let q_y2 = q_hit - e.yxy;
        let uv_y2 = vec2<f32>(atan2(q_y2.x, q_y2.z) / 6.28 + 0.5,
                              acos(clamp(q_y2.y / length(q_y2), -1.0, 1.0)) / 3.14);
        let h_y2 = textureSample(noise_tex, noise_sampler, uv_y2).r;
        let d_y2 = get_dist(q_y2, obj.params) - disp_strength * h_y2;

        let q_z1 = q_hit + e.yyx;
        let uv_z1 = vec2<f32>(atan2(q_z1.x, q_z1.z) / 6.28 + 0.5,
                              acos(clamp(q_z1.y / length(q_z1), -1.0, 1.0)) / 3.14);
        let h_z1 = textureSample(noise_tex, noise_sampler, uv_z1).r;
        let d_z1 = get_dist(q_z1, obj.params) - disp_strength * h_z1;

        let q_z2 = q_hit - e.yyx;
        let uv_z2 = vec2<f32>(atan2(q_z2.x, q_z2.z) / 6.28 + 0.5,
                              acos(clamp(q_z2.y / length(q_z2), -1.0, 1.0)) / 3.14);
        let h_z2 = textureSample(noise_tex, noise_sampler, uv_z2).r;
        let d_z2 = get_dist(q_z2, obj.params) - disp_strength * h_z2;
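        // The six displaced samples above form a central-difference gradient of
        //     f(q) = get_dist(q, obj.params) - disp_strength * h(uv(q))
        // with step e = 0.005, so the next statement evaluates
        //     n_local ~ normalize(vec3(f(q + e*X) - f(q - e*X),
        //                              f(q + e*Y) - f(q - e*Y),
        //                              f(q + e*Z) - f(q - e*Z))).
        // Caveat (assumption, untested here): textureSample relies on implicit
        // derivatives, and these calls sit in a non-uniform branch after a discard;
        // if the compiler's uniformity analysis flags this, switching to
        // textureSampleLevel(noise_tex, noise_sampler, uv, 0.0) avoids implicit
        // derivatives without otherwise changing the technique.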
        let n_local = normalize(vec3<f32>(d_x1 - d_x2, d_y1 - d_y2, d_z1 - d_z2));
        let normal_matrix = mat3x3<f32>(obj.inv_model[0].xyz, obj.inv_model[1].xyz, obj.inv_model[2].xyz);
        normal = normalize(transpose(normal_matrix) * n_local);

        // Apply texture to the SDF color.
        if (in.instance_index == 0u || obj_type == 4.0) { // Floor (index 0) or PLANE
            let uv_grid = p.xz * 0.5;
            let grid = 0.5 + 0.5 * sin(uv_grid.x * 3.14) * sin(uv_grid.y * 3.14);
            let grid_val = smoothstep(0.45, 0.55, grid);
            base_color = base_color * (0.5 + 0.5 * grid_val);
        } else {
            let uv_hit = vec2<f32>(atan2(q_hit.x, q_hit.z) / 6.28 + 0.5,
                                   acos(clamp(q_hit.y / length(q_hit), -1.0, 1.0)) / 3.14);
            let tex_val = textureSample(noise_tex, noise_sampler, uv_hit).r;
            base_color = base_color * (0.7 + 0.3 * tex_val);
        }
    }

    let shadow = calc_shadow(p, light_dir, 0.05, 20.0, in.instance_index);
    let lit_color = calculate_lighting(base_color, normal, p, shadow);

    var out: FragmentOutput;
    out.color = vec4<f32>(lit_color, 1.0);

    // Calculate and write the correct depth for the shaded point.
    let clip_pos = globals.view_proj * vec4<f32>(p, 1.0);
    out.depth = clip_pos.z / clip_pos.w;

    return out;
}
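
// --- Reference sketch (assumption, not part of "ray_box") --------------------
// fs_main assumes the "ray_box" include provides ray_box_intersection(ro, rd,
// half_extent) returning .hit / .t_entry / .t_exit. The slab-method helper below
// shows one common way such a routine is written; the RayBoxHitRef struct and
// ray_box_intersection_ref name are hypothetical, and nothing here calls them.
struct RayBoxHitRef {
    hit: bool,
    t_entry: f32,
    t_exit: f32,
};

fn ray_box_intersection_ref(ro: vec3<f32>, rd: vec3<f32>, half_extent: vec3<f32>) -> RayBoxHitRef {
    // Slab method against an axis-aligned box centered at the origin.
    let inv_rd = 1.0 / rd; // Assumes rd has no exactly-zero components.
    let t0 = (-half_extent - ro) * inv_rd;
    let t1 = ( half_extent - ro) * inv_rd;
    let t_near = min(t0, t1);
    let t_far  = max(t0, t1);
    let t_entry = max(max(t_near.x, t_near.y), max(t_near.z, 0.0)); // Clamp to the ray origin.
    let t_exit  = min(t_far.x, min(t_far.y, t_far.z));
    return RayBoxHitRef(t_entry <= t_exit, t_entry, t_exit);
}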