// This file is part of the 64k demo project.
// It implements Hybrid3DEffect (a simplified port of the v2 effect).
// TODO: Full Renderer3D integration with texture manager and noise assets.
#include "effects/hybrid3_d_effect.h"

#include <cmath>
#include <cstdint>

Hybrid3DEffect::Hybrid3DEffect(const GpuContext& ctx,
                               const std::vector<std::string>& inputs,
                               const std::vector<std::string>& outputs)
    : Effect(ctx, inputs, outputs), depth_node_(outputs[0] + "_depth"),
      dummy_texture_(nullptr), dummy_texture_view_(nullptr) {
  // Initialize renderer (format is always RGBA8Unorm for v2).
  renderer_.init(ctx_.device, ctx_.queue, WGPUTextureFormat_RGBA8Unorm);

  // Create a 1×1 white dummy texture for noise/sky (Renderer3D requires both).
  WGPUTextureDescriptor tex_desc = {};
  tex_desc.size = {1, 1, 1};
  tex_desc.format = WGPUTextureFormat_RGBA8Unorm;
  tex_desc.usage = WGPUTextureUsage_TextureBinding | WGPUTextureUsage_CopyDst;
  tex_desc.dimension = WGPUTextureDimension_2D;
  tex_desc.mipLevelCount = 1;
  tex_desc.sampleCount = 1;
  dummy_texture_ = wgpuDeviceCreateTexture(ctx_.device, &tex_desc);
  dummy_texture_view_ = wgpuTextureCreateView(dummy_texture_, nullptr);

  // Upload a single white pixel. The Win32 cross build uses the older
  // webgpu.h struct names (WGPUImageCopyTexture / WGPUTextureDataLayout);
  // the native build uses the renamed WGPUTexelCopy* equivalents.
  uint32_t white_pixel = 0xFFFFFFFF;
#if defined(DEMO_CROSS_COMPILE_WIN32)
  WGPUImageCopyTexture dst = {
      .texture = dummy_texture_,
      .mipLevel = 0,
      .origin = {0, 0, 0}
  };
  WGPUTextureDataLayout data_layout = {
      .bytesPerRow = 4,
      .rowsPerImage = 1
  };
#else
  WGPUTexelCopyTextureInfo dst = {
      .texture = dummy_texture_,
      .mipLevel = 0,
      .origin = {0, 0, 0}
  };
  WGPUTexelCopyBufferLayout data_layout = {
      .bytesPerRow = 4,
      .rowsPerImage = 1
  };
#endif
  WGPUExtent3D size = {1, 1, 1};
  wgpuQueueWriteTexture(ctx_.queue, &dst, &white_pixel, 4, &data_layout, &size);
  renderer_.set_noise_texture(dummy_texture_view_);
  renderer_.set_sky_texture(dummy_texture_view_);
  initialized_ = true;

  // Set up a simple scene: one center cube plus eight objects on a ring.
  scene_.clear();
  Object3D center(ObjectType::BOX);
  center.position = vec3(0, 0, 0);
  center.color = vec4(1, 0, 0, 1);
  scene_.add_object(center);
  for (int i = 0; i < 8; ++i) {
    // Cycle through sphere / torus / box around the ring.
    ObjectType type = (i % 3 == 1) ? ObjectType::TORUS :
                      (i % 3 == 2) ? ObjectType::BOX : ObjectType::SPHERE;
    Object3D obj(type);
    float angle = (i / 8.0f) * 6.28318f;  // fraction of a full turn (2*pi)
    obj.position = vec3(std::cos(angle) * 4.0f, 0, std::sin(angle) * 4.0f);
    obj.scale = vec3(0.7f, 0.7f, 0.7f);
    if (type == ObjectType::SPHERE)
      obj.color = vec4(0, 1, 0, 1);
    else if (type == ObjectType::TORUS)
      obj.color = vec4(0, 0.5f, 1, 1);
    else
      obj.color = vec4(1, 1, 0, 1);
    scene_.add_object(obj);
  }
}

Hybrid3DEffect::~Hybrid3DEffect() {
  // Shut down the renderer before releasing the dummy texture it references.
  renderer_.shutdown();
  if (dummy_texture_view_)
    wgpuTextureViewRelease(dummy_texture_view_);
  if (dummy_texture_)
    wgpuTextureRelease(dummy_texture_);
}

void Hybrid3DEffect::declare_nodes(NodeRegistry& registry) {
  // Declare the depth buffer node used by the 3D pass.
  registry.declare_node(depth_node_, NodeType::DEPTH24, -1, -1);
}

void Hybrid3DEffect::render(WGPUCommandEncoder encoder,
                            const UniformsSequenceParams& params,
                            NodeRegistry& nodes) {
  // Update the orbiting camera.
  float angle = params.time * 0.3f;
  vec3 cam_pos = vec3(std::cos(angle) * 10.0f, 5.0f, std::sin(angle) * 10.0f);
  camera_.position = cam_pos;
  camera_.target = vec3(0, 0, 0);
  camera_.aspect_ratio = params.aspect_ratio;

  // Get output views for the color target and the declared depth node.
  WGPUTextureView color_view = nodes.get_view(output_nodes_[0]);
  WGPUTextureView depth_view = nodes.get_view(depth_node_);

  // Render the 3D scene. Note: `encoder` is currently unused here.
  renderer_.render(scene_, camera_, params.time, color_view, depth_view);
}
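
// ---------------------------------------------------------------------------
// Usage sketch (documentation only, not compiled): how this effect could be
// driven by the demo's frame loop. `make_gpu_context`, `encoder`, and the
// "scene_color" node name are hypothetical placeholders, not confirmed
// project APIs; only the Hybrid3DEffect calls themselves come from this file.
//
//   GpuContext ctx = make_gpu_context();
//   Hybrid3DEffect effect(ctx, /*inputs=*/{}, /*outputs=*/{"scene_color"});
//
//   NodeRegistry nodes;
//   effect.declare_nodes(nodes);   // also registers "scene_color_depth"
//
//   UniformsSequenceParams params;
//   params.time = 1.5f;
//   params.aspect_ratio = 16.0f / 9.0f;
//   effect.render(encoder, params, nodes);
// ---------------------------------------------------------------------------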