// This file is part of the 64k demo project.
// Standalone "mini-demo" for testing the 3D renderer.

#include "3d/camera.h"
#include "3d/object.h"
#include "3d/renderer.h"
#include "3d/scene.h"
#include "gpu/texture_manager.h"
#include "platform.h"
#include "procedural/generator.h"

#include <cmath>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <iostream>

#if defined(DEMO_CROSS_COMPILE_WIN32)
#include <webgpu/webgpu.h> // assumed header path for the Win32 cross-compile toolchain
#else
#include <webgpu/webgpu.h> // assumed header path for the native toolchain
#endif

// Global State
static Renderer3D g_renderer;
static TextureManager g_textures;
static Scene g_scene;
static Camera g_camera;

static WGPUDevice g_device = nullptr;
static WGPUQueue g_queue = nullptr;
static WGPUSurface g_surface = nullptr;
static WGPUAdapter g_adapter = nullptr;
static WGPUTextureFormat g_format = WGPUTextureFormat_Undefined;

// ... (init_wgpu implementation same as before)
void init_wgpu(PlatformState* platform_state) {
    WGPUInstance instance = wgpuCreateInstance(nullptr);
    if (!instance) {
        std::cerr << "Failed to create WGPU instance." << std::endl;
        exit(1);
    }

    g_surface = platform_create_wgpu_surface(instance, platform_state);
    if (!g_surface) {
        std::cerr << "Failed to create WGPU surface." << std::endl;
        exit(1);
    }

    WGPURequestAdapterOptions adapter_opts = {};
    adapter_opts.compatibleSurface = g_surface;
    adapter_opts.powerPreference = WGPUPowerPreference_HighPerformance;

#if defined(DEMO_CROSS_COMPILE_WIN32)
    // Older callback style: plain function pointer plus userdata.
    auto on_adapter = [](WGPURequestAdapterStatus status, WGPUAdapter adapter,
                         const char* message, void* userdata) {
        if (status == WGPURequestAdapterStatus_Success) {
            *(WGPUAdapter*)userdata = adapter;
        }
    };
    wgpuInstanceRequestAdapter(instance, &adapter_opts, on_adapter, &g_adapter);
#else
    // Newer callback style: CallbackInfo struct with mode and two userdata slots.
    auto on_adapter = [](WGPURequestAdapterStatus status, WGPUAdapter adapter,
                         WGPUStringView message, void* userdata, void* user2) {
        if (status == WGPURequestAdapterStatus_Success) {
            *(WGPUAdapter*)userdata = adapter;
        }
    };
    WGPURequestAdapterCallbackInfo adapter_cb = {};
    adapter_cb.mode = WGPUCallbackMode_WaitAnyOnly;
    adapter_cb.callback = on_adapter;
    adapter_cb.userdata1 = &g_adapter;
    wgpuInstanceRequestAdapter(instance, &adapter_opts, adapter_cb);
#endif

#if !defined(DEMO_CROSS_COMPILE_WIN32)
    while (!g_adapter) {
        wgpuInstanceProcessEvents(instance);
    }
#endif
    if (!g_adapter) {
        std::cerr << "Failed to get adapter." << std::endl;
        exit(1);
    }

    WGPUDeviceDescriptor device_desc = {};
#if defined(DEMO_CROSS_COMPILE_WIN32)
    auto on_device = [](WGPURequestDeviceStatus status, WGPUDevice device,
                        const char* message, void* userdata) {
        if (status == WGPURequestDeviceStatus_Success) {
            *(WGPUDevice*)userdata = device;
        }
    };
    wgpuAdapterRequestDevice(g_adapter, &device_desc, on_device, &g_device);
#else
    auto on_device = [](WGPURequestDeviceStatus status, WGPUDevice device,
                        WGPUStringView message, void* userdata, void* user2) {
        if (status == WGPURequestDeviceStatus_Success) {
            *(WGPUDevice*)userdata = device;
        }
    };
    WGPURequestDeviceCallbackInfo device_cb = {};
    device_cb.mode = WGPUCallbackMode_WaitAnyOnly;
    device_cb.callback = on_device;
    device_cb.userdata1 = &g_device;
    wgpuAdapterRequestDevice(g_adapter, &device_desc, device_cb);
#endif

#if !defined(DEMO_CROSS_COMPILE_WIN32)
    while (!g_device) {
        wgpuInstanceProcessEvents(instance);
    }
#endif
    if (!g_device) {
        std::cerr << "Failed to get device." << std::endl;
        exit(1);
    }
    g_queue = wgpuDeviceGetQueue(g_device);

    // Use the first (preferred) surface format reported by the adapter.
    WGPUSurfaceCapabilities caps = {};
    wgpuSurfaceGetCapabilities(g_surface, g_adapter, &caps);
    g_format = caps.formats[0];

    WGPUSurfaceConfiguration config = {};
    config.device = g_device;
    config.format = g_format;
    config.usage = WGPUTextureUsage_RenderAttachment;
    config.width = platform_state->width;
    config.height = platform_state->height;
    config.presentMode = WGPUPresentMode_Fifo;
    config.alphaMode = WGPUCompositeAlphaMode_Opaque;
    wgpuSurfaceConfigure(g_surface, &config);
}

void setup_scene() {
    g_scene.clear();
    srand(12345); // Fixed seed

    // Large floor, use CUBE type to exclude from SDF calculations
    Object3D floor(ObjectType::CUBE);
    floor.position = vec3(0, -2.0f, 0);
    floor.scale = vec3(20.0f, 0.5f, 20.0f);
    floor.color = vec4(0.9f, 0.9f, 0.9f, 1.0f); // Brighter white for better shadow contrast
    g_scene.add_object(floor);

    // Center object
    Object3D center(ObjectType::TORUS);
    center.position = vec3(0, 0, 0);
    center.scale = vec3(1.5f, 1.5f, 1.5f);
    center.color = vec4(1, 0, 0, 1);
    g_scene.add_object(center);

    // Random objects
    for (int i = 0; i < 30; ++i) {
        ObjectType type = ObjectType::SPHERE;
        int r = rand() % 3;
        if (r == 1) type = ObjectType::TORUS;
        if (r == 2) type = ObjectType::BOX;

        Object3D obj(type);
        float angle = (rand() % 360) * 0.01745f; // degrees to radians
        float dist = 3.0f + (rand() % 100) * 0.05f;
        float height = -1.0f + (rand() % 100) * 0.04f;
        obj.position = vec3(std::cos(angle) * dist, height, std::sin(angle) * dist);

        float s = 0.3f + (rand() % 100) * 0.005f;
        obj.scale = vec3(s, s, s);
        obj.color = vec4((rand() % 100) / 100.0f,
                         (rand() % 100) / 100.0f,
                         (rand() % 100) / 100.0f, 1.0f);
        g_scene.add_object(obj);
    }
}

// Wrapper to generate periodic noise
void gen_periodic_noise(uint8_t* buffer, int w, int h, const float* params, int num_params) {
    procedural::gen_noise(buffer, w, h, params, num_params);
    float p_params[] = {0.1f}; // 10% overlap
    procedural::make_periodic(buffer, w, h, p_params, 1);
}

int main(int argc, char** argv) {
    printf("Running 3D Renderer Test...\n");

#if !defined(STRIP_ALL)
    for (int i = 1; i < argc; ++i) {
        if (strcmp(argv[i], "--debug") == 0) {
            Renderer3D::SetDebugEnabled(true);
        }
    }
#else
    (void)argc;
    (void)argv;
#endif

    PlatformState platform_state = {};
    platform_init(&platform_state, false, nullptr, nullptr);

    // The test's own WGPU init sequence
    init_wgpu(&platform_state);

    g_renderer.init(g_device, g_queue, g_format);
    g_renderer.resize(platform_state.width, platform_state.height);
    g_textures.init(g_device, g_queue);

    ProceduralTextureDef grid_def;
    grid_def.width = 256;
    grid_def.height = 256;
    grid_def.gen_func = procedural::gen_grid;
    grid_def.params = {10.0f, 1.0f}; // Frequency, thickness
    g_textures.create_procedural_texture("floor_grid", grid_def);
    g_renderer.set_noise_texture(g_textures.get_texture_view("floor_grid"));

    setup_scene();

    g_camera.position = vec3(0, 5, 10);
    g_camera.target = vec3(0, 0, 0);

    float time = 0.0f;
    while (!platform_should_close(&platform_state)) {
        platform_poll(&platform_state);
        time = (float)platform_get_time();

        // Orbiting camera path around the scene center.
        float cam_radius = 10.0f + std::sin(time * 0.3f) * 4.0f;
        float cam_height = 5.0f + std::cos(time * 0.4f) * 3.0f;
        g_camera.set_look_at(vec3(std::sin(time * 0.5f) * cam_radius,
                                  cam_height,
                                  std::cos(time * 0.5f) * cam_radius),
                             vec3(0, 0, 0), vec3(0, 1, 0));
        g_camera.aspect_ratio = platform_get_aspect_ratio(&platform_state);

        // Animate every object except the floor (index 0).
        for (size_t i = 1; i < g_scene.objects.size(); ++i) {
            g_scene.objects[i].rotation = quat::from_axis(vec3(0, 1, 0), time * 2.0f + i);
            g_scene.objects[i].position.y = std::sin(time * 3.0f + i) * 1.5f;
        }
        WGPUSurfaceTexture surface_tex;
        wgpuSurfaceGetCurrentTexture(g_surface, &surface_tex);
        if (surface_tex.status == WGPUSurfaceGetCurrentTextureStatus_SuccessOptimal) {
            WGPUTextureViewDescriptor view_desc = {};
            view_desc.format = g_format;
            view_desc.dimension = WGPUTextureViewDimension_2D;
            view_desc.mipLevelCount = 1;
            view_desc.arrayLayerCount = 1;
            WGPUTextureView view = wgpuTextureCreateView(surface_tex.texture, &view_desc);

            g_renderer.render(g_scene, g_camera, time, view);

            wgpuTextureViewRelease(view);
            wgpuSurfacePresent(g_surface);
            wgpuTextureRelease(surface_tex.texture);
        }
    }

    g_renderer.shutdown();
    g_textures.shutdown();
    platform_shutdown(&platform_state);
    return 0;
}