diff options
Diffstat (limited to 'src/test_demo.cc')
| -rw-r--r-- | src/test_demo.cc | 136 |
1 file changed, 118 insertions(+), 18 deletions(-)
diff --git a/src/test_demo.cc b/src/test_demo.cc index c26e65a..9ae0e3a 100644 --- a/src/test_demo.cc +++ b/src/test_demo.cc @@ -17,6 +17,95 @@ extern float GetDemoDuration(); extern void LoadTimeline(MainSequence& main_seq, WGPUDevice device, WGPUQueue queue, WGPUTextureFormat format); +// Inline peak meter effect for debugging audio-visual sync +#include "gpu/effects/post_process_helper.h" +class PeakMeterEffect : public PostProcessEffect { + public: + PeakMeterEffect(WGPUDevice device, WGPUQueue queue, WGPUTextureFormat format) + : PostProcessEffect(device, queue) { + const char* shader_code = R"( + struct VertexOutput { + @builtin(position) position: vec4<f32>, + @location(0) uv: vec2<f32>, + }; + + struct Uniforms { + peak_value: f32, + _pad0: f32, + _pad1: f32, + _pad2: f32, + }; + + @group(0) @binding(0) var inputSampler: sampler; + @group(0) @binding(1) var inputTexture: texture_2d<f32>; + @group(0) @binding(2) var<uniform> uniforms: Uniforms; + + @vertex + fn vs_main(@builtin(vertex_index) vertexIndex: u32) -> VertexOutput { + var output: VertexOutput; + var pos = array<vec2<f32>, 3>( + vec2<f32>(-1.0, -1.0), + vec2<f32>(3.0, -1.0), + vec2<f32>(-1.0, 3.0) + ); + output.position = vec4<f32>(pos[vertexIndex], 0.0, 1.0); + output.uv = pos[vertexIndex] * 0.5 + 0.5; + return output; + } + + @fragment + fn fs_main(input: VertexOutput) -> @location(0) vec4<f32> { + let color = textureSample(inputTexture, inputSampler, input.uv); + + // Draw red horizontal bar in middle of screen + // Bar height: 5% of screen height + // Bar width: proportional to peak_value (0.0 to 1.0) + let bar_height = 0.05; + let bar_center_y = 0.5; + let bar_y_min = bar_center_y - bar_height * 0.5; + let bar_y_max = bar_center_y + bar_height * 0.5; + + // Bar extends from left (0.0) to peak_value position + let bar_x_max = uniforms.peak_value; + + // Check if current pixel is inside the bar + let in_bar_y = input.uv.y >= bar_y_min && input.uv.y <= bar_y_max; + let in_bar_x = input.uv.x <= 
bar_x_max; + + if (in_bar_y && in_bar_x) { + // Red bar + return vec4<f32>(1.0, 0.0, 0.0, 1.0); + } else { + // Original color + return color; + } + } + )"; + + pipeline_ = create_post_process_pipeline(device, format, shader_code); + uniforms_ = gpu_create_buffer( + device, 16, WGPUBufferUsage_Uniform | WGPUBufferUsage_CopyDst); + } + + void update_bind_group(WGPUTextureView input_view) { + pp_update_bind_group(device_, pipeline_, &bind_group_, input_view, uniforms_); + } + + void render(WGPURenderPassEncoder pass, float time, float beat, + float peak_value, float aspect_ratio) { + (void)time; + (void)beat; + (void)aspect_ratio; + + float uniforms[4] = {peak_value, 0.0f, 0.0f, 0.0f}; + wgpuQueueWriteBuffer(queue_, uniforms_.buffer, 0, uniforms, sizeof(uniforms)); + + wgpuRenderPassEncoderSetPipeline(pass, pipeline_); + wgpuRenderPassEncoderSetBindGroup(pass, 0, bind_group_, 0, nullptr); + wgpuRenderPassEncoderDraw(pass, 3, 1, 0, 0); + } +}; + #if !defined(STRIP_ALL) static void print_usage(const char* prog_name) { printf("Usage: %s [OPTIONS]\n", prog_name); @@ -104,6 +193,13 @@ int main(int argc, char** argv) { // Initialize platform, GPU, audio platform_state = platform_init(fullscreen_enabled, width, height); gpu_init(&platform_state); + + // Add peak meter visualization effect (renders as final post-process) +#if !defined(STRIP_ALL) + auto* peak_meter = new PeakMeterEffect(g_device, g_queue, g_format); + gpu_add_custom_effect(peak_meter, 0.0f, 99999.0f, 999); // High priority = renders last +#endif + audio_init(); static AudioEngine g_audio_engine; @@ -187,26 +283,30 @@ int main(int argc, char** argv) { gpu_resize(last_width, last_height); } - const double current_time = platform_state.time; + const double physical_time = platform_state.time; - // Auto-exit at end - if (demo_duration > 0.0f && current_time >= demo_duration) { + // Auto-exit at end (based on physical time for reliability) + if (demo_duration > 0.0f && physical_time >= demo_duration) { #if 
!defined(STRIP_ALL) - printf("test_demo finished at %.2f seconds.\n", current_time); + printf("test_demo finished at %.2f seconds.\n", physical_time); #endif break; } - fill_audio_buffer(current_time); + fill_audio_buffer(physical_time); + + // Audio-visual synchronization: Use audio playback time (not physical time!) + // This accounts for ring buffer latency automatically (no hardcoded constants) + const float audio_time = audio_get_playback_time(); // Audio/visual sync parameters const float aspect_ratio = platform_state.aspect_ratio; - // Use real-time peak for proper audio-visual synchronization + // Peak is measured at audio playback time, so it matches audio_time const float raw_peak = audio_get_realtime_peak(); const float visual_peak = fminf(raw_peak * 8.0f, 1.0f); - // Beat calculation (hardcoded BPM=120) - const float beat_time = (float)current_time * 120.0f / 60.0f; + // Beat calculation uses AUDIO TIME (what's being heard), not physical time + const float beat_time = audio_time * 120.0f / 60.0f; const int beat_number = (int)beat_time; const float beat = fmodf(beat_time, 1.0f); @@ -215,30 +315,30 @@ int main(int argc, char** argv) { if (peak_log) { if (log_peaks_fine) { // Log every frame for fine-grained analysis - fprintf(peak_log, "%d %.6f %.6f %d\n", frame_number, current_time, raw_peak, beat_number); + fprintf(peak_log, "%d %.6f %.6f %d\n", frame_number, audio_time, raw_peak, beat_number); } else if (beat_number != last_beat_logged) { // Log only at beat boundaries - fprintf(peak_log, "%d %.6f %.6f\n", beat_number, current_time, raw_peak); + fprintf(peak_log, "%d %.6f %.6f\n", beat_number, audio_time, raw_peak); last_beat_logged = beat_number; } } frame_number++; - // Debug output every 0.5 seconds + // Debug output every 0.5 seconds (based on audio time for consistency) static float last_print_time = -1.0f; - if (current_time - last_print_time >= 0.5f) { + if (audio_time - last_print_time >= 0.5f) { if (tempo_test_enabled) { - printf("[T=%.2f, 
MusicT=%.2f, Beat=%d, Frac=%.2f, Peak=%.2f, Tempo=%.2fx]\n", - (float)current_time, g_music_time, beat_number, beat, visual_peak, g_tempo_scale); + printf("[AudioT=%.2f, PhysT=%.2f, MusicT=%.2f, Beat=%d, Frac=%.2f, Peak=%.2f, Tempo=%.2fx]\n", + audio_time, (float)physical_time, g_music_time, beat_number, beat, visual_peak, g_tempo_scale); } else { - printf("[T=%.2f, Beat=%d, Frac=%.2f, Peak=%.2f]\n", - (float)current_time, beat_number, beat, visual_peak); + printf("[AudioT=%.2f, Beat=%d, Frac=%.2f, Peak=%.2f]\n", + audio_time, beat_number, beat, visual_peak); } - last_print_time = (float)current_time; + last_print_time = audio_time; } #endif - gpu_draw(visual_peak, aspect_ratio, (float)current_time, beat); + gpu_draw(visual_peak, aspect_ratio, audio_time, beat); audio_update(); } |
