Diffstat (limited to 'src/test_demo.cc')
| -rw-r--r-- | src/test_demo.cc | 69 |
1 file changed, 26 insertions, 43 deletions
diff --git a/src/test_demo.cc b/src/test_demo.cc
index b8e9381..9cbeae2 100644
--- a/src/test_demo.cc
+++ b/src/test_demo.cc
@@ -21,33 +21,25 @@ extern void LoadTimeline(MainSequence& main_seq, const GpuContext& ctx);
 // Inline peak meter effect for debugging audio-visual sync
 #include "gpu/effects/post_process_helper.h"
+#include "gpu/effects/shader_composer.h"
+
 class PeakMeterEffect : public PostProcessEffect {
  public:
   PeakMeterEffect(const GpuContext& ctx) : PostProcessEffect(ctx) {
-    // Use standard post-process binding macros
-    const char* shader_code = R"(
+    // Use ShaderComposer to include CommonUniforms from common_uniforms.wgsl
+    const char* shader_main = R"(
       struct VertexOutput {
         @builtin(position) position: vec4<f32>,
         @location(0) uv: vec2<f32>,
       };
 
-      struct Uniforms {
-        resolution: vec2<f32>,
-        _pad0: f32,
-        _pad1: f32,
-        aspect_ratio: f32,
-        time: f32,
-        beat: f32,
-        audio_intensity: f32,
-      };
-
       struct EffectParams {
         unused: f32,
       };
 
       @group(0) @binding(0) var inputSampler: sampler;
       @group(0) @binding(1) var inputTexture: texture_2d<f32>;
-      @group(0) @binding(2) var<uniform> uniforms: Uniforms;
+      @group(0) @binding(2) var<uniform> uniforms: CommonUniforms;
       @group(0) @binding(3) var<uniform> params: EffectParams;
 
       @vertex
@@ -86,32 +78,23 @@ class PeakMeterEffect : public PostProcessEffect {
     }
     )";
 
+    // Compose shader with common_uniforms to get CommonUniforms definition
+    std::string shader_code = ShaderComposer::Get().Compose(
+        {"common_uniforms"}, shader_main);
+
     pipeline_ =
-        create_post_process_pipeline(ctx_.device, ctx_.format, shader_code);
+        create_post_process_pipeline(ctx_.device, ctx_.format, shader_code.c_str());
   }
 
-  void update_bind_group(WGPUTextureView input_view) {
+  void update_bind_group(WGPUTextureView input_view) override {
    pp_update_bind_group(ctx_.device, pipeline_, &bind_group_, input_view,
                          uniforms_.get(), {});
   }
 
-  void render(WGPURenderPassEncoder pass, float time, float beat,
-              float peak_value, float aspect_ratio) {
-    (void)time;
-    (void)beat;
-
-    CommonPostProcessUniforms u = {
-        .resolution = {(float)width_, (float)height_},
-        .aspect_ratio = aspect_ratio,
-        .time = time,
-        .beat = beat,
-        .audio_intensity = peak_value,
-    };
-    uniforms_.update(ctx_.queue, u);
-
-    wgpuRenderPassEncoderSetPipeline(pass, pipeline_);
-    wgpuRenderPassEncoderSetBindGroup(pass, 0, bind_group_, 0, nullptr);
-    wgpuRenderPassEncoderDraw(pass, 3, 1, 0, 0);  // Full-screen triangle
+  void render(WGPURenderPassEncoder pass,
+              const CommonPostProcessUniforms& uniforms) override {
+    uniforms_.update(ctx_.queue, uniforms);
+    PostProcessEffect::render(pass, uniforms);
   }
 };
 
@@ -347,11 +330,10 @@ int main(int argc, char** argv) {
     const float raw_peak = audio_get_realtime_peak();
     const float visual_peak = fminf(raw_peak * 8.0f, 1.0f);
 
-    // Beat calculation should use audio time to align with audio events.
-    // The graphics loop time (current_physical_time) is used for frame rate.
-    const float beat_time = current_audio_time * g_tracker_score.bpm / 60.0f;
-    const int beat_number = (int)beat_time;
-    const float beat = fmodf(beat_time, 1.0f);  // Fractional part (0.0 to 1.0)
+    // Beat calculation: convert audio time to musical beats
+    const float absolute_beat_time = current_audio_time * g_tracker_score.bpm / 60.0f;
+    const int beat_number = (int)absolute_beat_time;
+    const float beat_phase = fmodf(absolute_beat_time, 1.0f);  // Fractional part (0.0 to 1.0)
 
 #if !defined(STRIP_ALL)
     // Log peak (either per-frame or per-beat)
@@ -377,22 +359,23 @@ int main(int argc, char** argv) {
     if (current_physical_time - last_graphics_print_time >= 0.5f) {
       if (tempo_test_enabled) {
         printf(
-            "[GraphicsT=%.2f, AudioT=%.2f, MusicT=%.2f, Beat=%d, Frac=%.2f, "
+            "[GraphicsT=%.2f, AudioT=%.2f, MusicT=%.2f, Beat=%d, Phase=%.2f, "
             "Peak=%.2f, Tempo=%.2fx]\n",
            current_physical_time, current_audio_time, g_music_time,
-            beat_number, beat, visual_peak, g_tempo_scale);
+            beat_number, beat_phase, visual_peak, g_tempo_scale);
       } else {
-        printf("[GraphicsT=%.2f, AudioT=%.2f, Beat=%d, Frac=%.2f, Peak=%.2f]\n",
-               current_physical_time, current_audio_time, beat_number, beat,
+        printf("[GraphicsT=%.2f, AudioT=%.2f, Beat=%d, Phase=%.2f, Peak=%.2f]\n",
+               current_physical_time, current_audio_time, beat_number, beat_phase,
                visual_peak);
       }
       last_graphics_print_time = current_physical_time;
     }
 #endif
 
-    // Draw graphics using the graphics frame time and synchronized audio events
+    // Draw graphics using physical time and musical beat time
     const float graphics_frame_time = (float)current_physical_time;
-    gpu_draw(visual_peak, aspect_ratio, graphics_frame_time, beat);
+    gpu_draw(visual_peak, aspect_ratio, graphics_frame_time,
+             absolute_beat_time, beat_phase);
 
     // Update audio systems (tracker, synth, etc.) based on audio time
     // progression
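For reference, the beat conversion this patch switches to is plain arithmetic: beats elapsed = audio_time * bpm / 60, where the integer part is the beat number and the fractional part is the phase within the current beat. A minimal standalone sketch of that math (the 120 BPM tempo and 3.75 s timestamp are illustrative values, not taken from the patch):

    #include <cmath>
    #include <cstdio>

    int main() {
      const float bpm = 120.0f;                // assumed tempo, for illustration
      const float current_audio_time = 3.75f;  // seconds of audio played so far
      // Convert audio time to musical beats: 3.75 s * 120 BPM / 60 = 7.5 beats
      const float absolute_beat_time = current_audio_time * bpm / 60.0f;
      const int beat_number = (int)absolute_beat_time;                   // 7
      const float beat_phase = std::fmod(absolute_beat_time, 1.0f);      // 0.5
      printf("beat=%d phase=%.2f\n", beat_number, beat_phase);
      return 0;
    }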
