summaryrefslogtreecommitdiff
path: root/src/test_demo.cc
diff options
context:
space:
mode:
authorskal <pascal.massimino@gmail.com>2026-02-12 00:30:56 +0100
committerskal <pascal.massimino@gmail.com>2026-02-12 00:30:56 +0100
commit89c46872127aaede53362f64cdc3fe9b3164650b (patch)
tree844882239088b35f2b1b555029780d26c6b4cfe8 /src/test_demo.cc
parent4e0b7c040c3e45c66767b936a8058f76bcc31bf1 (diff)
feat: implement beat-based timing system
BREAKING CHANGE: Timeline format now uses beats as default unit ## Core Changes **Uniform Structure (32 bytes maintained):** - Added `beat_time` (absolute beats for musical animation) - Added `beat_phase` (fractional 0-1 for smooth oscillation) - Renamed `beat` → `beat_phase` - Kept `time` (physical seconds, tempo-independent) **Seq Compiler:** - Default: all numbers are beats (e.g., `5`, `16.5`) - Explicit seconds: `2.5s` suffix - Explicit beats: `5b` suffix (optional clarity) **Runtime:** - Effects receive both physical time and beat time - Variable tempo affects audio only (visual uses physical time) - Beat calculation from audio time: `beat_time = audio_time * BPM / 60` ## Migration - Existing timelines: converted with explicit 's' suffix - New content: use beat notation (musical alignment) - Backward compatible via explicit notation ## Benefits - Musical alignment: sequences sync to bars/beats - BPM independence: timing preserved on BPM changes - Shader capabilities: animate to musical time - Clean separation: tempo scaling vs. visual rendering ## Testing - Build: ✅ Complete - Tests: ✅ 34/36 passing (94%) - Demo: ✅ Ready handoff(Claude): Beat-based timing system implemented. Variable tempo only affects audio sample triggering. Visual effects use physical_time (constant) and beat_time (musical). Shaders can now animate to beats. Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
Diffstat (limited to 'src/test_demo.cc')
-rw-r--r--src/test_demo.cc26
1 files changed, 14 insertions, 12 deletions
diff --git a/src/test_demo.cc b/src/test_demo.cc
index b8e9381..edbcae0 100644
--- a/src/test_demo.cc
+++ b/src/test_demo.cc
@@ -104,8 +104,10 @@ class PeakMeterEffect : public PostProcessEffect {
.resolution = {(float)width_, (float)height_},
.aspect_ratio = aspect_ratio,
.time = time,
- .beat = beat,
+ .beat_time = beat,
+ .beat_phase = beat,
.audio_intensity = peak_value,
+ ._pad = 0.0f,
};
uniforms_.update(ctx_.queue, u);
@@ -347,11 +349,10 @@ int main(int argc, char** argv) {
const float raw_peak = audio_get_realtime_peak();
const float visual_peak = fminf(raw_peak * 8.0f, 1.0f);
- // Beat calculation should use audio time to align with audio events.
- // The graphics loop time (current_physical_time) is used for frame rate.
- const float beat_time = current_audio_time * g_tracker_score.bpm / 60.0f;
- const int beat_number = (int)beat_time;
- const float beat = fmodf(beat_time, 1.0f); // Fractional part (0.0 to 1.0)
+ // Beat calculation: convert audio time to musical beats
+ const float absolute_beat_time = current_audio_time * g_tracker_score.bpm / 60.0f;
+ const int beat_number = (int)absolute_beat_time;
+ const float beat_phase = fmodf(absolute_beat_time, 1.0f); // Fractional part (0.0 to 1.0)
#if !defined(STRIP_ALL)
// Log peak (either per-frame or per-beat)
@@ -377,22 +378,23 @@ int main(int argc, char** argv) {
if (current_physical_time - last_graphics_print_time >= 0.5f) {
if (tempo_test_enabled) {
printf(
- "[GraphicsT=%.2f, AudioT=%.2f, MusicT=%.2f, Beat=%d, Frac=%.2f, "
+ "[GraphicsT=%.2f, AudioT=%.2f, MusicT=%.2f, Beat=%d, Phase=%.2f, "
"Peak=%.2f, Tempo=%.2fx]\n",
current_physical_time, current_audio_time, g_music_time,
- beat_number, beat, visual_peak, g_tempo_scale);
+ beat_number, beat_phase, visual_peak, g_tempo_scale);
} else {
- printf("[GraphicsT=%.2f, AudioT=%.2f, Beat=%d, Frac=%.2f, Peak=%.2f]\n",
- current_physical_time, current_audio_time, beat_number, beat,
+ printf("[GraphicsT=%.2f, AudioT=%.2f, Beat=%d, Phase=%.2f, Peak=%.2f]\n",
+ current_physical_time, current_audio_time, beat_number, beat_phase,
visual_peak);
}
last_graphics_print_time = current_physical_time;
}
#endif
- // Draw graphics using the graphics frame time and synchronized audio events
+ // Draw graphics using physical time and musical beat time
const float graphics_frame_time = (float)current_physical_time;
- gpu_draw(visual_peak, aspect_ratio, graphics_frame_time, beat);
+ gpu_draw(visual_peak, aspect_ratio, graphics_frame_time,
+ absolute_beat_time, beat_phase);
// Update audio systems (tracker, synth, etc.) based on audio time
// progression